gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Copyright (C) 2016 Hurence (support@hurence.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hurence.logisland.documentation.rst;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Arrays;
public class RstPrintWriter extends PrintWriter {

    /**
     * Underline characters for section titles, indexed by section level:
     * level 1 uses '=', level 2 uses '-', and so on. Valid levels are
     * 1..SECTION_DELIMITERS.length inclusive.
     */
    private static final Character[] SECTION_DELIMITERS = new Character[]{'=', '-', '_', '.', ':', '`', '\'', '\"', '~', '^', '*', '+', '#'};

    public RstPrintWriter(OutputStream out, boolean autoFlush) {
        super(out, autoFlush);
    }

    /**
     * Writes a description paragraph terminated by a line separator.
     *
     * @param descriptionString the description text
     */
    public void printDescriptionString(String descriptionString) {
        println(descriptionString);
    }

    /**
     * Writes a section title followed by an RST underline of the same length.
     *
     * @param sectionLevel the 1-based section level; must be in
     *                     [1, SECTION_DELIMITERS.length]
     * @param title        the non-null, non-empty title text
     */
    public void writeSectionTitle(final int sectionLevel, final String title) {
        // The delimiter array is indexed with (sectionLevel - 1), so the valid
        // range is [1, SECTION_DELIMITERS.length]. The previous check
        // ("sectionLevel < length") wrongly rejected the deepest level and
        // accepted 0, which would have thrown ArrayIndexOutOfBoundsException.
        assert sectionLevel >= 1 && sectionLevel <= SECTION_DELIMITERS.length;
        assert title != null;
        assert !title.isEmpty();
        println();
        println(title);
        println(repeat(SECTION_DELIMITERS[sectionLevel - 1], title.length()));
    }

    /**
     * Writes a transition. Transitions separate other body elements; a
     * transition should not begin or end a section or document, nor should two
     * transitions be immediately adjacent.
     * <p>
     * The RST syntax is a horizontal line of 4 or more repeated punctuation
     * characters, with blank lines before and after; 10 dashes are used here.
     */
    public void writeTransition() {
        println();
        println(repeat(SECTION_DELIMITERS[1], 10));
        println();
    }

    /**
     * Writes the given text wrapped in RST strong-emphasis markers,
     * e.g. {@code **text**}. (The previous Javadoc described an unrelated
     * XML element helper.)
     *
     * @param characters the text to render strongly emphasized
     */
    public void printStrong(final String characters) {
        print("**");
        print(characters);
        print("**");
    }

    /**
     * A helper method to write an external link,
     * e.g. {@code `text <location>`_}.
     *
     * @param text the text of the link
     * @param location the location of the link
     */
    public void writeLink(final String text, final String location) {
        print(" `");
        print(text);
        print(" <");
        print(location);
        print(">`_ ");
    }

    /**
     * A helper method to write a cross-reference target, e.g.
     * {@code .. _example:}, terminated by a line separator.
     *
     * @param name the text of the target
     */
    public void writeInternalReference(final String name) {
        print(".. _");
        print(name);
        print(": ");
        println();
    }

    /**
     * A helper method to write an internal link, e.g. {@code `name`_}.
     *
     * @param name the text of the link
     */
    public void writeInternalReferenceLink(final String name) {
        print("`");
        print(name);
        print("`_ ");
    }

    /**
     * A helper method to include another file, e.g. {@code .. include:: path}.
     * NOTE(review): no trailing newline is emitted — callers appear to be
     * expected to terminate the line themselves; confirm before changing.
     *
     * @param filepath the file to include
     */
    public void writeIncludeReference(final String filepath) {
        print(".. include:: ");
        print(filepath);
    }

    /**
     * Writes an image directive with optional attributes, e.g.:
     * <pre>
     * .. image:: picture.jpeg
     *  :alt: alternate text
     *  :align: right
     *  :height: 100 px
     *  :width: 200 px
     *  :scale: 50 %
     * </pre>
     *
     * @param imageSrc the image source path (required)
     * @param alt      alternate text, or null to omit
     * @param align    alignment keyword, or null to omit
     * @param height   height in pixels, or null to omit
     * @param width    width in pixels, or null to omit
     * @param scale    scale in percent, or null to omit
     */
    public void writeImage(final String imageSrc, final String alt, final String align, final Integer height, final Integer width, final Integer scale) {
        print(".. image:: ");
        println(imageSrc);
        if (alt != null) {
            print(" :alt: ");
            println(alt);
        }
        if (align != null) {
            print(" :align: ");
            println(align);
        }
        if (height != null) {
            print(" :height: ");
            print(height);
            println(" px");
        }
        if (width != null) {
            print(" :width: ");
            print(width);
            println(" px");
        }
        if (scale != null) {
            print(" :scale: ");
            print(scale);
            println(" %");
        }
    }

    /**
     * A helper method to write an unordered list item, e.g. {@code - content}.
     *
     * @param content the text
     */
    public void printListItem(final String content) {
        print("- ");
        println(content);
    }

    /**
     * Writes a csv-table directive with an optional header row, column widths
     * and escape character, surrounded by blank lines.
     * NOTE(review): header cells are wrapped in double quotes but embedded
     * quotes are not escaped — supply an escape character if headers may
     * contain '"'.
     *
     * @param title   the table title
     * @param headers the column headers, or null to omit the :header: option
     * @param widths  relative column widths, or null to omit the :widths: option
     * @param escape  the :escape: character, or null to omit it
     */
    public void printCsvTable(final String title, final String[] headers, final int[] widths, final Character escape) {
        println();
        print(".. csv-table:: ");
        println(title);
        if (headers != null) {
            StringBuilder strHeaders = new StringBuilder();
            for (int i = 0; i < headers.length; i++) {
                strHeaders.append('"');
                strHeaders.append(headers[i]);
                strHeaders.append('"');
                if (i < headers.length - 1)
                    strHeaders.append(',');
            }
            print(" :header: ");
            println(strHeaders.toString());
        }
        if (widths != null) {
            StringBuilder strWidths = new StringBuilder();
            for (int i = 0; i < widths.length; i++) {
                strWidths.append(widths[i]);
                if (i < widths.length - 1)
                    strWidths.append(',');
            }
            print(" :widths: ");
            println(strWidths.toString());
        }
        if (escape != null) {
            print(" :escape: ");
            println(escape);
        }
        println();
    }

    /**
     * Builds a string consisting of {@code count} copies of {@code c}; shared
     * by section underlines and transitions.
     */
    private static String repeat(final char c, final int count) {
        final char[] chars = new char[count];
        Arrays.fill(chars, c);
        return new String(chars);
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.model;
import java.util.Calendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.unitime.timetable.model.base.BaseReservation;
/**
* @author Tomas Muller
*/
public abstract class Reservation extends BaseReservation implements Comparable<Reservation> {
    private static final long serialVersionUID = 1L;

    /*[CONSTRUCTOR MARKER BEGIN]*/
    public Reservation () {
        super();
    }

    /**
     * Constructor for primary key
     */
    public Reservation (java.lang.Long uniqueId) {
        super(uniqueId);
    }
    /*[CONSTRUCTOR MARKER END]*/

    /**
     * Checks whether this reservation has expired.
     *
     * @return true iff an expiration date is set and lies strictly before the
     *         start of the current day (US-locale calendar)
     */
    public boolean isExpired() {
        if (getExpirationDate() == null) return false;
        // Compare against today's midnight so a reservation expiring today is
        // still considered valid.
        Calendar c = Calendar.getInstance(Locale.US);
        c.set(Calendar.HOUR_OF_DAY, 0);
        c.set(Calendar.MINUTE, 0);
        c.set(Calendar.SECOND, 0);
        c.set(Calendar.MILLISECOND, 0);
        return getExpirationDate().before(c.getTime());
    }

    /** Checks whether this reservation applies to the given student and course request. */
    public abstract boolean isApplicable(Student student, CourseRequest request);

    /**
     * Reservation limit.
     *
     * @return the configured limit, or -1 when the reservation is unlimited
     */
    public int getReservationLimit() {
        return (getLimit() == null ? -1 : getLimit().intValue());
    }

    /**
     * Checks whether the given class is covered by this reservation's class
     * restrictions, either directly or through a parent/child relation.
     */
    private boolean hasClass(Class_ clazz) {
        for (Class_ other: getClasses()) {
            if (clazz.equals(other) || other.isParentOf(clazz) || clazz.isParentOf(other)) return true;
        }
        return false;
    }

    /**
     * Checks whether the given enrollment matches this reservation's
     * configuration and class restrictions.
     *
     * @param enrollment the student's class enrollments
     * @return false for an empty enrollment or when any enrolled class
     *         violates a restriction; true otherwise
     */
    public boolean isMatching(List<StudentClassEnrollment> enrollment) {
        if (enrollment.isEmpty()) return false;
        if (!getConfigurations().isEmpty()) {
            // every enrolled class must belong to a reserved configuration
            for (StudentClassEnrollment e: enrollment) {
                if (!getConfigurations().contains(e.getClazz().getSchedulingSubpart().getInstrOfferingConfig()))
                    return false;
            }
        }
        if (!getClasses().isEmpty()) {
            // every enrolled class must be covered by the reserved classes
            for (StudentClassEnrollment e: enrollment) {
                if (!hasClass(e.getClazz())) return false;
            }
        }
        return true;
    }

    /**
     * Checks whether a single class is compatible with this reservation's
     * configuration and class restrictions.
     */
    public boolean isMatching(Class_ clazz) {
        if (!getConfigurations().isEmpty() && !getConfigurations().contains(clazz.getSchedulingSubpart().getInstrOfferingConfig()))
            return false;
        if (!getClasses().isEmpty() && !hasClass(clazz))
            return false;
        return true;
    }

    /** Reservation priority; smaller values sort first (see {@link #compareTo}). */
    public abstract int getPriority();

    /** True when this reservation may assign students over the class limit. */
    public abstract boolean isCanAssignOverLimit();

    /** True when this reservation must be used when applicable. */
    public abstract boolean isMustBeUsed();

    /** True when this reservation allows overlaps. */
    public abstract boolean isAllowOverlap();

    /**
     * Orders reservations by priority (ascending), then by restrictivity,
     * then by unique id for a stable total order.
     */
    @Override
    public int compareTo(Reservation r) {
        if (getPriority() != r.getPriority()) {
            return (getPriority() < r.getPriority() ? -1 : 1);
        }
        int cmp = Double.compare(getRestrictivity(), r.getRestrictivity());
        if (cmp != 0) return cmp;
        return getUniqueId().compareTo(r.getUniqueId());
    }

    /**
     * Computes how restrictive this reservation is as a value in (0, 1]: the
     * fraction of offering configurations covered, multiplied by the fraction
     * of classes covered within each restricted scheduling subpart. A
     * reservation without configuration restrictions returns 1.0.
     */
    public double getRestrictivity() {
        if (getConfigurations().isEmpty()) return 1.0;
        double restrictivity = ((double)getConfigurations().size()) / getInstructionalOffering().getInstrOfferingConfigs().size();
        if (getClasses().isEmpty()) return restrictivity;
        // count reserved classes per scheduling subpart
        Map<SchedulingSubpart, Integer> counts = new HashMap<SchedulingSubpart, Integer>();
        for (Class_ clazz: getClasses()) {
            Integer old = counts.get(clazz.getSchedulingSubpart());
            counts.put(clazz.getSchedulingSubpart(), 1 + (old == null ? 0 : old.intValue()));
        }
        for (Map.Entry<SchedulingSubpart, Integer> entry: counts.entrySet()) {
            restrictivity *= ((double)entry.getValue().intValue()) / entry.getKey().getClasses().size();
        }
        return restrictivity;
    }

    /**
     * Maps scheduling-subpart ids to the ids of reserved classes; each reserved
     * class is expanded to include all of its parent classes.
     */
    protected Map<Long, Set<Long>> getSections() {
        Map<Long, Set<Long>> ret = new HashMap<Long, Set<Long>>();
        for (Class_ clazz: getClasses()) {
            while (clazz != null) {
                Set<Long> sections = ret.get(clazz.getSchedulingSubpart().getUniqueId());
                if (sections == null) {
                    sections = new HashSet<Long>();
                    ret.put(clazz.getSchedulingSubpart().getUniqueId(), sections);
                }
                sections.add(clazz.getUniqueId());
                clazz = clazz.getParentClass();
            }
        }
        return ret;
    }

    /**
     * Remaining space of this reservation.
     *
     * @return {@link Integer#MAX_VALUE} for an unlimited reservation, otherwise
     *         the limit minus the number of students already using it
     */
    public int getReservedAvailableSpace() {
        // Unlimited
        if (getReservationLimit() < 0) return Integer.MAX_VALUE;
        return getReservationLimit() - countEnrollmentsForReservation();
    }

    /** Counts distinct students whose matching enrollments consume this reservation. */
    private int countEnrollmentsForReservation() {
        Set<Long> checked = new HashSet<Long>(); // course requests already examined
        Set<Long> students = new HashSet<Long>();
        for (InstrOfferingConfig config: getInstructionalOffering().getInstrOfferingConfigs())
            for (SchedulingSubpart subpart: config.getSchedulingSubparts())
                for (Class_ clazz: subpart.getClasses())
                    for (StudentClassEnrollment e: clazz.getStudentEnrollments())
                        if (e.getCourseRequest() != null && checked.add(e.getCourseRequest().getUniqueId()) && isApplicable(e.getStudent(), e.getCourseRequest()) && isMatching(e.getCourseRequest().getClassEnrollments())) {
                            students.add(e.getStudent().getUniqueId());
                        }
        return students.size();
    }

    /**
     * All configurations restricted by this reservation, including those
     * implied by its reserved classes.
     */
    protected Set<InstrOfferingConfig> getAllConfigurations() {
        Set<InstrOfferingConfig> configs = new HashSet<InstrOfferingConfig>();
        if (getConfigurations() != null)
            configs.addAll(getConfigurations());
        if (getClasses() != null)
            for (Class_ clazz: getClasses())
                configs.add(clazz.getSchedulingSubpart().getInstrOfferingConfig());
        return configs;
    }

    /**
     * Maps scheduling subparts to reserved classes; each reserved class is
     * expanded to include all of its parent classes.
     */
    protected Map<SchedulingSubpart, Set<Class_>> getAllSections() {
        Map<SchedulingSubpart, Set<Class_>> ret = new HashMap<SchedulingSubpart, Set<Class_>>();
        for (Class_ clazz: getClasses()) {
            while (clazz != null) {
                Set<Class_> sections = ret.get(clazz.getSchedulingSubpart());
                if (sections == null) {
                    sections = new HashSet<Class_>();
                    ret.put(clazz.getSchedulingSubpart(), sections);
                }
                sections.add(clazz);
                clazz = clazz.getParentClass();
            }
        }
        return ret;
    }

    /**
     * Upper bound on the number of students this reservation can serve, derived
     * from the configuration limits and capped by the class limits of each
     * restricted subpart. Negative intermediate values mean "unlimited".
     *
     * @return the cap, or null when there are no configurations or the
     *         reservation is effectively unlimited
     */
    public Integer getLimitCap() {
        Set<InstrOfferingConfig> configs = getAllConfigurations();
        if (configs.isEmpty()) return null;
        // config cap
        int cap = 0;
        for (InstrOfferingConfig config: configs)
            cap = add(cap, config.isUnlimitedEnrollment() ? -1 : config.getLimit());
        for (Set<Class_> sections: getAllSections().values()) {
            // subpart cap
            int subpartCap = 0;
            for (Class_ section: sections)
                subpartCap = add(subpartCap, section.getClassLimit());
            // minimize
            cap = min(cap, subpartCap);
        }
        // Integer.valueOf replaces the deprecated Integer(int) constructor
        return (cap < 0 ? null : Integer.valueOf(cap));
    }

    /** Minimum of two limits where a negative value means "unlimited". */
    private static int min(int l1, int l2) {
        return (l1 < 0 ? l2 : l2 < 0 ? l1 : Math.min(l1, l2));
    }

    /** Sum of two limits where a negative value ("unlimited") is absorbing. */
    private static int add(int l1, int l2) {
        return (l1 < 0 ? -1 : l2 < 0 ? -1 : l1 + l2);
    }
}
| |
/*
* Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.core.image.impl;
import boofcv.struct.image.*;
/**
* <p>
* Functions for converting between different primitive image types. Numerical values do not change or are closely approximated
* in these functions.
* </p>
*
* <p>
* DO NOT MODIFY: This class was automatically generated by {@link boofcv.core.image.impl.GenerateImplConvertImage}
* </p>
*
* @author Peter Abeles
*/
public class ImplConvertImage {

	// All convert(from, to) methods below share the same generated pattern:
	//  * sub-image case: either image may be a view into a larger buffer, so
	//    each row is addressed through getIndex(0, y) and copied pixel by
	//    pixel;
	//  * dense case: both backing arrays are contiguous, so all width*height
	//    elements are copied in one flat loop (System.arraycopy when the
	//    element types match and no cast/mask is needed).
	// Unsigned sources (UInt8/UInt16) are masked (& 0xFF / & 0xFFFF) so their
	// unsigned numeric value is preserved when widening.

	public static void convert( ImageUInt8 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( byte )( from.data[indexFrom++] & 0xFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			System.arraycopy(from.data, 0, to.data, 0, N);
		}
	}

	public static void convert( ImageUInt8 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( short )( from.data[indexFrom++] & 0xFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( short )( from.data[i] & 0xFF);
			}
		}
	}

	public static void convert( ImageUInt8 from, ImageSInt32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] & 0xFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] & 0xFF);
			}
		}
	}

	public static void convert( ImageUInt8 from, ImageSInt64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] & 0xFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] & 0xFF);
			}
		}
	}

	public static void convert( ImageUInt8 from, ImageFloat32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( float )( from.data[indexFrom++] & 0xFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( float )( from.data[i] & 0xFF);
			}
		}
	}

	public static void convert( ImageUInt8 from, ImageFloat64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( double )( from.data[indexFrom++] & 0xFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( double )( from.data[i] & 0xFF);
			}
		}
	}

	public static void convert( ImageSInt8 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			System.arraycopy(from.data, 0, to.data, 0, N);
		}
	}

	public static void convert( ImageSInt8 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( short )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( short )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt8 from, ImageSInt32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt8 from, ImageSInt64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt8 from, ImageFloat32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( float )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( float )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt8 from, ImageFloat64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( double )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( double )( from.data[i] );
			}
		}
	}

	public static void convert( ImageUInt16 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( byte )( from.data[indexFrom++] & 0xFFFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( byte )( from.data[i] & 0xFFFF);
			}
		}
	}

	public static void convert( ImageUInt16 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( short )( from.data[indexFrom++] & 0xFFFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			System.arraycopy(from.data, 0, to.data, 0, N);
		}
	}

	public static void convert( ImageUInt16 from, ImageSInt32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] & 0xFFFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] & 0xFFFF);
			}
		}
	}

	public static void convert( ImageUInt16 from, ImageSInt64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] & 0xFFFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] & 0xFFFF);
			}
		}
	}

	public static void convert( ImageUInt16 from, ImageFloat32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( float )( from.data[indexFrom++] & 0xFFFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( float )( from.data[i] & 0xFFFF);
			}
		}
	}

	public static void convert( ImageUInt16 from, ImageFloat64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( double )( from.data[indexFrom++] & 0xFFFF);
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( double )( from.data[i] & 0xFFFF);
			}
		}
	}

	public static void convert( ImageSInt16 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( byte )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( byte )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt16 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			System.arraycopy(from.data, 0, to.data, 0, N);
		}
	}

	public static void convert( ImageSInt16 from, ImageSInt32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt16 from, ImageSInt64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt16 from, ImageFloat32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( float )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( float )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt16 from, ImageFloat64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( double )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( double )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt32 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( byte )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( byte )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt32 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( short )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( short )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt32 from, ImageSInt64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt32 from, ImageFloat32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( float )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( float )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt32 from, ImageFloat64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( double )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( double )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt64 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( byte )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( byte )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt64 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( short )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( short )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt64 from, ImageSInt32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( int )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( int )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt64 from, ImageFloat32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( float )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( float )( from.data[i] );
			}
		}
	}

	public static void convert( ImageSInt64 from, ImageFloat64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( double )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( double )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat32 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( byte )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( byte )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat32 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( short )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( short )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat32 from, ImageSInt32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( int )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( int )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat32 from, ImageSInt64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( long )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( long )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat32 from, ImageFloat64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( double )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( double )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat64 from, ImageInt8 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( byte )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( byte )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat64 from, ImageInt16 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( short )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( short )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat64 from, ImageSInt32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( int )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( int )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat64 from, ImageSInt64 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( long )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( long )( from.data[i] );
			}
		}
	}

	public static void convert( ImageFloat64 from, ImageFloat32 to ) {
		if (from.isSubimage() || to.isSubimage()) {
			for (int y = 0; y < from.height; y++) {
				int indexFrom = from.getIndex(0, y);
				int indexTo = to.getIndex(0, y);
				for (int x = 0; x < from.width; x++) {
					to.data[indexTo++] = ( float )( from.data[indexFrom++] );
				}
			}
		} else {
			final int N = from.width * from.height;
			for (int i = 0; i < N; i++) {
				to.data[i] = ( float )( from.data[i] );
			}
		}
	}
}
| |
package com.deve.pig.model;
import java.util.ArrayList;
import java.util.List;
/**
 * MyBatis-style "example" object for building dynamic WHERE clauses against the
 * privilege ("priv") table. {@link Criteria} groups registered on this object
 * are combined with OR; the individual {@link Criterion} conditions inside one
 * group are combined with AND.
 */
public class PrivExample {
    /** Raw ORDER BY fragment appended verbatim to the generated query (no validation). */
    protected String orderByClause;
    /** Whether the generated SELECT should use DISTINCT. */
    protected boolean distinct;
    /** Criteria groups that are OR-ed together. */
    protected List<Criteria> oredCriteria;
    /** Paging: index of the first row to return; null means no paging. */
    protected Integer startIndex;
    /** Paging: maximum number of rows to return; null means no limit. */
    protected Integer pageSize;

    public PrivExample() {
        oredCriteria = new ArrayList<Criteria>();
    }

    public void setOrderByClause(String orderByClause) {
        this.orderByClause = orderByClause;
    }

    public String getOrderByClause() {
        return orderByClause;
    }

    public void setDistinct(boolean distinct) {
        this.distinct = distinct;
    }

    public boolean isDistinct() {
        return distinct;
    }

    public List<Criteria> getOredCriteria() {
        return oredCriteria;
    }

    /** Registers an existing criteria group, OR-ed with any previously added groups. */
    public void or(Criteria criteria) {
        oredCriteria.add(criteria);
    }

    /** Creates, registers and returns a new criteria group OR-ed with existing ones. */
    public Criteria or() {
        Criteria criteria = createCriteriaInternal();
        oredCriteria.add(criteria);
        return criteria;
    }

    /**
     * Creates a new criteria group. It is registered automatically only when it
     * is the first group; later groups must be added through {@link #or(Criteria)}.
     */
    public Criteria createCriteria() {
        Criteria criteria = createCriteriaInternal();
        if (oredCriteria.isEmpty()) {
            oredCriteria.add(criteria);
        }
        return criteria;
    }

    protected Criteria createCriteriaInternal() {
        Criteria criteria = new Criteria();
        return criteria;
    }

    /** Resets this example to its initial empty state so it can be reused. */
    public void clear() {
        oredCriteria.clear();
        orderByClause = null;
        distinct = false;
    }

    public void setStartIndex(Integer startIndex) {
        this.startIndex = startIndex;
    }

    public Integer getStartIndex() {
        return startIndex;
    }

    public void setPageSize(Integer pageSize) {
        this.pageSize = pageSize;
    }

    public Integer getPageSize() {
        return pageSize;
    }

    /**
     * Base class holding the AND-ed list of {@link Criterion} conditions plus the
     * generated fluent {@code andXxx...} builder methods for each table column.
     */
    protected abstract static class GeneratedCriteria {
        protected List<Criterion> criteria;

        protected GeneratedCriteria() {
            super();
            criteria = new ArrayList<Criterion>();
        }

        /** A group is valid (i.e. contributes SQL) only when it has at least one condition. */
        public boolean isValid() {
            return criteria.size() > 0;
        }

        public List<Criterion> getAllCriteria() {
            return criteria;
        }

        public List<Criterion> getCriteria() {
            return criteria;
        }

        /** Adds a value-less condition such as {@code "id is null"}. */
        protected void addCriterion(String condition) {
            if (condition == null) {
                // IllegalArgumentException is a RuntimeException, so existing callers
                // that caught the former RuntimeException keep working.
                throw new IllegalArgumentException("Value for condition cannot be null");
            }
            criteria.add(new Criterion(condition));
        }

        /** Adds a single-value (or IN-list) condition; {@code property} is only used for the error message. */
        protected void addCriterion(String condition, Object value, String property) {
            if (value == null) {
                throw new IllegalArgumentException("Value for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value));
        }

        /** Adds a BETWEEN condition with two bound values. */
        protected void addCriterion(String condition, Object value1, Object value2, String property) {
            if (value1 == null || value2 == null) {
                throw new IllegalArgumentException("Between values for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value1, value2));
        }

        // ---- id ----

        public Criteria andIdIsNull() {
            addCriterion("id is null");
            return (Criteria) this;
        }

        public Criteria andIdIsNotNull() {
            addCriterion("id is not null");
            return (Criteria) this;
        }

        public Criteria andIdEqualTo(Long value) {
            addCriterion("id =", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotEqualTo(Long value) {
            addCriterion("id <>", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdGreaterThan(Long value) {
            addCriterion("id >", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdGreaterThanOrEqualTo(Long value) {
            addCriterion("id >=", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdLessThan(Long value) {
            addCriterion("id <", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdLessThanOrEqualTo(Long value) {
            addCriterion("id <=", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdIn(List<Long> values) {
            addCriterion("id in", values, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotIn(List<Long> values) {
            addCriterion("id not in", values, "id");
            return (Criteria) this;
        }

        public Criteria andIdBetween(Long value1, Long value2) {
            addCriterion("id between", value1, value2, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotBetween(Long value1, Long value2) {
            addCriterion("id not between", value1, value2, "id");
            return (Criteria) this;
        }

        // ---- priv_name ----

        public Criteria andPrivNameIsNull() {
            addCriterion("priv_name is null");
            return (Criteria) this;
        }

        public Criteria andPrivNameIsNotNull() {
            addCriterion("priv_name is not null");
            return (Criteria) this;
        }

        public Criteria andPrivNameEqualTo(String value) {
            addCriterion("priv_name =", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameNotEqualTo(String value) {
            addCriterion("priv_name <>", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameGreaterThan(String value) {
            addCriterion("priv_name >", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameGreaterThanOrEqualTo(String value) {
            addCriterion("priv_name >=", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameLessThan(String value) {
            addCriterion("priv_name <", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameLessThanOrEqualTo(String value) {
            addCriterion("priv_name <=", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameLike(String value) {
            addCriterion("priv_name like", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameNotLike(String value) {
            addCriterion("priv_name not like", value, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameIn(List<String> values) {
            addCriterion("priv_name in", values, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameNotIn(List<String> values) {
            addCriterion("priv_name not in", values, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameBetween(String value1, String value2) {
            addCriterion("priv_name between", value1, value2, "privName");
            return (Criteria) this;
        }

        public Criteria andPrivNameNotBetween(String value1, String value2) {
            addCriterion("priv_name not between", value1, value2, "privName");
            return (Criteria) this;
        }

        // ---- meue_name ----

        public Criteria andMeueNameIsNull() {
            addCriterion("meue_name is null");
            return (Criteria) this;
        }

        public Criteria andMeueNameIsNotNull() {
            addCriterion("meue_name is not null");
            return (Criteria) this;
        }

        public Criteria andMeueNameEqualTo(String value) {
            addCriterion("meue_name =", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameNotEqualTo(String value) {
            addCriterion("meue_name <>", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameGreaterThan(String value) {
            addCriterion("meue_name >", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameGreaterThanOrEqualTo(String value) {
            addCriterion("meue_name >=", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameLessThan(String value) {
            addCriterion("meue_name <", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameLessThanOrEqualTo(String value) {
            addCriterion("meue_name <=", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameLike(String value) {
            addCriterion("meue_name like", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameNotLike(String value) {
            addCriterion("meue_name not like", value, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameIn(List<String> values) {
            addCriterion("meue_name in", values, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameNotIn(List<String> values) {
            addCriterion("meue_name not in", values, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameBetween(String value1, String value2) {
            addCriterion("meue_name between", value1, value2, "meueName");
            return (Criteria) this;
        }

        public Criteria andMeueNameNotBetween(String value1, String value2) {
            addCriterion("meue_name not between", value1, value2, "meueName");
            return (Criteria) this;
        }

        // ---- action ----

        public Criteria andActionIsNull() {
            addCriterion("action is null");
            return (Criteria) this;
        }

        public Criteria andActionIsNotNull() {
            addCriterion("action is not null");
            return (Criteria) this;
        }

        public Criteria andActionEqualTo(String value) {
            addCriterion("action =", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionNotEqualTo(String value) {
            addCriterion("action <>", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionGreaterThan(String value) {
            addCriterion("action >", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionGreaterThanOrEqualTo(String value) {
            addCriterion("action >=", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionLessThan(String value) {
            addCriterion("action <", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionLessThanOrEqualTo(String value) {
            addCriterion("action <=", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionLike(String value) {
            addCriterion("action like", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionNotLike(String value) {
            addCriterion("action not like", value, "action");
            return (Criteria) this;
        }

        public Criteria andActionIn(List<String> values) {
            addCriterion("action in", values, "action");
            return (Criteria) this;
        }

        public Criteria andActionNotIn(List<String> values) {
            addCriterion("action not in", values, "action");
            return (Criteria) this;
        }

        public Criteria andActionBetween(String value1, String value2) {
            addCriterion("action between", value1, value2, "action");
            return (Criteria) this;
        }

        public Criteria andActionNotBetween(String value1, String value2) {
            addCriterion("action not between", value1, value2, "action");
            return (Criteria) this;
        }

        // ---- description ----

        public Criteria andDescriptionIsNull() {
            addCriterion("description is null");
            return (Criteria) this;
        }

        public Criteria andDescriptionIsNotNull() {
            addCriterion("description is not null");
            return (Criteria) this;
        }

        public Criteria andDescriptionEqualTo(String value) {
            addCriterion("description =", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotEqualTo(String value) {
            addCriterion("description <>", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionGreaterThan(String value) {
            addCriterion("description >", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionGreaterThanOrEqualTo(String value) {
            addCriterion("description >=", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionLessThan(String value) {
            addCriterion("description <", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionLessThanOrEqualTo(String value) {
            addCriterion("description <=", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionLike(String value) {
            addCriterion("description like", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotLike(String value) {
            addCriterion("description not like", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionIn(List<String> values) {
            addCriterion("description in", values, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotIn(List<String> values) {
            addCriterion("description not in", values, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionBetween(String value1, String value2) {
            addCriterion("description between", value1, value2, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotBetween(String value1, String value2) {
            addCriterion("description not between", value1, value2, "description");
            return (Criteria) this;
        }

        // ---- is_show ----

        public Criteria andIsShowIsNull() {
            addCriterion("is_show is null");
            return (Criteria) this;
        }

        public Criteria andIsShowIsNotNull() {
            addCriterion("is_show is not null");
            return (Criteria) this;
        }

        public Criteria andIsShowEqualTo(Byte value) {
            addCriterion("is_show =", value, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowNotEqualTo(Byte value) {
            addCriterion("is_show <>", value, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowGreaterThan(Byte value) {
            addCriterion("is_show >", value, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowGreaterThanOrEqualTo(Byte value) {
            addCriterion("is_show >=", value, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowLessThan(Byte value) {
            addCriterion("is_show <", value, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowLessThanOrEqualTo(Byte value) {
            addCriterion("is_show <=", value, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowIn(List<Byte> values) {
            addCriterion("is_show in", values, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowNotIn(List<Byte> values) {
            addCriterion("is_show not in", values, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowBetween(Byte value1, Byte value2) {
            addCriterion("is_show between", value1, value2, "isShow");
            return (Criteria) this;
        }

        public Criteria andIsShowNotBetween(Byte value1, Byte value2) {
            addCriterion("is_show not between", value1, value2, "isShow");
            return (Criteria) this;
        }

        // ---- pid ----

        public Criteria andPIdIsNull() {
            addCriterion("pid is null");
            return (Criteria) this;
        }

        public Criteria andPIdIsNotNull() {
            addCriterion("pid is not null");
            return (Criteria) this;
        }

        public Criteria andPIdEqualTo(Long value) {
            addCriterion("pid =", value, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdNotEqualTo(Long value) {
            addCriterion("pid <>", value, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdGreaterThan(Long value) {
            addCriterion("pid >", value, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdGreaterThanOrEqualTo(Long value) {
            addCriterion("pid >=", value, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdLessThan(Long value) {
            addCriterion("pid <", value, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdLessThanOrEqualTo(Long value) {
            addCriterion("pid <=", value, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdIn(List<Long> values) {
            addCriterion("pid in", values, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdNotIn(List<Long> values) {
            addCriterion("pid not in", values, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdBetween(Long value1, Long value2) {
            addCriterion("pid between", value1, value2, "pid");
            return (Criteria) this;
        }

        public Criteria andPIdNotBetween(Long value1, Long value2) {
            addCriterion("pid not between", value1, value2, "pid");
            return (Criteria) this;
        }
    }

    /** Concrete criteria group; kept separate from {@link GeneratedCriteria} by the generator convention. */
    public static class Criteria extends GeneratedCriteria {
        protected Criteria() {
            super();
        }
    }

    /**
     * One atomic SQL condition. Exactly one of the {@code noValue}, {@code singleValue},
     * {@code betweenValue} or {@code listValue} flags is set, telling the SQL mapper
     * how to render the condition.
     */
    public static class Criterion {
        /** SQL fragment such as {@code "id ="} or {@code "priv_name like"}. */
        private String condition;
        /** First (or only) bound value; a List for IN conditions. */
        private Object value;
        /** Upper bound for BETWEEN conditions. */
        private Object secondValue;
        private boolean noValue;
        private boolean singleValue;
        private boolean betweenValue;
        private boolean listValue;
        /** Optional MyBatis type handler name; null when unused. */
        private String typeHandler;

        public String getCondition() {
            return condition;
        }

        public Object getValue() {
            return value;
        }

        public Object getSecondValue() {
            return secondValue;
        }

        public boolean isNoValue() {
            return noValue;
        }

        public boolean isSingleValue() {
            return singleValue;
        }

        public boolean isBetweenValue() {
            return betweenValue;
        }

        public boolean isListValue() {
            return listValue;
        }

        public String getTypeHandler() {
            return typeHandler;
        }

        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }

        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            // A List value marks an IN/NOT IN condition; anything else is a scalar.
            if (value instanceof List<?>) {
                this.listValue = true;
            } else {
                this.singleValue = true;
            }
        }

        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }

        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }

        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cortrium.cortriumc3;
import android.Manifest;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ListActivity;
import android.bluetooth.BluetoothAdapter;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.cortrium.opkit.ConnectionManager;
import com.cortrium.opkit.CortriumC3;
import java.util.ArrayList;
/**
* Activity for scanning and displaying available Bluetooth LE devices.
*/
public class DeviceScanActivity extends ListActivity {
    private static final int PERMISSION_REQUEST_COARSE_LOCATION = 1;
    private final static String TAG = "CortriumC3Ecg";
    private LeDeviceListAdapter mLeDeviceListAdapter;
    private ConnectionManager connectionManager;
    private Context mContext = this;
    // Name of the previously paired device, restored in onStart(); may be null
    // when no device has ever been paired (Utils.getPairedDevice contract -- TODO confirm).
    private String paired_id;
    private static final int REQUEST_ENABLE_BT = 1;

    /** Forwards scan/connection callbacks from the ConnectionManager to the UI. */
    private final ConnectionManager.OnConnectionManagerListener mListener = new ConnectionManager.OnConnectionManagerListener() {
        @Override
        public void startedScanning(ConnectionManager manager) {
        }

        @Override
        public void stoppedScanning(ConnectionManager manager) {
        }

        @Override
        public void discoveredDevice(final CortriumC3 device) {
            // Callback may arrive on a non-UI thread; adapter must be touched on the UI thread.
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mLeDeviceListAdapter.addDevice(device);
                    mLeDeviceListAdapter.notifyDataSetChanged();
                }
            });
        }

        @Override
        public void connectedToDevice(CortriumC3 device) {
        }

        @Override
        public void disconnectedFromDevice(CortriumC3 device) {
        }
    };

    /** Tracks Bluetooth adapter state changes; currently all states are no-ops. */
    private final BroadcastReceiver mReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            final String action = intent.getAction();
            if (action.equals(BluetoothAdapter.ACTION_STATE_CHANGED)) {
                final int state = intent.getIntExtra(BluetoothAdapter.EXTRA_STATE,
                        BluetoothAdapter.ERROR);
                switch (state) {
                    case BluetoothAdapter.STATE_OFF:
                        break;
                    case BluetoothAdapter.STATE_TURNING_OFF:
                        break;
                    case BluetoothAdapter.STATE_ON:
                        //setButtonText("Bluetooth on");
                        break;
                    case BluetoothAdapter.STATE_TURNING_ON:
                        break;
                }
            }
        }
    };

    /**
     * Asks for ACCESS_COARSE_LOCATION on Android M+, which is required for BLE
     * scanning. Shows a rationale dialog first, then triggers the system prompt.
     */
    @TargetApi(Build.VERSION_CODES.M)
    private void askForLocationPermissions() {
        // Android M Permission check
        if (this.checkSelfPermission(Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
            final AlertDialog.Builder builder = new AlertDialog.Builder(this);
            builder.setTitle("This app needs location access");
            builder.setMessage("Please grant location access so this app can detect Cortrium devices");
            builder.setPositiveButton(android.R.string.ok, null);
            builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
                @Override
                public void onDismiss(DialogInterface dialog) {
                    requestPermissions(new String[]{Manifest.permission.ACCESS_COARSE_LOCATION}, PERMISSION_REQUEST_COARSE_LOCATION);
                }
            });
            builder.show();
        }
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // TODO: Enable this for production
        //Fabric.with(this, new Crashlytics());
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            askForLocationPermissions();
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        // Recover the previously paired device name (if any) and hook up the
        // connection-manager callbacks.
        paired_id = Utils.getPairedDevice(this);
        connectionManager = ConnectionManager.getInstance(this);
        connectionManager.setConnectionManagerListener(mListener);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) {
        // Forward to the framework so fragment callbacks (if any) still fire.
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        switch (requestCode) {
            case PERMISSION_REQUEST_COARSE_LOCATION: {
                if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                    Log.d(TAG, "Coarse location permission granted");
                } else {
                    final AlertDialog.Builder builder = new AlertDialog.Builder(this);
                    builder.setTitle("Functionality limited");
                    builder.setMessage("Since location access has not been granted, this app will not be able to detect Cortrium devices when in the background.");
                    builder.setPositiveButton(android.R.string.ok, null);
                    builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
                        @Override
                        public void onDismiss(DialogInterface dialog) {
                        }
                    });
                    builder.show();
                }
            }
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        // Toggle scan/stop actions and the progress spinner based on scan state.
        if (connectionManager.getConnectionState() == ConnectionManager.ConnectionStates.Scanning) {
            menu.findItem(R.id.menu_stop).setVisible(true);
            menu.findItem(R.id.menu_scan).setVisible(false);
            menu.findItem(R.id.menu_refresh).setActionView(
                    R.layout.actionbar_indeterminate_progress);
        } else {
            menu.findItem(R.id.menu_stop).setVisible(false);
            menu.findItem(R.id.menu_scan).setVisible(true);
            menu.findItem(R.id.menu_refresh).setActionView(null);
        }
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.menu_scan:
                //mLeDeviceListAdapter.clear();
                scanLeDevice(true);
                break;
            case R.id.menu_stop:
                scanLeDevice(false);
                break;
        }
        return true;
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Ensures Bluetooth is enabled on the device. If Bluetooth is not currently enabled,
        // fire an intent to display a dialog asking the user to grant permission to enable it.
        // UPDATE: The intent request was causing a timimg error, now is enabled directly from adapter.
        if (!connectionManager.getBluetoothIsEnabled()) {
            /*Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);*/
            BluetoothAdapter.getDefaultAdapter().enable();
        }
        if (!connectionManager.isCortriumDeviceSupported()) {
            Toast.makeText(this, R.string.error_bluetooth_not_supported, Toast.LENGTH_LONG).show();
            Log.e(TAG, "Error bluetooth not supported");
            finish();
        } else {
            // Fresh adapter on every resume; onPause() clears the previous one.
            mLeDeviceListAdapter = new LeDeviceListAdapter();
            setListAdapter(mLeDeviceListAdapter);
            scanLeDevice(true);
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // User chose not to enable Bluetooth.
        if (requestCode == REQUEST_ENABLE_BT && resultCode == Activity.RESULT_CANCELED) {
            finish();
            return;
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    @Override
    protected void onPause() {
        super.onPause();
        scanLeDevice(false);
        if (mLeDeviceListAdapter != null)
            mLeDeviceListAdapter.clear();
        connectionManager.clear();
    }

    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {
        final CortriumC3 device = mLeDeviceListAdapter.getDevice(position);
        if (device == null) return;
        if (connectionManager.getConnectionState() == ConnectionManager.ConnectionStates.Scanning) {
            scanLeDevice(false);
        }
        Log.d(TAG, "Device selected: " + device.getName());
        // Remember the choice so future scans auto-connect to this device.
        Utils.setPairedDevice(this, device.getName());
        connectToDevice(device);
    }

    /** Initiates the connection and opens the ECG screen. */
    private void connectToDevice(CortriumC3 device) {
        connectionManager.connectDevice(device);
        Intent intent = new Intent(mContext, CortriumC3Ecg.class);
        startActivity(intent);
    }

    /** Starts or stops BLE scanning (no-op when the adapter is missing/disabled) and refreshes the menu. */
    private void scanLeDevice(final boolean enable) {
        if (BluetoothAdapter.getDefaultAdapter() != null && BluetoothAdapter.getDefaultAdapter().isEnabled()) {
            if (enable) {
                Log.d(TAG, "Start scanning");
                connectionManager.startScanning();
            } else {
                Log.d(TAG, "Stop scanning");
                connectionManager.stopScanning();
            }
        }
        invalidateOptionsMenu();
    }

    /**
     * Adapter for holding devices found through scanning.
     */
    private class LeDeviceListAdapter extends BaseAdapter {
        private ArrayList<CortriumC3> mLeDevices;
        private LayoutInflater mInflator;

        public LeDeviceListAdapter() {
            super();
            mLeDevices = new ArrayList<>();
            mInflator = DeviceScanActivity.this.getLayoutInflater();
        }

        /**
         * Adds a discovered device to the list, or auto-connects when it is the
         * previously paired device.
         */
        public void addDevice(CortriumC3 device) {
            if (!mLeDevices.contains(device)) {
                final String name = device.getName();
                // BLE advertisements may carry no name, and paired_id may be null when
                // nothing was paired yet; the former paired_id.compareTo(name) threw an
                // NPE in both cases. getView() below already treats the name as nullable.
                if (name != null && name.equals(paired_id)) {
                    connectToDevice(device);
                } else {
                    mLeDevices.add(device);
                }
            }
        }

        public CortriumC3 getDevice(int position) {
            return mLeDevices.get(position);
        }

        public void clear() {
            mLeDevices.clear();
        }

        @Override
        public int getCount() {
            return mLeDevices.size();
        }

        @Override
        public Object getItem(int i) {
            return mLeDevices.get(i);
        }

        @Override
        public long getItemId(int i) {
            return i;
        }

        @Override
        public View getView(int i, View view, ViewGroup viewGroup) {
            ViewHolder viewHolder;
            // General ListView optimization code.
            if (view == null) {
                view = mInflator.inflate(R.layout.listitem_device, null);
                viewHolder = new ViewHolder();
                viewHolder.deviceAddress = (TextView) view.findViewById(R.id.device_address);
                viewHolder.deviceName = (TextView) view.findViewById(R.id.device_name);
                view.setTag(viewHolder);
            } else {
                viewHolder = (ViewHolder) view.getTag();
            }
            CortriumC3 device = mLeDevices.get(i);
            final String deviceName = device.getName();
            if (deviceName != null && deviceName.length() > 0)
                viewHolder.deviceName.setText(deviceName);
            else
                viewHolder.deviceName.setText(R.string.unknown_device);
            viewHolder.deviceAddress.setText(device.getAddress());
            return view;
        }
    }

    /** Row-view cache used by {@link LeDeviceListAdapter#getView}. */
    static class ViewHolder {
        TextView deviceName;
        TextView deviceAddress;
    }
}
| |
package org.concord.energy3d.simulation;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.EventQueue;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import javax.swing.*;
import org.concord.energy3d.gui.EnergyPanel;
import org.concord.energy3d.gui.PvProjectDailyEnergyGraph;
import org.concord.energy3d.model.Foundation;
import org.concord.energy3d.model.HousePart;
import org.concord.energy3d.model.Human;
import org.concord.energy3d.model.Rack;
import org.concord.energy3d.model.SolarPanel;
import org.concord.energy3d.model.Tree;
import org.concord.energy3d.scene.Scene;
import org.concord.energy3d.scene.SceneManager;
import org.concord.energy3d.shapes.Heliodon;
import org.concord.energy3d.util.BugReporter;
/**
* For fast feedback, only 12 days are calculated.
*
* @author Charles Xie
*/
public class PvAnnualAnalysis extends AnnualAnalysis {
private UtilityBill utilityBill;
static List<double[]> storedResults;
public PvAnnualAnalysis() {
super();
graph = new PartEnergyAnnualGraph();
graph.setPreferredSize(new Dimension(600, 400));
graph.setBackground(Color.WHITE);
if (storedResults == null) {
storedResults = new ArrayList<>();
}
}
@Override
void runAnalysis(final JDialog parent) {
graph.info = "Calculating...";
graph.repaint();
onStart();
final EnergyPanel e = EnergyPanel.getInstance();
for (final int m : MONTHS) {
SceneManager.getTaskManager().update(() -> {
if (!analysisStopped) {
final HousePart selectedPart = SceneManager.getInstance().getSelectedPart();
if (selectedPart instanceof Tree || selectedPart instanceof Human) { // make sure that we deselect trees or humans, which cannot be attributed to a foundation
SceneManager.getInstance().setSelectedPart(null);
}
final Calendar c = Heliodon.getInstance().getCalendar();
c.set(Calendar.MONTH, m);
final Calendar today = (Calendar) c.clone();
Scene.getInstance().updateTrackables();
final Throwable t = compute();
if (t != null) {
stopAnalysis();
EventQueue.invokeLater(() -> BugReporter.report(t));
}
if (selectedPart instanceof Foundation) { // synchronize with daily graph
final PvProjectDailyEnergyGraph g = e.getPvProjectDailyEnergyGraph();
if (g.hasGraph()) {
g.setCalendar(today);
g.updateGraph();
}
}
EventQueue.invokeLater(() -> {
e.getDateSpinner().setValue(c.getTime());
if (selectedPart instanceof Foundation) {
final PvProjectDailyEnergyGraph g = e.getPvProjectDailyEnergyGraph();
e.getPvProjectTabbedPane().setSelectedComponent(g);
if (!g.hasGraph()) {
g.setCalendar(today);
g.addGraph((Foundation) selectedPart);
}
}
});
}
return null;
});
}
SceneManager.getTaskManager().update(() -> {
EventQueue.invokeLater(() -> {
onCompletion();
if (Heliodon.getInstance().getCalendar().get(Calendar.MONTH) != Calendar.DECEMBER) {
return; // annual calculation aborted
}
runFinancialAnalysis(parent);
});
return null;
});
}
@Override
void runFinancialAnalysis(JDialog parent) {
final double annualOutput = getResult("Solar");
final PvFinancialModel fm = Scene.getInstance().getPvFinancialModel();
final int lifespan = fm.getLifespan();
final double roi = fm.calculateROI(Scene.getInstance().getTotalFoundationAreas(), Scene.getInstance().countSolarPanels(), annualOutput);
double paybackPeriod = roi > -100 ? 100.0 / (roi + 100.0) * lifespan : Double.POSITIVE_INFINITY;
reportResults(storedResults, annualOutput, lifespan, roi, paybackPeriod, parent);
storedResults.add(new double[]{annualOutput, lifespan, roi, paybackPeriod});
}
@Override
public void updateGraph() {
final HousePart selectedPart = SceneManager.getInstance().getSelectedPart();
if (selectedPart != null) {
if (selectedPart instanceof SolarPanel) {
graph.addData("Solar", ((SolarPanel) selectedPart).getYieldToday());
} else if (selectedPart instanceof Rack) {
graph.addData("Solar", ((Rack) selectedPart).getYieldToday());
} else if (selectedPart instanceof Foundation) {
double output = 0;
for (final HousePart p : Scene.getInstance().getParts()) {
if (p.getTopContainer() == selectedPart) {
if (p instanceof SolarPanel) {
output += ((SolarPanel) p).getYieldToday();
} else if (p instanceof Rack) {
output += ((Rack) p).getYieldToday();
}
}
}
graph.addData("Solar", output);
} else if (selectedPart.getTopContainer() != null) {
double output = 0;
for (final HousePart p : Scene.getInstance().getParts()) {
if (p.getTopContainer() == selectedPart.getTopContainer()) {
if (p instanceof SolarPanel) {
output += ((SolarPanel) p).getYieldToday();
} else if (p instanceof Rack) {
output += ((Rack) p).getYieldToday();
}
}
}
graph.addData("Solar", output);
}
} else {
double output = 0;
for (final HousePart p : Scene.getInstance().getParts()) {
if (p instanceof SolarPanel) {
output += ((SolarPanel) p).getYieldToday();
} else if (p instanceof Rack) {
output += ((Rack) p).getYieldToday();
}
}
graph.addData("Solar", output);
}
graph.repaint();
}
public void setUtilityBill(final UtilityBill utilityBill) {
if (utilityBill == null) {
return;
}
this.utilityBill = utilityBill;
final double[] bill = utilityBill.getMonthlyEnergy();
for (double v : bill) {
graph.addData("Utility", v / (365.0 / 12.0));
}
graph.repaint();
}
@Override
void onStart() {
super.onStart();
if (utilityBill != null) {
final double[] bill = utilityBill.getMonthlyEnergy();
for (double v : bill) {
graph.addData("Utility", v / (365.0 / 12.0));
}
}
}
public void show() {
final HousePart selectedPart = SceneManager.getInstance().getSelectedPart();
String s = null;
int cost = -1;
String title = "Annual Yield of All Solar Panels (" + Scene.getInstance().countSolarPanels() + " Panels)";
if (selectedPart != null) {
if (selectedPart instanceof SolarPanel) {
cost = (int) ProjectCost.getCost(selectedPart);
s = selectedPart.toString().substring(0, selectedPart.toString().indexOf(')') + 1);
title = "Annual Yield";
} else if (selectedPart instanceof Rack) {
final Rack rack = (Rack) selectedPart;
cost = (int) ProjectCost.getCost(rack);
s = selectedPart.toString().substring(0, selectedPart.toString().indexOf(')') + 1);
title = "Annual Yield (" + rack.getNumberOfSolarPanels() + " Solar Panels)";
} else if (selectedPart instanceof Foundation) {
title = "Annual Yield on Selected Foundation (" + ((Foundation) selectedPart).getNumberOfSolarPanels() + " Solar Panels)";
} else if (selectedPart.getTopContainer() != null) {
title = "Annual Yield on Selected Foundation (" + selectedPart.getTopContainer().getNumberOfSolarPanels() + " Solar Panels)";
}
}
final JDialog dialog = createDialog(s == null ? title : title + ": " + s + " (Cost: $" + cost + ")");
final JMenuBar menuBar = new JMenuBar();
dialog.setJMenuBar(menuBar);
menuBar.add(createOptionsMenu(dialog, null, true, true));
menuBar.add(createRunsMenu());
dialog.setVisible(true);
}
/**
 * Serializes the graph as a JSON object: the number of months, the currently selected part
 * (or "All" when nothing is selected), the monthly solar outputs, and the annual total.
 *
 * @return the JSON string
 */
@Override
public String toJson() {
    final StringBuilder json = new StringBuilder("{\"Months\": ").append(getNumberOfDataPoints());
    final HousePart selectedPart = SceneManager.getInstance().getSelectedPart();
    if (selectedPart != null) {
        // Identify the selection by the "(...)" prefix of its toString().
        if (selectedPart instanceof SolarPanel) {
            json.append(", \"Panel\": \"").append(labelUpToClosingParenthesis(selectedPart)).append('"');
        } else if (selectedPart instanceof Rack) {
            json.append(", \"Rack\": \"").append(labelUpToClosingParenthesis(selectedPart)).append('"');
        } else if (selectedPart instanceof Foundation) {
            json.append(", \"Foundation\": \"").append(labelUpToClosingParenthesis(selectedPart)).append('"');
        } else if (selectedPart.getTopContainer() != null) {
            json.append(", \"Foundation\": \"").append(labelUpToClosingParenthesis(selectedPart.getTopContainer())).append('"');
        }
    } else {
        json.append(", \"Panel\": \"All\"");
    }
    final String name = "Solar";
    final List<Double> data = graph.getData(name);
    json.append(", \"").append(name).append("\": {");
    json.append("\"Monthly\": [");
    // Guard against an EMPTY list as well as null: the old code stripped the final character
    // unconditionally whenever data was non-null, which chopped the '[' when the list was empty.
    if (data != null && !data.isEmpty()) {
        for (int i = 0; i < data.size(); i++) {
            if (i > 0) {
                json.append(',');
            }
            json.append(Graph.ENERGY_FORMAT.format(data.get(i)));
        }
    }
    json.append("]\n");
    json.append(", \"Total\": ").append(Graph.ENERGY_FORMAT.format(getResult(name)));
    json.append("}");
    json.append("}");
    return json.toString();
}

/** Returns the part's label: its toString() up to and including the first ')'. */
private static String labelUpToClosingParenthesis(final HousePart part) {
    final String s = part.toString();
    return s.substring(0, s.indexOf(')') + 1);
}
}
| |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.modelcompiler.builder.generator.visitor.accumulate;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import com.github.javaparser.StaticJavaParser;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.Modifier;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.AssignExpr;
import com.github.javaparser.ast.expr.ClassExpr;
import com.github.javaparser.ast.expr.EnclosedExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.FieldAccessExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.MethodReferenceExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.VariableDeclarationExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import com.github.javaparser.ast.stmt.ReturnStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import com.github.javaparser.ast.type.Type;
import org.drools.compiler.lang.descr.AccumulateDescr;
import org.drools.compiler.lang.descr.PatternDescr;
import org.drools.modelcompiler.builder.PackageModel;
import org.drools.modelcompiler.builder.generator.DeclarationSpec;
import org.drools.modelcompiler.builder.generator.DrlxParseUtil;
import org.drools.modelcompiler.builder.generator.RuleContext;
import org.drools.modelcompiler.util.StringUtil;
import org.drools.mvelcompiler.MvelCompiler;
import org.drools.mvelcompiler.ParsingResult;
import org.drools.mvelcompiler.context.MvelCompilerContext;
import static com.github.javaparser.StaticJavaParser.parseStatement;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.addCurlyBracesToBlock;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.addSemicolon;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.forceCastForName;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.rescopeNamesToNewScope;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.ACC_FUNCTION_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.BIND_AS_CALL;
import static org.drools.modelcompiler.builder.generator.visitor.accumulate.AccumulateVisitor.collectNamesInBlock;
/**
 * Translates a legacy {@code accumulate} pattern containing inline custom code
 * (init/action/reverse/result blocks) into a generated Java class based on the
 * "AccumulateInlineTemplate.java" resource, and wires the instantiation of that class
 * into the accumulate DSL expression.
 */
public class AccumulateInline {

    protected final RuleContext context;
    protected final PackageModel packageModel;

    /** Name of the template method implementing the optional reverse support. */
    private static final String REVERSE = "reverse";

    private AccumulateDescr accumulateDescr;
    private PatternDescr basePattern;

    // Parsed copy of the template class, renamed per rule/accumulate occurrence.
    private ClassOrInterfaceDeclaration accumulateInlineClass;
    // Nested "ContextData" class holding the accumulation state fields from the init block.
    private ClassOrInterfaceDeclaration contextData;
    private String accumulateInlineClassName;

    // Declarations introduced by the init block; registered on the context on success.
    private final List<DeclarationSpec> accumulateDeclarations = new ArrayList<>();
    // Init-block variable names that became ContextData fields.
    private final List<String> contextFieldNames = new ArrayList<>();
    // External declarations referenced by the inline code; when non-empty the inline
    // accumulate cannot be supported and UnsupportedInlineAccumulate is raised.
    private Set<String> usedExternalDeclarations = new HashSet<>();

    // Boxed type of the single declaration bound in the action block.
    private Type singleAccumulateType;

    Set<String> getUsedExternalDeclarations() {
        return usedExternalDeclarations;
    }

    private MvelCompiler mvelCompiler;

    AccumulateInline(RuleContext context,
                     PackageModel packageModel,
                     AccumulateDescr descr,
                     PatternDescr basePattern) {
        this.context = context;
        this.packageModel = packageModel;
        this.accumulateDescr = descr;
        this.basePattern = basePattern;

        // Seed the MVEL compiler with every declaration visible to the rule so the inline
        // blocks can reference them with their proper types.
        MvelCompilerContext mvelCompilerContext = new MvelCompilerContext(context.getTypeResolver());
        for (DeclarationSpec ds : context.getAllDeclarations()) {
            mvelCompilerContext.addDeclaration(ds.getBindingId(), ds.getDeclarationClass());
        }
        mvelCompiler = new MvelCompiler(mvelCompilerContext);
        singleAccumulateType = null;
    }

    /**
     * By design this legacy accumulate (with inline custom code) visitor supports only 1-and-only
     * binding in the accumulate code/expressions.
     *
     * @param accumulateDSL        DSL call to which the generated function is appended
     * @param externalDeclarations declarations defined outside the accumulate pattern
     * @param identifier           identifier of the accumulate result variable
     * @throws UnsupportedInlineAccumulate if the inline code references external declarations
     */
    void visitAccInlineCustomCode(MethodCallExpr accumulateDSL, Set<String> externalDeclarations, String identifier) {
        initInlineAccumulateTemplate();
        parseInitBlock();
        Collection<String> allNamesInActionBlock = parseActionBlock(externalDeclarations);
        parseReverseBlock(externalDeclarations, allNamesInActionBlock);
        parseResultMethod();

        if (!usedExternalDeclarations.isEmpty()) {
            throw new UnsupportedInlineAccumulate();
        }
        for (DeclarationSpec d : accumulateDeclarations) {
            context.addDeclaration(d);
        }
        addAccumulateClassInitializationToMethod(accumulateDSL, identifier);
    }

    /**
     * Loads the inline-accumulate template class, renames it to a per-rule unique name, and
     * locates its nested ContextData class.
     */
    private void initInlineAccumulateTemplate() {
        accumulateInlineClassName = StringUtil.toId(context.getRuleDescr().getName()) + "Accumulate" + accumulateDescr.getLine();
        CompilationUnit templateCU;
        try {
            templateCU = StaticJavaParser.parseResource("AccumulateInlineTemplate.java");
        } catch (IOException e) {
            throw new InvalidInlineTemplateException(e);
        }
        ClassOrInterfaceDeclaration parsedClass =
                templateCU
                        .getClassByName("AccumulateInlineFunction")
                        .orElseThrow(InvalidInlineTemplateException::new);
        parsedClass.setName(accumulateInlineClassName);
        // Replace the template's generic placeholder with the concrete ContextData type.
        parsedClass.findAll(ClassOrInterfaceType.class, c -> "CONTEXT_DATA_GENERIC".equals(c.asString()))
                .forEach(c -> c.setName(accumulateInlineClassName + ".ContextData"));
        this.accumulateInlineClass = parsedClass;
        contextData = this.accumulateInlineClass.findFirst(ClassOrInterfaceDeclaration.class
                , c -> "ContextData".equals(c.getNameAsString()))
                .orElseThrow(InvalidInlineTemplateException::new);
    }

    /**
     * Compiles the init block and turns each variable it declares into a ContextData field,
     * moving the initializer into the template's init() method as an assignment.
     */
    private void parseInitBlock() {
        MethodDeclaration initMethod = getMethodFromTemplateClass("init");
        String mvelBlock = addCurlyBracesToBlock(addSemicolon(accumulateDescr.getInitCode()));
        ParsingResult initCodeCompilationResult = mvelCompiler.compile(mvelBlock);
        BlockStmt initBlock = initCodeCompilationResult.statementResults();
        for (Statement stmt : initBlock.getStatements()) {
            final BlockStmt initMethodBody = initMethod.getBody().orElseThrow(InvalidInlineTemplateException::new);
            if (stmt.isExpressionStmt() && stmt.asExpressionStmt().getExpression().isVariableDeclarationExpr()) {
                VariableDeclarationExpr vdExpr = stmt.asExpressionStmt().getExpression().asVariableDeclarationExpr();
                for (VariableDeclarator vd : vdExpr.getVariables()) {
                    final String variableName = vd.getNameAsString();
                    contextFieldNames.add(variableName);
                    contextData.addField(vd.getType(), variableName, Modifier.publicModifier().getKeyword());
                    Optional<Expression> optInitializer = vd.getInitializer();
                    optInitializer.ifPresent(initializer -> {
                        // data.<variableName> = <initializer>;
                        Expression target = new FieldAccessExpr(getDataNameExpr(), variableName);
                        Statement initStmt = new ExpressionStmt(new AssignExpr(target, initializer, AssignExpr.Operator.ASSIGN));
                        initMethodBody.addStatement(initStmt);
                        // Any rule-level declaration referenced by the initializer makes this unsupported.
                        initStmt.findAll(NameExpr.class).stream().map(Node::toString).filter(context::hasDeclaration).forEach(usedExternalDeclarations::add);
                    });
                    accumulateDeclarations.add(new DeclarationSpec(variableName, DrlxParseUtil.getClassFromContext(context.getTypeResolver(), vd.getType().asString())));
                }
            }
        }
    }

    /**
     * Copies the (already compiled) action/reverse statements into the given template method,
     * casting the bound parameter and rescoping ContextData field references through "data".
     */
    private void writeAccumulateMethod(List<String> contextFieldNames, MethodDeclaration accumulateMethod, BlockStmt actionBlock) {
        for (Statement stmt : actionBlock.getStatements()) {
            final ExpressionStmt convertedExpressionStatement = new ExpressionStmt();
            for (ExpressionStmt eStmt : stmt.findAll(ExpressionStmt.class)) {
                final Expression expressionUntyped = eStmt.getExpression();
                final String parameterName = accumulateMethod.getParameter(1).getNameAsString();
                forceCastForName(parameterName, singleAccumulateType, expressionUntyped);
                rescopeNamesToNewScope(getDataNameExpr(), contextFieldNames, expressionUntyped);
                convertedExpressionStatement.setExpression(expressionUntyped);
            }
            accumulateMethod.getBody().orElseThrow(InvalidInlineTemplateException::new)
                    .addStatement(convertedExpressionStatement);
        }
    }

    /**
     * Compiles the action block into the template's accumulate() method. Supported only when
     * the block binds exactly one declaration; otherwise the external names are recorded and
     * {@link UnsupportedInlineAccumulate} is thrown.
     *
     * @return the names collected from the action block
     */
    private Collection<String> parseActionBlock(Set<String> externalDeclarations) {
        MethodDeclaration accumulateMethod = getMethodFromTemplateClass("accumulate");

        String actionCode = accumulateDescr.getActionCode();
        if (blockIsNonEmptyWithoutSemicolon(actionCode)) {
            throw new MissingSemicolonInlineAccumulateException("action");
        }
        ParsingResult actionBlockCompilationResult = mvelCompiler.compile(addCurlyBracesToBlock(actionCode));
        BlockStmt actionBlock = actionBlockCompilationResult.statementResults();

        Collection<String> allNamesInActionBlock = collectNamesInBlock(actionBlock, context);
        if (allNamesInActionBlock.size() == 1) {
            String nameExpr = allNamesInActionBlock.iterator().next();
            accumulateMethod.getParameter(1).setName(nameExpr);
            singleAccumulateType =
                    context.getDeclarationById(nameExpr)
                            .orElseThrow(() -> new IllegalStateException("Cannot find declaration by name " + nameExpr + "!"))
                            .getBoxedType();
            writeAccumulateMethod(contextFieldNames, accumulateMethod, actionBlock);
        } else {
            // Keep only the names that are truly external, report them, and bail out.
            allNamesInActionBlock.removeIf(name -> !externalDeclarations.contains(name));
            usedExternalDeclarations.addAll(allNamesInActionBlock);
            throw new UnsupportedInlineAccumulate();
        }
        return allNamesInActionBlock;
    }

    /**
     * Compiles the optional reverse block into the template's reverse() method and sets
     * supportsReverse() accordingly. When no reverse code is present, reverse() is generated
     * to throw UnsupportedOperationException.
     */
    private void parseReverseBlock(Set<String> externalDeclarations, Collection<String> allNamesInActionBlock) {
        String reverseCode = accumulateDescr.getReverseCode();
        if (reverseCode != null) {
            if (blockIsNonEmptyWithoutSemicolon(reverseCode)) {
                throw new MissingSemicolonInlineAccumulateException(REVERSE);
            }
            // Compile only when reverse code is actually present: the previous version compiled
            // BEFORE the null check above, feeding a null block to the MVEL compiler.
            ParsingResult reverseBlockCompilationResult = mvelCompiler.compile(addCurlyBracesToBlock(reverseCode));
            BlockStmt reverseBlock = reverseBlockCompilationResult.statementResults();
            Collection<String> allNamesInReverseBlock = collectNamesInBlock(reverseBlock, context);
            if (allNamesInReverseBlock.size() == 1) {
                MethodDeclaration reverseMethod = getMethodFromTemplateClass(REVERSE);
                reverseMethod.getParameter(1).setName(allNamesInReverseBlock.iterator().next());
                writeAccumulateMethod(contextFieldNames, reverseMethod, reverseBlock);

                MethodDeclaration supportsReverseMethod = getMethodFromTemplateClass("supportsReverse");
                supportsReverseMethod
                        .getBody()
                        .orElseThrow(InvalidInlineTemplateException::new)
                        .addStatement(parseStatement("return true;"));
            } else {
                // NOTE(review): this filters the ACTION-block names, mirroring parseActionBlock;
                // presumably the reverse-block names should be reported instead — confirm upstream.
                allNamesInActionBlock.removeIf(name -> !externalDeclarations.contains(name));
                usedExternalDeclarations.addAll(allNamesInActionBlock);
                throw new UnsupportedInlineAccumulate();
            }
        } else {
            MethodDeclaration supportsReverseMethod = getMethodFromTemplateClass("supportsReverse");
            supportsReverseMethod
                    .getBody()
                    .orElseThrow(InvalidInlineTemplateException::new)
                    .addStatement(parseStatement("return false;"));

            MethodDeclaration reverseMethod = getMethodFromTemplateClass(REVERSE);
            reverseMethod
                    .getBody()
                    .orElseThrow(InvalidInlineTemplateException::new)
                    .addStatement(parseStatement("throw new UnsupportedOperationException(\"This function does not support reverse.\");"));
        }
    }

    /**
     * Generates getResult() from the result expression (rescoped through "data") and
     * getResultType(), which always reports java.lang.Object.
     */
    private void parseResultMethod() {
        // <result expression>: this is a semantic expression in the selected dialect that is executed after all source objects are iterated.
        MethodDeclaration resultMethod = getMethodFromTemplateClass("getResult");
        Type returnExpressionType = StaticJavaParser.parseType("java.lang.Object");
        Expression returnExpression = StaticJavaParser.parseExpression(accumulateDescr.getResultCode());
        if (returnExpression instanceof NameExpr) {
            // Parenthesize a bare name so the rescoping below rewrites it safely.
            returnExpression = new EnclosedExpr(returnExpression);
        }
        rescopeNamesToNewScope(getDataNameExpr(), contextFieldNames, returnExpression);
        resultMethod
                .getBody()
                .orElseThrow(InvalidInlineTemplateException::new)
                .addStatement(new ReturnStmt(returnExpression));

        MethodDeclaration getResultTypeMethod = getMethodFromTemplateClass("getResultType");
        getResultTypeMethod
                .getBody()
                .orElseThrow(InvalidInlineTemplateException::new)
                .addStatement(new ReturnStmt(new ClassExpr(returnExpressionType)));
    }

    /**
     * Registers the generated class as a POJO of the package model and appends
     * accFunction(<GeneratedClass>::new, var).as(binding) to the accumulate DSL call.
     */
    private void addAccumulateClassInitializationToMethod(MethodCallExpr accumulateDSL, String identifier) {
        this.packageModel.addGeneratedPOJO(accumulateInlineClass);
        final MethodCallExpr functionDSL = new MethodCallExpr(null, ACC_FUNCTION_CALL);
        functionDSL.addArgument(new MethodReferenceExpr(new NameExpr(accumulateInlineClassName), new NodeList<>(), "new"));
        functionDSL.addArgument(context.getVarExpr(identifier));

        final String bindingId = this.basePattern.getIdentifier();
        final MethodCallExpr asDSL = new MethodCallExpr(functionDSL, BIND_AS_CALL);
        asDSL.addArgument(context.getVarExpr(bindingId));
        accumulateDSL.addArgument(asDSL);
    }

    /** Name of the ContextData parameter inside the generated template methods. */
    private NameExpr getDataNameExpr() {
        return new NameExpr("data");
    }

    /** Looks up a (uniquely named) method of the template class by name. */
    private MethodDeclaration getMethodFromTemplateClass(String methodName) {
        return accumulateInlineClass.getMethodsByName(methodName).get(0);
    }

    /** True when the block has content but is missing its trailing semicolon. Null-safe. */
    private boolean blockIsNonEmptyWithoutSemicolon(String block) {
        return block != null && !block.isEmpty() && !block.endsWith(";");
    }
}
| |
package com.wordpress.tipsforjava.swing;
import java.awt.*;
import java.awt.event.*;
import java.beans.*;
import java.util.HashMap;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import javax.swing.text.*;
/**
* This class will display line numbers for a related text component. The text
* component must use the same line height for each line. TextLineNumber
* supports wrapped lines and will highlight the line number of the current
* line in the text component.
*
* This class was designed to be used as a component added to the row header
* of a JScrollPane.
*
* @author Rob Camick
* @author Darryl Burke
*/
public class TextLineNumber extends JPanel
implements CaretListener, DocumentListener, PropertyChangeListener
{
    public static final float LEFT = 0.0f;
    public static final float CENTER = 0.5f;
    public static final float RIGHT = 1.0f;

    private static final Border OUTER = new MatteBorder(0, 0, 0, 2, Color.GRAY);
    private static final int HEIGHT = Integer.MAX_VALUE - 1000000;

    // Text component this TextLineNumber component is in sync with
    private JTextComponent component;

    // Properties that can be changed
    private boolean updateFont;
    private int borderGap;
    private Color currentLineForeground;
    private float digitAlignment;
    private int minimumDisplayDigits;

    // Keep history information to reduce the number of times the component
    // needs to be repainted
    private int lastDigits;
    private int lastHeight;
    private int lastLine;

    // Cache of FontMetrics per font-family+size, used when a row mixes fonts
    private HashMap<String, FontMetrics> fonts;

    /**
     * Create a line number component for a text component. This minimum
     * display width will be based on 3 digits.
     *
     * @param component the related text component
     */
    public TextLineNumber(JTextComponent component)
    {
        this(component, 3);
    }

    /**
     * Create a line number component for a text component.
     *
     * @param component the related text component
     * @param minimumDisplayDigits the number of digits used to calculate
     * the minimum width of the component
     */
    public TextLineNumber(JTextComponent component, int minimumDisplayDigits)
    {
        this.component = component;

        setFont( component.getFont() );

        setBorderGap( 5 );
        setCurrentLineForeground( Color.RED );
        setDigitAlignment( RIGHT );
        setMinimumDisplayDigits( minimumDisplayDigits );

        component.getDocument().addDocumentListener(this);
        component.addCaretListener( this );
        component.addPropertyChangeListener("font", this);
    }

    /**
     * Gets the update font property
     *
     * @return the update font property
     */
    public boolean getUpdateFont()
    {
        return updateFont;
    }

    /**
     * Set the update font property. Indicates whether this Font should be
     * updated automatically when the Font of the related text component
     * is changed.
     *
     * @param updateFont when true update the Font and repaint the line
     * numbers, otherwise just repaint the line numbers.
     */
    public void setUpdateFont(boolean updateFont)
    {
        this.updateFont = updateFont;
    }

    /**
     * Gets the border gap
     *
     * @return the border gap in pixels
     */
    public int getBorderGap()
    {
        return borderGap;
    }

    /**
     * The border gap is used in calculating the left and right insets of the
     * border. Default value is 5.
     *
     * @param borderGap the gap in pixels
     */
    public void setBorderGap(int borderGap)
    {
        this.borderGap = borderGap;
        Border inner = new EmptyBorder(0, borderGap, 0, borderGap);
        setBorder( new CompoundBorder(OUTER, inner) );
        // Force setPreferredWidth() to recompute even if the digit count is unchanged
        lastDigits = 0;
        setPreferredWidth();
    }

    /**
     * Gets the current line rendering Color
     *
     * @return the Color used to render the current line number
     */
    public Color getCurrentLineForeground()
    {
        return currentLineForeground == null ? getForeground() : currentLineForeground;
    }

    /**
     * The Color used to render the current line digits. Default is Color.RED.
     *
     * @param currentLineForeground the Color used to render the current line
     */
    public void setCurrentLineForeground(Color currentLineForeground)
    {
        this.currentLineForeground = currentLineForeground;
    }

    /**
     * Gets the digit alignment
     *
     * @return the alignment of the painted digits
     */
    public float getDigitAlignment()
    {
        return digitAlignment;
    }

    /**
     * Specify the horizontal alignment of the digits within the component.
     * Common values would be:
     * <ul>
     * <li>TextLineNumber.LEFT
     * <li>TextLineNumber.CENTER
     * <li>TextLineNumber.RIGHT (default)
     * </ul>
     * Values are clamped into the [0.0, 1.0] range.
     *
     * @param digitAlignment the horizontal alignment of the digits
     */
    public void setDigitAlignment(float digitAlignment)
    {
        // Clamp into the documented [LEFT, RIGHT] = [0.0, 1.0] range. (The previous
        // version mapped negative values to -1.0f, outside the documented range.)
        this.digitAlignment =
            digitAlignment > 1.0f ? 1.0f : digitAlignment < 0.0f ? 0.0f : digitAlignment;
    }

    /**
     * Gets the minimum display digits
     *
     * @return the minimum display digits
     */
    public int getMinimumDisplayDigits()
    {
        return minimumDisplayDigits;
    }

    /**
     * Specify the minimum number of digits used to calculate the preferred
     * width of the component. Default is 3.
     *
     * @param minimumDisplayDigits the number digits used in the preferred
     * width calculation
     */
    public void setMinimumDisplayDigits(int minimumDisplayDigits)
    {
        this.minimumDisplayDigits = minimumDisplayDigits;
        setPreferredWidth();
    }

    /**
     * Calculate the width needed to display the maximum line number
     */
    private void setPreferredWidth()
    {
        Element root = component.getDocument().getDefaultRootElement();
        int lines = root.getElementCount();
        int digits = Math.max(String.valueOf(lines).length(), minimumDisplayDigits);

        // Update sizes when number of digits in the line number changes
        if (lastDigits != digits)
        {
            lastDigits = digits;
            FontMetrics fontMetrics = getFontMetrics( getFont() );
            int width = fontMetrics.charWidth( '0' ) * digits;
            Insets insets = getInsets();
            int preferredWidth = insets.left + insets.right + width;

            Dimension d = getPreferredSize();
            d.setSize(preferredWidth, HEIGHT);
            setPreferredSize( d );
            setSize( d );
        }
    }

    /**
     * Draw the line numbers
     */
    @Override
    public void paintComponent(Graphics g)
    {
        super.paintComponent(g);

        // Determine the width of the space available to draw the line number
        FontMetrics fontMetrics = component.getFontMetrics( component.getFont() );
        Insets insets = getInsets();
        int availableWidth = getSize().width - insets.left - insets.right;

        // Determine the rows to draw within the clipped bounds.
        Rectangle clip = g.getClipBounds();
        int rowStartOffset = component.viewToModel( new Point(0, clip.y) );
        int endOffset = component.viewToModel( new Point(0, clip.y + clip.height) );

        while (rowStartOffset <= endOffset)
        {
            try
            {
                if (isCurrentLine(rowStartOffset))
                    g.setColor( getCurrentLineForeground() );
                else
                    g.setColor( getForeground() );

                // Get the line number as a string and then determine the
                // "X" and "Y" offsets for drawing the string.
                String lineNumber = getTextLineNumber(rowStartOffset);
                int stringWidth = fontMetrics.stringWidth( lineNumber );
                int x = getOffsetX(availableWidth, stringWidth) + insets.left;
                int y = getOffsetY(rowStartOffset, fontMetrics);
                g.drawString(lineNumber, x, y);

                // Move to the next row
                rowStartOffset = Utilities.getRowEnd(component, rowStartOffset) + 1;
            }
            catch (Exception e)
            {
                // Stop painting: retrying with the same (unadvanced) rowStartOffset
                // would loop forever. (The previous version swallowed the exception
                // and kept iterating.)
                break;
            }
        }
    }

    /*
     * We need to know if the caret is currently positioned on the line we
     * are about to paint so the line number can be highlighted.
     */
    private boolean isCurrentLine(int rowStartOffset)
    {
        int caretPosition = component.getCaretPosition();
        Element root = component.getDocument().getDefaultRootElement();
        return root.getElementIndex( rowStartOffset ) == root.getElementIndex(caretPosition);
    }

    /*
     * Get the line number to be drawn. The empty string will be returned
     * when a line of text has wrapped.
     */
    protected String getTextLineNumber(int rowStartOffset)
    {
        Element root = component.getDocument().getDefaultRootElement();
        int index = root.getElementIndex( rowStartOffset );
        Element line = root.getElement( index );

        if (line.getStartOffset() == rowStartOffset)
            return String.valueOf(index + 1);
        else
            return "";
    }

    /*
     * Determine the X offset to properly align the line number when drawn
     */
    private int getOffsetX(int availableWidth, int stringWidth)
    {
        return (int)((availableWidth - stringWidth) * digitAlignment);
    }

    /*
     * Determine the Y offset for the current row
     */
    private int getOffsetY(int rowStartOffset, FontMetrics fontMetrics)
        throws BadLocationException
    {
        // Get the bounding rectangle of the row
        Rectangle r = component.modelToView( rowStartOffset );
        int lineHeight = fontMetrics.getHeight();
        int y = r.y + r.height;
        int descent = 0;

        // The text needs to be positioned above the bottom of the bounding
        // rectangle based on the descent of the font(s) contained on the row.
        if (r.height == lineHeight) // default font is being used
        {
            descent = fontMetrics.getDescent();
        }
        else // We need to check all the attributes for font changes
        {
            if (fonts == null)
                fonts = new HashMap<>();

            Element root = component.getDocument().getDefaultRootElement();
            int index = root.getElementIndex( rowStartOffset );
            Element line = root.getElement( index );

            for (int i = 0; i < line.getElementCount(); i++)
            {
                Element child = line.getElement(i);
                AttributeSet as = child.getAttributes();
                String fontFamily = (String)as.getAttribute(StyleConstants.FontFamily);
                Integer fontSize = (Integer)as.getAttribute(StyleConstants.FontSize);
                String key = fontFamily + fontSize;

                FontMetrics fm = fonts.get( key );
                if (fm == null)
                {
                    Font font = new Font(fontFamily, Font.PLAIN, fontSize);
                    fm = component.getFontMetrics( font );
                    fonts.put(key, fm);
                }

                descent = Math.max(descent, fm.getDescent());
            }
        }

        return y - descent;
    }

    //
    // Implement CaretListener interface
    //
    @Override
    public void caretUpdate(CaretEvent e)
    {
        // Get the line the caret is positioned on
        int caretPosition = component.getCaretPosition();
        Element root = component.getDocument().getDefaultRootElement();
        int currentLine = root.getElementIndex( caretPosition );

        // Need to repaint so the correct line number can be highlighted
        if (lastLine != currentLine)
        {
            repaint();
            lastLine = currentLine;
        }
    }

    //
    // Implement DocumentListener interface
    //
    @Override
    public void changedUpdate(DocumentEvent e)
    {
        documentChanged();
    }

    @Override
    public void insertUpdate(DocumentEvent e)
    {
        documentChanged();
    }

    @Override
    public void removeUpdate(DocumentEvent e)
    {
        documentChanged();
    }

    /*
     * A document change may affect the number of displayed lines of text.
     * Therefore the lines numbers will also change.
     */
    private void documentChanged()
    {
        // Preferred size of the component has not been updated at the time
        // the DocumentEvent is fired
        SwingUtilities.invokeLater(new Runnable()
        {
            public void run()
            {
                int preferredHeight = component.getPreferredSize().height;

                // Document change has caused a change in the number of lines.
                // Repaint to reflect the new line numbers
                if (lastHeight != preferredHeight)
                {
                    setPreferredWidth();
                    repaint();
                    lastHeight = preferredHeight;
                }
            }
        });
    }

    //
    // Implement PropertyChangeListener interface
    //
    @Override
    public void propertyChange(PropertyChangeEvent evt)
    {
        if (evt.getNewValue() instanceof Font)
        {
            if (updateFont)
            {
                Font newFont = (Font) evt.getNewValue();
                setFont(newFont);
                lastDigits = 0;
                setPreferredWidth();
            }
            else
            {
                repaint();
            }
        }
    }
}
| |
/*
* Copyright (C) 2012 United States Government as represented by the Administrator of the
* National Aeronautics and Space Administration.
* All Rights Reserved.
*/
package gov.nasa.worldwind.ogc.kml.impl;
import gov.nasa.worldwind.avlist.*;
import gov.nasa.worldwind.ogc.kml.*;
import gov.nasa.worldwind.render.*;
import gov.nasa.worldwind.util.*;
import gov.nasa.worldwind.util.webview.WebResourceResolver;
import java.awt.*;
import java.beans.*;
import java.io.*;
import java.net.URL;
import java.util.*;
import java.util.List;
import java.util.logging.Level;
import java.util.regex.*;
/**
* An implementation of {@link Balloon} that applies a {@link KMLBalloonStyle} to the balloon. Rather than fully
* implementing the Balloon interface, this class provides a thin wrapper around another Balloon implementation and adds
* the logic for styling the Balloon according to the KML style. All Balloon methods on this class pass through to the
* contained Balloon.
* <p/>
* To use KML Balloon, first create a Balloon of the desired type, and then create the KML Balloon. For example:
* <p/>
* <code>
* <pre>
* KMLPlacemark myPlacemark = ...;
* Position placemarkPosition = ...;
*
* // Create a BrowserBalloon for the placemark.
* GlobeBalloon globeBalloon = new GlobeBrowserBalloon(myPlacemark.getName(), placemarkPosition);
*
* // Create a KML Balloon to apply the placemark's KML BalloonStyle to the browser balloon.
* KMLGlobeBalloonImpl kmlBalloon = new KMLGlobeBalloonImpl(globeBalloon, myPlacemark);
* </pre>
* </code>
*
* @author pabercrombie
* @version $Id: KMLAbstractBalloon.java 1555 2013-08-20 13:33:12Z pabercrombie $
*/
public abstract class KMLAbstractBalloon implements Balloon, WebResourceResolver, PropertyChangeListener
{
public static final String DISPLAY_MODE_HIDE = "hide";
public static final String DISPLAY_MODE_DEFAULT = "default";
protected KMLAbstractFeature parent;
protected String displayMode = DISPLAY_MODE_DEFAULT;
/** Indicates that the balloon has default text loaded, rather than text supplied by the BalloonStyle. */
protected boolean usingDefaultText;
protected boolean normalAttributesResolved;
protected boolean highlightAttributesResolved;
/** Text when balloon is not highlighted. */
protected String normalText;
/** Text when balloon is highlighted. */
protected String highlightText;
/**
* Create a globe attached Balloon Impl object for a KML feature.
*
* @param feature Feature to create balloon annotation for.
*/
public KMLAbstractBalloon(KMLAbstractFeature feature)
{
if (feature == null)
{
String msg = Logging.getMessage("nullValue.FeatureIsNull");
Logging.logger().severe(msg);
throw new IllegalArgumentException(msg);
}
this.parent = feature;
}
/**
 * Initialize the object: install the KML text decoder and context on the wrapped balloon,
 * register this object as the resource resolver for browser balloons, and subscribe to the
 * balloon's property changes.
 *
 * @param balloon The balloon contained in this wrapper object.
 */
protected void initialize(Balloon balloon)
{
    balloon.setTextDecoder(this.createTextDecoder(this.parent));
    balloon.setValue(AVKey.CONTEXT, this.parent);

    // Configure this balloon to resolve relative paths in the KML balloon HTML via its resolve() method.
    if (balloon instanceof AbstractBrowserBalloon)
        ((AbstractBrowserBalloon) balloon).setResourceResolver(this);

    // Listen for balloon property changes. These will be forwarded to the parent KMLRoot so that the balloon
    // can trigger a repaint when its contents have changed.
    balloon.addPropertyChangeListener(this);
}
/**
 * Get the Balloon object that is contained in the KMLBalloon object. Subclasses supply the
 * concrete balloon instance that this wrapper styles, renders, and delegates to.
 *
 * @return The balloon contained by this object.
 */
protected abstract Balloon getBalloon();
/**
 * Render the balloon. This method will attempt to resolve the balloon style, if it has not already been resolved.
 * The balloon is not drawn when its text is empty or its display mode is "hide".
 *
 * @param dc Draw context
 */
public void render(DrawContext dc)
{
    final Balloon balloon = this.getBalloon();

    // Lazily resolve the style for the state (highlight/normal) that is about to be drawn.
    if (balloon.isHighlighted() && !this.highlightAttributesResolved)
        this.makeAttributesCurrent(KMLConstants.HIGHLIGHT);
    else if (!this.normalAttributesResolved)
        this.makeAttributesCurrent(KMLConstants.NORMAL);

    this.determineActiveText();

    final boolean hidden = DISPLAY_MODE_HIDE.equals(this.getDisplayMode());
    if (!hidden && !WWUtil.isEmpty(this.getText()))
        balloon.render(dc);
}
/** Determine the balloon text for this frame, depending on the balloon highlight state. */
protected void determineActiveText()
{
    // Prefer the highlight text when highlighted; fall back to the normal text
    // when not highlighted or when no highlight text is available.
    String candidate = this.isHighlighted() ? this.highlightText : null;
    if (candidate == null)
        candidate = this.normalText;

    // Apply the text only when it actually differs from what is currently set.
    if (candidate != null && !candidate.equals(this.getText()))
        this.setText(candidate);
}
/**
 * Update the balloon attributes to match the KML BalloonStyle.
 *
 * @param attrType Type of attributes to update. Either {@link KMLConstants#NORMAL} or {@link
 *                 KMLConstants#HIGHLIGHT}.
 */
protected void makeAttributesCurrent(String attrType)
{
    BalloonAttributes attrs = this.getInitialBalloonAttributes();
    // Resolve the feature's BalloonStyle for the requested attribute type.
    KMLBalloonStyle balloonStyle = (KMLBalloonStyle) this.parent.getSubStyle(new KMLBalloonStyle(null), attrType);
    // This local intentionally shadows the displayMode field; the field is only
    // overwritten when the style actually specifies a display mode.
    String displayMode = balloonStyle.getDisplayMode();
    if (displayMode != null)
        this.setDisplayMode(displayMode);
    this.assembleBalloonAttributes(balloonStyle, attrs);
    // Propagate the style's unresolved flag so resolution is retried next frame.
    if (balloonStyle.hasField(AVKey.UNRESOLVED))
        attrs.setUnresolved(true);
    else
        attrs.setUnresolved(false);
    if (KMLConstants.NORMAL.equals(attrType))
    {
        this.getBalloon().setAttributes(attrs);
        // Set balloon text. If the style does not provide text, set the default text, if it has not been set
        // already. We use a field to track if the default text has been set to avoid continually resetting default
        // text if the style cannot be resolved.
        String text = balloonStyle.getText();
        if (text != null)
        {
            if (this.mustAddHyperlinks(text))
                text = this.addHyperlinks(text);
            this.getBalloon().setText(text);
            this.normalText = text;
        }
        else if (!this.usingDefaultText)
        {
            text = this.createDefaultBalloonText();
            if (this.mustAddHyperlinks(text))
                text = this.addHyperlinks(text);
            this.getBalloon().setText(text);
            this.usingDefaultText = true;
            this.normalText = text;
        }
        // Consider the attributes resolved once nothing is pending (or the style was empty).
        if (!attrs.isUnresolved() || !balloonStyle.hasFields())
            this.normalAttributesResolved = true;
    }
    else
    {
        this.getBalloon().setHighlightAttributes(attrs);
        // NOTE(review): unlike the normal branch, text is not null-checked here before
        // mustAddHyperlinks(text) — presumably that method tolerates null; confirm.
        String text = balloonStyle.getText();
        if (this.mustAddHyperlinks(text))
            text = this.addHyperlinks(text);
        this.highlightText = text;
        if (!attrs.isUnresolved() || !balloonStyle.hasFields())
            this.highlightAttributesResolved = true;
    }
}
/**
 * Build a default balloon text string for the feature: a bold name, the description, and
 * tables for any extended data and schema data present on the feature.
 *
 * @return Default balloon text.
 */
protected String createDefaultBalloonText()
{
    StringBuilder sb = new StringBuilder();

    // Create default text for features that have a name and/or a description
    String name = this.parent.getName();
    if (!WWUtil.isEmpty(name))
        sb.append("<b>").append(name).append("</b>");

    String description = this.parent.getDescription();
    if (!WWUtil.isEmpty(description))
        sb.append("<br/>").append(description);

    KMLExtendedData extendedData = this.parent.getExtendedData();
    if (extendedData != null)
    {
        List<KMLData> data = extendedData.getData();
        if (data != null && !data.isEmpty())
            this.createDefaultExtendedDataText(sb, data);

        List<KMLSchemaData> schemaData = extendedData.getSchemaData();
        if (schemaData != null && !schemaData.isEmpty())
            this.createDefaultSchemaDataText(sb, schemaData);
    }

    return sb.toString();
}
/**
* Build a default balloon text string for the feature's extended data. This implementation builds a simple data
* table.
*
* @param sb Extended data string will be appended to this StringBuilder.
* @param data The feature's extended data.
*/
protected void createDefaultExtendedDataText(StringBuilder sb, List<KMLData> data)
{
sb.append("<p/><table border=\"1\">");
for (KMLData item : data)
{
String value = item.getValue();
if (!WWUtil.isEmpty(value))
{
String name = item.getName() != null ? item.getName() : "";
sb.append("<tr><td>$[").append(name).append("/displayName]</td><td>").append(value).append(
"</td></tr>");
}
}
sb.append("</table>");
}
/**
* Build a default balloon text string for the feature's schema data. This implementation builds a simple data
* table.
*
* @param sb Extended data string will be appended to this StringBuilder.
* @param data The feature's schema data.
*/
protected void createDefaultSchemaDataText(StringBuilder sb, List<KMLSchemaData> data)
{
sb.append("<p/><table border=\"1\">");
for (KMLSchemaData schemaData : data)
{
KMLSchema schema = (KMLSchema) this.parent.getRoot().resolveReference(schemaData.getSchemaUrl());
for (KMLSimpleData simpleData : schemaData.getSimpleData())
{
String value = simpleData.getCharacters();
if (!WWUtil.isEmpty(value))
{
String dataName = simpleData.getName() != null ? simpleData.getName() : "";
sb.append("<tr><td>");
// Insert the schema name, if the schema can be resolved. Otherwise just use the data name.
if (schema != null && !WWUtil.isEmpty(schema.getName()) && !WWUtil.isEmpty(dataName))
{
sb.append("$[").append(schema.getName()).append("/").append(dataName).append("/displayName]");
}
else
{
sb.append(dataName);
}
sb.append("</td><td>").append(value).append("</td><td>");
}
}
}
sb.append("</table>");
}
/**
* Determines if URLs in the balloon text should be converted to hyperlinks. The Google KML specification states the
* GE will add hyperlinks to balloon text that does not contain HTML formatting. This method searches for a
* <html> tag in the content to determine if the content is HTML or plain text.
*
* @param text Balloon text to process.
*
* @return True if URLs should be converted links. Returns true if a <html> tag is found in the text.
*/
protected boolean mustAddHyperlinks(String text)
{
return text != null
&& !text.contains("<html")
&& !text.contains("<HTML");
}
/**
* Add hyperlink tags to URLs in the balloon text. The text may include some simple HTML markup. This method
* attempts to identify URLs in the text while not altering URLs that are already linked.
* <p/>
* This method is conservative about what is identified as a URL, in order to avoid adding links to text that the
* user did not intend to be linked. Only HTTP and HTTPS URLs are recognised, as well as text that begins with www.
* (in which case a http:// prefix will be prepended). Some punctuation characters that are valid URL characters
* (such as parentheses) are not treated as URL characters here because users may expect the punctuation to separate
* the URL from text.
*
* @param text Text to process. Each URL in the text will be replaced with <a href="url" target="_blank"> url
* </a>
*
* @return Text with hyperlinks added.
*/
protected String addHyperlinks(String text)
{
// Regular expression to match a http(s) URL, or an entire anchor tag. Note that this does not match all valid
// URLs. It is designed to match obvious URLs that occur in KML balloons, with minimal chance of matching text
// the user did not intend to be a link.
String regex =
"<a\\s.*?</a>" // Match all text between anchor tags
+ "|" // or
+ "[^'\"]" // Non-quote (avoids matching quoted urls in code)
+ "(" // Capture group 1
+ "(?:https?://|www\\.)" // HTTP(S) protocol or www. (non-capturing group)
+ "[a-z0-9.$%&#+/_-]+" // Match until a non-URL character
+ ")";
StringBuffer sb = new StringBuffer();
Matcher matcher = Pattern.compile(regex, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(text);
while (matcher.find())
{
// If the match is a URL then group 1 holds the matched URL. If group 1 is null then the match is an anchor
// tag, in which case we just skip it to avoid adding links to text that is already part of a link.
String url = matcher.group(1);
if (url != null)
{
String prefix = url.toLowerCase().startsWith("www") ? "http://" : "";
matcher.appendReplacement(sb, "<a href=\"" + prefix + "$1\" target=\"_blank\">$1</a>");
}
}
matcher.appendTail(sb);
return sb.toString();
}
/**
* Get the default attributes applied to the balloon. These attributes will be modified by {@link
* #assembleBalloonAttributes(gov.nasa.worldwind.ogc.kml.KMLBalloonStyle, gov.nasa.worldwind.render.BalloonAttributes)
* assembleBalloonAttributes} to reflect the settings in the KML <i>BalloonStyle</i>.
*
* @return Initial balloon attributes.
*/
protected BalloonAttributes getInitialBalloonAttributes()
{
BalloonAttributes attrs;
if (this.isHighlighted())
{
attrs = this.getHighlightAttributes();
// Copy the normal attributes if there are no highlight attributes
if (attrs == null && this.getAttributes() != null)
{
attrs = new BasicBalloonAttributes(this.getAttributes());
}
}
else
{
attrs = this.getAttributes();
}
if (attrs == null)
attrs = new BasicBalloonAttributes();
return attrs;
}
/**
* Apply a KML <i>BalloonStyle</i> to the balloon attributes object.
*
* @param style KML style to apply.
* @param balloonAttributes Attributes to modify.
*/
protected void assembleBalloonAttributes(KMLBalloonStyle style, BalloonAttributes balloonAttributes)
{
// Attempt to use the bgColor property. This is the preferred method for encoding a BalloonStyle's background
// color since KML 2.1, therefore we give it priority.
String bgColor = style.getBgColor();
// If the bgColor property is null, attempt to use the deprecated color property. color was deprecated in
// KML 2.1, but must be supported for backward compatibility. See the KML 2.1 reference, section 7.1.3.
if (bgColor == null)
bgColor = style.getColor();
if (bgColor != null)
balloonAttributes.setInteriorMaterial(new Material(WWUtil.decodeColorABGR(bgColor)));
String textColor = style.getTextColor();
if (textColor != null)
balloonAttributes.setTextColor(WWUtil.decodeColorABGR(textColor));
}
    /**
     * Create the text decoder that will process the text in the balloon.
     *
     * @param feature Feature to decode text for.
     *
     * @return New text decoder.
     */
    protected TextDecoder createTextDecoder(KMLAbstractFeature feature)
    {
        // NOTE(review): presumably substitutes KML entity references (e.g. $[name]) with feature values — confirm.
        return new KMLBalloonTextDecoder(feature);
    }
    /**
     * Get the balloon display mode, either {@link #DISPLAY_MODE_DEFAULT} or {@link #DISPLAY_MODE_HIDE}.
     *
     * @return The current display mode. Never null once set via {@link #setDisplayMode(String)}.
     *
     * @see #setDisplayMode(String)
     */
    public String getDisplayMode()
    {
        return this.displayMode;
    }
/**
* Set the balloon's display mode, either {@link #DISPLAY_MODE_DEFAULT} or {@link #DISPLAY_MODE_HIDE}. When the mode
* is {@link #DISPLAY_MODE_HIDE}, the balloon will not be drawn.
*
* @param displayMode New display mode.
*
* @see #getDisplayMode()
*/
public void setDisplayMode(String displayMode)
{
if (displayMode == null)
{
String msg = Logging.getMessage("nullValue.StringIsNull");
Logging.logger().severe(msg);
throw new IllegalArgumentException(msg);
}
this.displayMode = displayMode;
}
/**
* {@inheritDoc}
* <p/>
* This implementation resolves relative resource paths by calling <code>{@link
* gov.nasa.worldwind.ogc.kml.io.KMLDoc#getSupportFilePath(String)}</code> on the parent
* <code>KMLAbstractFeature's</code> <code>KMLDoc</code>. This is necessary to correctly resolve relative references
* in a KMZ archive.
* <p/>
* This returns <code>null</code> if the specified <code>address</code> is <code>null</code>.
*/
public URL resolve(String address)
{
if (address == null)
return null;
try
{
// Resolve the relative path against the KMLDoc, and convert it to a URL. We use makeURL variant that
// accepts a default protocol, because we know the path is an absolute file path. If the path does not
// define a valid URL, makeURL returns null and the balloon treats this as an unresolved resource.
String absolutePath = this.parent.getRoot().getKMLDoc().getSupportFilePath(address);
if (!WWUtil.isEmpty(absolutePath))
{
File file = new File(absolutePath);
return file.toURI().toURL();
}
}
catch (IOException e)
{
Logging.logger().log(Level.WARNING, Logging.getMessage("KML.UnableToResolvePath", address), e.getMessage());
}
return null;
}
    /**
     * Forward property change events to the parent KMLRoot.
     *
     * @param evt Event to forward.
     */
    public void propertyChange(PropertyChangeEvent evt)
    {
        this.parent.getRoot().firePropertyChange(evt);
    }
    //***************************************************************************//
    //**********************  Balloon implementation  ***************************//
    //**************************************************************************//
    // The methods below delegate the Balloon interface to the contained balloon instance.
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public boolean isHighlighted()
    {
        return this.getBalloon().isHighlighted();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setHighlighted(boolean highlighted)
    {
        this.getBalloon().setHighlighted(highlighted);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public boolean isAlwaysOnTop()
    {
        return this.getBalloon().isAlwaysOnTop();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setAlwaysOnTop(boolean alwaysOnTop)
    {
        this.getBalloon().setAlwaysOnTop(alwaysOnTop);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public boolean isPickEnabled()
    {
        return this.getBalloon().isPickEnabled();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setPickEnabled(boolean enable)
    {
        this.getBalloon().setPickEnabled(enable);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public String getText()
    {
        return this.getBalloon().getText();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setText(String text)
    {
        this.getBalloon().setText(text);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public BalloonAttributes getAttributes()
    {
        return this.getBalloon().getAttributes();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setAttributes(BalloonAttributes attrs)
    {
        this.getBalloon().setAttributes(attrs);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public BalloonAttributes getHighlightAttributes()
    {
        return this.getBalloon().getHighlightAttributes();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setHighlightAttributes(BalloonAttributes attrs)
    {
        this.getBalloon().setHighlightAttributes(attrs);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public TextDecoder getTextDecoder()
    {
        return this.getBalloon().getTextDecoder();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setTextDecoder(TextDecoder decoder)
    {
        this.getBalloon().setTextDecoder(decoder);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public Object getDelegateOwner()
    {
        return this.getBalloon().getDelegateOwner();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setDelegateOwner(Object owner)
    {
        this.getBalloon().setDelegateOwner(owner);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public boolean isVisible()
    {
        return this.getBalloon().isVisible();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setVisible(boolean visible)
    {
        this.getBalloon().setVisible(visible);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public Rectangle getBounds(DrawContext dc)
    {
        return this.getBalloon().getBounds(dc);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public double getMinActiveAltitude()
    {
        return this.getBalloon().getMinActiveAltitude();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setMinActiveAltitude(double minActiveAltitude)
    {
        this.getBalloon().setMinActiveAltitude(minActiveAltitude);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public double getMaxActiveAltitude()
    {
        return this.getBalloon().getMaxActiveAltitude();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void setMaxActiveAltitude(double maxActiveAltitude)
    {
        this.getBalloon().setMaxActiveAltitude(maxActiveAltitude);
    }
    //***************************************************************************//
    //**********************  AVList implementation  ***************************//
    //**************************************************************************//
    // The methods below delegate the AVList interface to the contained balloon instance.
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public Object setValue(String key, Object value)
    {
        return this.getBalloon().setValue(key, value);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public AVList setValues(AVList avList)
    {
        return this.getBalloon().setValues(avList);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public Object getValue(String key)
    {
        return this.getBalloon().getValue(key);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public Collection<Object> getValues()
    {
        return this.getBalloon().getValues();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public String getStringValue(String key)
    {
        return this.getBalloon().getStringValue(key);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public Set<Map.Entry<String, Object>> getEntries()
    {
        return this.getBalloon().getEntries();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public boolean hasKey(String key)
    {
        return this.getBalloon().hasKey(key);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public Object removeKey(String key)
    {
        return this.getBalloon().removeKey(key);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void addPropertyChangeListener(String propertyName, PropertyChangeListener listener)
    {
        this.getBalloon().addPropertyChangeListener(propertyName, listener);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void removePropertyChangeListener(String propertyName, PropertyChangeListener listener)
    {
        this.getBalloon().removePropertyChangeListener(propertyName, listener);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void addPropertyChangeListener(PropertyChangeListener listener)
    {
        this.getBalloon().addPropertyChangeListener(listener);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void removePropertyChangeListener(PropertyChangeListener listener)
    {
        this.getBalloon().removePropertyChangeListener(listener);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void firePropertyChange(String propertyName, Object oldValue, Object newValue)
    {
        this.getBalloon().firePropertyChange(propertyName, oldValue, newValue);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public void firePropertyChange(PropertyChangeEvent propertyChangeEvent)
    {
        this.getBalloon().firePropertyChange(propertyChangeEvent);
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public AVList copy()
    {
        return this.getBalloon().copy();
    }
    /** {@inheritDoc}. This method passes through to the contained balloon. */
    public AVList clearList()
    {
        return this.getBalloon().clearList();
    }
}
| |
package org.xito.dcf.property.editor;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.GraphicsEnvironment;
import java.awt.Rectangle;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import javax.swing.AbstractButton;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.ButtonModel;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JToggleButton;
/**
 * Property editor for {@link java.awt.Font} values. Presents a font-family combo box, a point-size
 * combo box and plain/italic/bold style toggle buttons, plus a sample label rendered in the
 * currently selected font.
 */
public class FontEditor extends EditorSupport implements ActionListener
{
    /** Fixed pixel size of the style toggle buttons. */
    private static final int BUTTON_WIDTH = 20;
    private static final int BUTTON_HEIGHT = 30;

    private static final Dimension buttonSize = new Dimension(BUTTON_WIDTH, BUTTON_HEIGHT);

    /** Point sizes offered in the size combo box, in ascending order. */
    private static final int[] pointSizes = { 3, 5, 8, 10, 12, 14, 18, 24, 36, 48 };

    /** Text painted by {@link #paintValue} as a sample of the current font. */
    private static final String sampleText = "Abcde...";

    /** Font family names available in the local graphics environment. */
    private String[] fonts;

    /** Currently selected style: Font.PLAIN, Font.ITALIC or Font.BOLD. */
    private int selectedStyle = Font.PLAIN;

    // Controls
    private JComboBox familyNameCombo;
    private JComboBox fontSizeCombo;
    private JToggleButton pButton, iButton, bButton;
    private FontDisplay iDisplay, pDisplay, bDisplay;
    private JLabel labelDisplay;

    public FontEditor()
    {
        fonts = GraphicsEnvironment.getLocalGraphicsEnvironment().getAvailableFontFamilyNames();

        pDisplay = new FontDisplay(Font.PLAIN);
        pButton = new JToggleButton(pDisplay);
        pButton.setToolTipText("plain style");

        iDisplay = new FontDisplay(Font.ITALIC);
        iButton = new JToggleButton(iDisplay);
        iButton.setToolTipText("italic style");

        bDisplay = new FontDisplay(Font.BOLD);
        bButton = new JToggleButton(bDisplay);
        bButton.setToolTipText("bold style");

        initializeButton(pButton);
        initializeButton(iButton);
        initializeButton(bButton);

        // The three style buttons are mutually exclusive.
        ButtonGroup group = new ButtonGroup();
        group.add(pButton);
        group.add(iButton);
        group.add(bButton);

        // ComboBoxes
        familyNameCombo = new JComboBox();
        fontSizeCombo = new JComboBox();

        labelDisplay = new JLabel(fonts[0]);
        labelDisplay.setAlignmentX(Component.LEFT_ALIGNMENT);
        labelDisplay.setPreferredSize(new Dimension(250, 30));
        labelDisplay.setMinimumSize(new Dimension(250, 30));

        initializeComboBoxes();

        // Assemble the panel: controls in a horizontal row, sample label below.
        JPanel p = new JPanel();
        p.setLayout(new BoxLayout(p, BoxLayout.X_AXIS));
        p.add(familyNameCombo);
        p.add(Box.createRigidArea(new Dimension(5, 0)));
        p.add(fontSizeCombo);
        p.add(Box.createRigidArea(new Dimension(5, 0)));
        p.add(pButton);
        p.add(iButton);
        p.add(bButton);
        p.setAlignmentX(Component.LEFT_ALIGNMENT);

        panel = new JPanel();
        panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
        panel.add(p);
        panel.add(labelDisplay);
    }

    /** Applies the common look and behavior shared by the three style toggle buttons. */
    private void initializeButton(JToggleButton b)
    {
        b.setBorderPainted(false);
        b.setFocusPainted(false);
        b.setContentAreaFilled(false);
        b.setPreferredSize(buttonSize);
        b.setMaximumSize(buttonSize);
        b.setMinimumSize(buttonSize);
        b.addActionListener(this);
        setAlignment(b);
    }

    /** Populates and sizes the family and point-size combo boxes. */
    private void initializeComboBoxes()
    {
        for (int i = 0; i < fonts.length; i++)
            familyNameCombo.addItem(fonts[i]);
        familyNameCombo.setPreferredSize(EditorSupport.MEDIUM_DIMENSION);
        familyNameCombo.setMinimumSize(EditorSupport.MEDIUM_DIMENSION);
        familyNameCombo.setMaximumSize(EditorSupport.MEDIUM_DIMENSION);
        familyNameCombo.addActionListener(this);
        setAlignment(familyNameCombo);

        for (int i = 0; i < pointSizes.length; i++)
            fontSizeCombo.addItem("" + pointSizes[i]);
        fontSizeCombo.setPreferredSize(EditorSupport.SMALL_DIMENSION);
        fontSizeCombo.setMaximumSize(EditorSupport.SMALL_DIMENSION);
        fontSizeCombo.setMinimumSize(EditorSupport.SMALL_DIMENSION);
        fontSizeCombo.addActionListener(this);
        setAlignment(fontSizeCombo);
    }

    /**
     * ActionListener handler for all component events.
     *
     * @param evt Event fired by one of the style buttons or combo boxes.
     */
    public void actionPerformed(ActionEvent evt)
    {
        Object obj = evt.getSource();
        if (obj instanceof AbstractButton)
        {
            if (obj == pButton)
                selectedStyle = Font.PLAIN;
            else if (obj == iButton)
                selectedStyle = Font.ITALIC;
            else if (obj == bButton)
                selectedStyle = Font.BOLD;
            applyControlValues();
        }
        if (obj instanceof JComboBox)
            applyControlValues();
    }

    /** Builds a Font from the current control selections and installs it as the editor value. */
    private void applyControlValues()
    {
        String family = (String)familyNameCombo.getSelectedItem();
        int size = pointSizes[fontSizeCombo.getSelectedIndex()];
        setValue(new Font(family, selectedStyle, size));
    }

    /**
     * Reconfigure the controls to reflect the current font.
     *
     * @param font Font to display; must not be null.
     */
    private void editorChangeValue(Font font)
    {
        // Select the matching family name, if it is installed locally.
        for (int i = 0; i < fonts.length; i++)
        {
            if (fonts[i].equals(font.getName()))
            {
                familyNameCombo.setSelectedIndex(i);
                break;
            }
        }

        // Select the smallest offered point size that is >= the font's size.
        for (int i = 0; i < pointSizes.length; i++)
        {
            if (font.getSize() <= pointSizes[i])
            {
                fontSizeCombo.setSelectedIndex(i);
                break;
            }
        }

        selectedStyle = font.getStyle();
        String style = "";
        switch (selectedStyle)
        {
            case Font.PLAIN:
                pButton.setSelected(true);
                style = "Plain";
                break;
            case Font.ITALIC:
                iButton.setSelected(true);
                style = "Italic";
                break;
            case Font.BOLD:
                bButton.setSelected(true);
                style = "Bold";
                break;
        }

        String family = font.getFamily();
        iDisplay.setFamily(family);
        pDisplay.setFamily(family);
        bDisplay.setFamily(family);

        labelDisplay.setFont(font);
        labelDisplay.setText(family + ", " + style + ", " + font.getSize());

        panel.revalidate();
        panel.repaint();
    }

    //
    // PropertyEditor interface definitions
    //

    public void setValue(Object value)
    {
        super.setValue(value);
        // Only reconfigure the controls for actual Font values. The previous code cast unconditionally,
        // throwing NPE/ClassCastException from a property-change path on null or non-Font values.
        if (value instanceof Font)
            editorChangeValue((Font)value);
    }

    public boolean isPaintable()
    {
        return true;
    }

    /** Paints the sample text in the current font, roughly vertically centered in the given rectangle. */
    public void paintValue(Graphics g, Rectangle rect)
    {
        Font oldFont = g.getFont();
        g.setFont((Font)getValue());
        FontMetrics fm = g.getFontMetrics();
        int vpad = (rect.height - fm.getAscent()) / 2;
        g.drawString(sampleText, 0, rect.height - vpad);
        g.setFont(oldFont);
    }

    /** Returns a Java expression that reconstructs the current font value. */
    public String getJavaInitializationString()
    {
        Font font = (Font)getValue();
        return "new java.awt.Font(\"" + font.getFamily() + "\", " +
            font.getStyle() + ", " + font.getSize() + ")";
    }

    /**
     * Icon that paints a single styled letter, used as the face of the style toggle buttons.
     * Declared static: it does not reference the enclosing editor instance.
     */
    private static class FontDisplay implements Icon
    {
        private Font font;
        private int style = Font.PLAIN;
        private int size = 24;
        private String label = "A";
        private int iconWidth = 20;
        private int iconHeight = 30;

        public FontDisplay(int style)
        {
            this.style = style;
            this.font = new Font("Dialog", this.style, this.size);
        }

        public FontDisplay()
        {
            this.font = new Font("Dialog", this.style, this.size);
        }

        /** Rebuilds the icon's font with the given family, preserving style and size. */
        public void setFamily(String family)
        {
            this.font = new Font(family, this.style, this.size);
        }

        public void paintIcon(Component c, Graphics g, int x, int y)
        {
            JComponent component = (JComponent)c;
            Font oldFont = g.getFont();
            g.setFont(this.font);
            if (component instanceof JToggleButton)
            {
                // Black when pressed/selected, gray otherwise.
                AbstractButton b = (AbstractButton)component;
                ButtonModel model = b.getModel();
                if (model.isPressed() || model.isSelected())
                    g.setColor(Color.black); // xxx: foreground
                else
                    g.setColor(Color.gray); // xxx: foreground light
            }
            g.drawString(label, x, (y + iconHeight) - 7);
            g.setFont(oldFont);
        }

        public int getIconWidth()
        {
            return iconWidth;
        }

        public int getIconHeight()
        {
            return iconHeight;
        }
    } // end class FontDisplay
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.cache.hibernate;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.cache.Cache;
import javax.persistence.Cacheable;
import javax.persistence.Id;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.hamcrest.core.Is;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.RootClass;
import org.junit.Test;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.hibernate.HibernateAccessStrategyFactory.REGION_CACHE_PROPERTY;
import static org.hibernate.cfg.AvailableSettings.USE_STRUCTURED_CACHE;
import static org.junit.Assert.assertThat;
/**
* Tests Hibernate L2 cache configuration.
*/
@SuppressWarnings("unchecked")
public class HibernateL2CacheStrategySelfTest extends GridCommonAbstractTest {
    /** Fully-qualified name of the Entity1 test entity. */
    private static final String ENTITY1_NAME = Entity1.class.getName();
    /** Fully-qualified name of the Entity2 test entity. */
    private static final String ENTITY2_NAME = Entity2.class.getName();
    /** Fully-qualified name of the Entity3 test entity. */
    private static final String ENTITY3_NAME = Entity3.class.getName();
    /** Fully-qualified name of the Entity4 test entity. */
    private static final String ENTITY4_NAME = Entity4.class.getName();
    /** Region name of the Hibernate update-timestamps cache. */
    private static final String TIMESTAMP_CACHE = "org.hibernate.cache.spi.UpdateTimestampsCache";
    /** Region name of the Hibernate standard query cache. */
    private static final String QUERY_CACHE = "org.hibernate.cache.internal.StandardQueryCache";
    /** In-memory H2 connection URL used by the Hibernate configuration. */
    private static final String CONNECTION_URL = "jdbc:h2:mem:example;DB_CLOSE_DELAY=-1";
    /** Session factory under test; created per access type by startHibernate and closed in cleanup. */
    private SessionFactory sesFactory1;
    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // A single Ignite node provides all L2 cache regions used by the tests.
        startGrid(0);
    }
    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        // Clear every cache so entries from one test cannot leak into the next.
        for (IgniteCacheProxy<?, ?> cache : ((IgniteKernal)grid(0)).caches())
            cache.clear();
    }
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(new TcpDiscoveryVmIpFinder(true));
        // One cache per entity region plus the shared timestamp and query cache regions.
        cfg.setCacheConfiguration(cacheConfiguration(ENTITY3_NAME),
            cacheConfiguration(ENTITY4_NAME),
            cacheConfiguration("cache1"),
            cacheConfiguration("cache2"),
            cacheConfiguration(TIMESTAMP_CACHE),
            cacheConfiguration(QUERY_CACHE));
        return cfg;
    }
/**
* @param cacheName Cache name.
* @return Cache configuration.
*/
private CacheConfiguration cacheConfiguration(String cacheName) {
CacheConfiguration cfg = new CacheConfiguration();
cfg.setName(cacheName);
cfg.setCacheMode(PARTITIONED);
cfg.setAtomicityMode(TRANSACTIONAL);
return cfg;
}
    /**
     * Runs the read-write scenario for both the READ_WRITE and NONSTRICT_READ_WRITE access types.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testEntityCacheReadWrite() throws Exception {
        for (AccessType accessType : new AccessType[]{AccessType.READ_WRITE, AccessType.NONSTRICT_READ_WRITE})
            testEntityCacheReadWrite(accessType);
    }
    /**
     * Exercises insert/update sequences against two cached entities and verifies after each step that the
     * Ignite-backed L2 regions ("cache1" for Entity1, "cache2" for Entity2) hold the expected entry counts
     * and entity names.
     *
     * @param accessType Cache access type.
     * @throws Exception If failed.
     */
    private void testEntityCacheReadWrite(AccessType accessType) throws Exception {
        log.info("Test access type: " + accessType);
        sesFactory1 = startHibernate(accessType, getTestIgniteInstanceName(0));
        try {
            // 1 Adding.
            Session ses = sesFactory1.openSession();
            try {
                Transaction tr = ses.beginTransaction();
                ses.save(new Entity1(1, "entity-1#name-1"));
                ses.save(new Entity2(1, "entity-2#name-1"));
                tr.commit();
            }
            finally {
                ses.close();
            }
            // Read the entities back so they are pulled into the L2 cache regions.
            loadEntities(sesFactory1);
            assertEquals(1, grid(0).cache("cache1").size());
            assertEquals(1, grid(0).cache("cache2").size());
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 1), Is.is("entity-1#name-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 1), Is.is("entity-2#name-1"));
            // 2. Updating and adding.
            ses = sesFactory1.openSession();
            try {
                Transaction tx = ses.beginTransaction();
                Entity1 e1 = (Entity1)ses.load(Entity1.class, 1);
                e1.setName("entity-1#name-1#UPDATED-1");
                ses.update(e1);
                ses.save(new Entity2(2, "entity-2#name-2#ADDED"));
                tx.commit();
            }
            finally {
                ses.close();
            }
            loadEntities(sesFactory1);
            assertEquals(1, grid(0).cache("cache1").size());
            assertEquals(2, grid(0).cache("cache2").size());
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 1), Is.is("entity-1#name-1#UPDATED-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 1), Is.is("entity-2#name-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 2), Is.is("entity-2#name-2#ADDED"));
            // 3. Updating, adding, updating.
            ses = sesFactory1.openSession();
            try {
                Transaction tx = ses.beginTransaction();
                Entity2 e2_1 = (Entity2)ses.load(Entity2.class, 1);
                e2_1.setName("entity-2#name-1#UPDATED-1");
                ses.update(e2_1);
                ses.save(new Entity1(2, "entity-1#name-2#ADDED"));
                Entity1 e1_1 = (Entity1)ses.load(Entity1.class, 1);
                e1_1.setName("entity-1#name-1#UPDATED-2");
                ses.update(e1_1);
                tx.commit();
            }
            finally {
                ses.close();
            }
            loadEntities(sesFactory1);
            assertEquals(2, grid(0).cache("cache1").size());
            assertEquals(2, grid(0).cache("cache2").size());
            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 1), Is.is("entity-2#name-1#UPDATED-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 2), Is.is("entity-1#name-2#ADDED"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 1), Is.is("entity-1#name-1#UPDATED-2"));
            // Log cache statistics for diagnostic purposes.
            ses = sesFactory1.openSession();
            sesFactory1.getStatistics().logSummary();
            ses.close();
        }
        finally {
            cleanup();
        }
    }
/**
* @param sesFactory Session factory.
*/
private void loadEntities(SessionFactory sesFactory) {
Session ses = sesFactory.openSession();
try {
List<Entity1> list1 = ses.createCriteria(ENTITY1_NAME).list();
for (Entity1 e1 : list1)
assertNotNull(e1.getName());
List<Entity2> list2 = ses.createCriteria(ENTITY2_NAME).list();
for (Entity2 e2 : list2)
assertNotNull(e2.getName());
}
finally {
ses.close();
}
}
    /**
     * Scans the given Ignite cache region for the entry whose key id matches {@code id} and returns the
     * cached "name" attribute, or null if no such entry exists. Relies on structured cache entries
     * (USE_STRUCTURED_CACHE), where each cached value is a map of attribute name to value.
     *
     * @param sesFactory Session Factory.
     * @param regionName Region Name.
     * @param id         Id.
     * @return Entity Name, or null if the id is not present in the region.
     */
    private String getEntityNameFromRegion(SessionFactory sesFactory, String regionName, int id) {
        // NOTE(review): the session is opened/closed but otherwise unused here — confirm whether it is
        // required to keep the factory active during the region scan.
        Session ses = sesFactory.openSession();
        try {
            for (Cache.Entry<Object, Object> entry : grid(0).cache(regionName)) {
                if (((HibernateKeyWrapper)entry.getKey()).id().equals(id))
                    return (String) ((HashMap) entry.getValue()).get("name");
            }
            return null;
        }
        finally {
            ses.close();
        }
    }
    /**
     * Builds a Hibernate session factory configured to use Ignite as the L2 cache, mapping Entity1/Entity2
     * to the "cache1"/"cache2" regions and enabling structured cache entries.
     *
     * @param accessType         Cache access type.
     * @param igniteInstanceName Name of the grid providing caches.
     * @return Session factory.
     */
    private SessionFactory startHibernate(AccessType accessType, String igniteInstanceName) {
        StandardServiceRegistryBuilder builder = new StandardServiceRegistryBuilder();
        builder.applySetting("hibernate.connection.url", CONNECTION_URL);
        // Reuse the base Hibernate/Ignite property set from HibernateL2CacheSelfTest.
        for (Map.Entry<String, String> e : HibernateL2CacheSelfTest.hibernateProperties(igniteInstanceName, accessType.name()).entrySet())
            builder.applySetting(e.getKey(), e.getValue());
        builder.applySetting(USE_STRUCTURED_CACHE, "true");
        // Map each entity/region name to its backing Ignite cache.
        builder.applySetting(REGION_CACHE_PROPERTY + ENTITY1_NAME, "cache1");
        builder.applySetting(REGION_CACHE_PROPERTY + ENTITY2_NAME, "cache2");
        builder.applySetting(REGION_CACHE_PROPERTY + TIMESTAMP_CACHE, TIMESTAMP_CACHE);
        builder.applySetting(REGION_CACHE_PROPERTY + QUERY_CACHE, QUERY_CACHE);
        MetadataSources metadataSources = new MetadataSources(builder.build());
        metadataSources.addAnnotatedClass(Entity1.class);
        metadataSources.addAnnotatedClass(Entity2.class);
        metadataSources.addAnnotatedClass(Entity3.class);
        metadataSources.addAnnotatedClass(Entity4.class);
        Metadata metadata = metadataSources.buildMetadata();
        // Apply the requested concurrency strategy to every root entity mapping.
        for (PersistentClass entityBinding : metadata.getEntityBindings()) {
            if (!entityBinding.isInherited())
                ((RootClass)entityBinding).setCacheConcurrencyStrategy(accessType.getExternalName());
        }
        return metadata.buildSessionFactory();
    }
/**
 * Test Hibernate entity1. The {@code @Id} annotation sits on the getter, so
 * Hibernate uses property (getter/setter) access for this class.
 */
@javax.persistence.Entity
@SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
@Cacheable
public static class Entity1 {
    /** Primary key. */
    private int id;
    /** Name attribute cached in the "cache1" region. */
    private String name;
    /**
     * Default constructor required by Hibernate.
     */
    public Entity1() {
        // No-op.
    }
    /**
     * @param id ID.
     * @param name Name.
     */
    Entity1(int id, String name) {
        this.id = id;
        this.name = name;
    }
    /**
     * @return ID.
     */
    @Id
    public int getId() {
        return id;
    }
    /**
     * @param id ID.
     */
    public void setId(int id) {
        this.id = id;
    }
    /**
     * @return Name.
     */
    public String getName() {
        return name;
    }
    /**
     * @param name Name.
     */
    public void setName(String name) {
        this.name = name;
    }
}
/**
 * Test Hibernate entity2. Same shape as {@link Entity1}; mapped to the
 * "cache2" region. Property access is driven by the {@code @Id} on the getter.
 */
@javax.persistence.Entity
@SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
@Cacheable
public static class Entity2 {
    /** Primary key. */
    private int id;
    /** Name attribute. */
    private String name;
    /**
     * Default constructor required by Hibernate.
     */
    public Entity2() {
        // No-op.
    }
    /**
     * @param id ID.
     * @param name Name.
     */
    Entity2(int id, String name) {
        this.id = id;
        this.name = name;
    }
    /**
     * @return ID.
     */
    @Id
    public int getId() {
        return id;
    }
    /**
     * @param id ID.
     */
    public void setId(int id) {
        this.id = id;
    }
    /**
     * @return Name.
     */
    public String getName() {
        return name;
    }
    /**
     * @param name Name.
     */
    public void setName(String name) {
        this.name = name;
    }
}
/**
 * Test Hibernate entity3. Unlike Entity1/Entity2, the (id, name) constructor
 * is public. No explicit region mapping is configured for this entity.
 */
@javax.persistence.Entity
@SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
@Cacheable
public static class Entity3 {
    /** Primary key. */
    private int id;
    /** Name attribute. */
    private String name;
    /**
     * Default constructor required by Hibernate.
     */
    public Entity3() {
        // No-op.
    }
    /**
     * @param id ID.
     * @param name Name.
     */
    public Entity3(int id, String name) {
        this.id = id;
        this.name = name;
    }
    /**
     * @return ID.
     */
    @Id
    public int getId() {
        return id;
    }
    /**
     * @param id ID.
     */
    public void setId(int id) {
        this.id = id;
    }
    /**
     * @return Name.
     */
    public String getName() {
        return name;
    }
    /**
     * @param name Name.
     */
    public void setName(String name) {
        this.name = name;
    }
}
/**
 * Test Hibernate entity4. Same shape as {@link Entity3}; no explicit region
 * mapping is configured for this entity.
 */
@javax.persistence.Entity
@SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
@Cacheable
public static class Entity4 {
    /** Primary key. */
    private int id;
    /** Name attribute. */
    private String name;
    /**
     * Default constructor required by Hibernate.
     */
    public Entity4() {
        // No-op.
    }
    /**
     * @param id ID.
     * @param name Name.
     */
    public Entity4(int id, String name) {
        this.id = id;
        this.name = name;
    }
    /**
     * @return ID.
     */
    @Id
    public int getId() {
        return id;
    }
    /**
     * @param id ID.
     */
    public void setId(int id) {
        this.id = id;
    }
    /**
     * @return Name.
     */
    public String getName() {
        return name;
    }
    /**
     * @param name Name.
     */
    public void setName(String name) {
        this.name = name;
    }
}
/**
 * Closes session factories and clears data from caches.
 *
 * @throws Exception If failed.
 */
private void cleanup() throws Exception {
    // Close first, then drop the reference (a failed close keeps the field set,
    // matching the original behavior on the exception path).
    if (sesFactory1 != null) {
        sesFactory1.close();
    }
    sesFactory1 = null;
    // Wipe every Ignite cache of grid 0 so the next test starts clean.
    for (IgniteCacheProxy<?, ?> cache : ((IgniteKernal)grid(0)).caches()) {
        cache.clear();
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.placement;
import org.apache.hadoop.yarn.api.records.*;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.AllocationTags;
import org.apache.hadoop.yarn.api.resource.PlacementConstraints;
import org.apache.hadoop.yarn.exceptions.SchedulerInvalidResoureRequestException;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AppSchedulingInfo;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.SchedulingMode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.TestUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.AllocationTagsManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.InvalidAllocationTagsQueryException;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.MemoryPlacementConstraintManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.PlacementConstraintManager;
import org.apache.hadoop.yarn.server.scheduler.SchedulerRequestKey;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.util.HashSet;
import java.util.Set;
import java.util.function.LongBinaryOperator;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Test behaviors of single constraint app placement allocator.
*/
public class TestSingleConstraintAppPlacementAllocator {
private AppSchedulingInfo appSchedulingInfo;
private AllocationTagsManager spyAllocationTagsManager;
private RMContext rmContext;
private SchedulerRequestKey schedulerRequestKey;
private SingleConstraintAppPlacementAllocator allocator;
@Before
public void setup() throws Exception {
    // Stub app scheduling info with fixed application / attempt ids.
    appSchedulingInfo = mock(AppSchedulingInfo.class);
    when(appSchedulingInfo.getApplicationId()).thenReturn(
        TestUtils.getMockApplicationId(1));
    when(appSchedulingInfo.getApplicationAttemptId()).thenReturn(
        TestUtils.getMockApplicationAttemptId(1, 1));
    // Stub RMContext.
    rmContext = TestUtils.getMockRMContext();
    // Create allocation tags manager; a spy is kept so tests can verify
    // that canAllocate() actually queries tag cardinality.
    AllocationTagsManager allocationTagsManager = new AllocationTagsManager(
        rmContext);
    PlacementConstraintManager placementConstraintManager =
        new MemoryPlacementConstraintManager();
    spyAllocationTagsManager = spy(allocationTagsManager);
    schedulerRequestKey = new SchedulerRequestKey(Priority.newInstance(1), 2L,
        TestUtils.getMockContainerId(1, 1));
    // Wire the managers into the context BEFORE creating the allocator,
    // since initialize() reads them from rmContext.
    rmContext.setAllocationTagsManager(spyAllocationTagsManager);
    rmContext.setPlacementConstraintManager(placementConstraintManager);
    // Create the allocator under test.
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
}
/**
 * Asserts that {@code request} is accepted by a freshly created allocator.
 * A new allocator is built per call so state from earlier requests cannot
 * leak into this validation.
 *
 * @param request Scheduling request expected to be valid.
 */
private void assertValidSchedulingRequest(
    SchedulingRequest request) {
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, request, false);
}
/**
 * Asserts that {@code request} is rejected by the allocator with a
 * {@link SchedulerInvalidResoureRequestException}; fails the test otherwise.
 *
 * @param request Scheduling request expected to be invalid.
 * @param recreateAllocator Whether to rebuild the allocator first (so prior
 *        state cannot mask the rejection).
 */
private void assertInvalidSchedulingRequest(
    SchedulingRequest request, boolean recreateAllocator) {
    boolean rejected = false;
    try {
        // Recreation stays inside the try so any rejection raised during
        // initialization is treated the same as one from updatePendingAsk().
        if (recreateAllocator) {
            allocator = new SingleConstraintAppPlacementAllocator();
            allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
        }
        allocator.updatePendingAsk(schedulerRequestKey, request, false);
    } catch (SchedulerInvalidResoureRequestException e) {
        rejected = true;
    }
    if (!rejected) {
        Assert.fail(
            "Expect failure for schedulingRequest=" + request.toString());
    }
}
@Test
public void testSchedulingRequestValidation() {
    // Valid: anti-affinity to mapper/reducer tags on the default ("") partition.
    assertValidSchedulingRequest(SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer"),
                PlacementConstraints.PlacementTargets.nodePartition(""))
            .build()).resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build());
    Assert.assertEquals("", allocator.getTargetNodePartition());
    // Valid: explicit target partition "x".
    assertValidSchedulingRequest(SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer"),
                PlacementConstraints.PlacementTargets.nodePartition("x"))
            .build()).resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build());
    Assert.assertEquals("x", allocator.getTargetNodePartition());
    // Valid: no node partition specified -> defaults to "".
    assertValidSchedulingRequest(SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer")).build())
        .resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build());
    Assert.assertEquals("", allocator.getTargetNodePartition());
    // Valid (with application Id target).
    // NOTE(review): this request is byte-identical to the previous one and
    // contains no application-id target; confirm whether an
    // allocationTagWithNamespace(...) target was intended here.
    assertValidSchedulingRequest(SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer")).build())
        .resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build());
    // Allocation tags should not include application Id.
    Assert.assertEquals("", allocator.getTargetNodePartition());
    // Invalid: missing resource sizing.
    assertInvalidSchedulingRequest(SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer")).build())
        .build(), true);
    // Invalid: constraint has no target tags at all.
    assertInvalidSchedulingRequest(SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE).build())
        .build(), true);
    // Invalid: execution type is OPPORTUNISTIC, not GUARANTEED.
    assertInvalidSchedulingRequest(SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.OPPORTUNISTIC))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer"),
                PlacementConstraints.PlacementTargets.nodePartition(""))
            .build()).resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build(), true);
}
@Test
public void testSchedulingRequestUpdate() {
    SchedulingRequest schedulingRequest =
        SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(10L).priority(Priority.newInstance(1))
            .placementConstraintExpression(PlacementConstraints
                .targetNotIn(PlacementConstraints.NODE,
                    PlacementConstraints.PlacementTargets
                        .allocationTag("mapper", "reducer"),
                    PlacementConstraints.PlacementTargets.nodePartition(""))
                .build()).resourceSizing(
                ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
            .build();
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest, false);
    // Updating with exactly the same scheduling request should succeed.
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest, false);
    // Updating with a request that differs only in #allocations should succeed.
    schedulingRequest.getResourceSizing().setNumAllocations(10);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest, false);
    // Updating with a request that differs in resource size should fail.
    schedulingRequest.getResourceSizing().setResources(
        Resource.newInstance(2048, 1));
    assertInvalidSchedulingRequest(schedulingRequest, false);
    // Updating with a different placement target (allocation tag) should fail.
    schedulingRequest = SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetCardinality(PlacementConstraints.NODE, 0, 1,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper"),
                PlacementConstraints.PlacementTargets.nodePartition(""))
            .build()).resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build();
    assertInvalidSchedulingRequest(schedulingRequest, false);
    // Update with recovery == true: the pending #allocations should grow by
    // one on top of whatever is currently pending.
    int existingNumAllocations =
        allocator.getSchedulingRequest().getResourceSizing()
            .getNumAllocations();
    schedulingRequest = SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer"),
                PlacementConstraints.PlacementTargets.nodePartition(""))
            .build()).resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build();
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest, true);
    Assert.assertEquals(existingNumAllocations + 1,
        allocator.getSchedulingRequest().getResourceSizing()
            .getNumAllocations());
}
/**
 * Verifies that canAllocate() consults the allocation tags manager for the
 * candidate node, and that precheckNode() honours the request's target
 * node partition under RESPECT_PARTITION_EXCLUSIVITY.
 *
 * @throws InvalidAllocationTagsQueryException if the cardinality query fails.
 */
@Test
public void testFunctionality() throws InvalidAllocationTagsQueryException {
    // Request targeting the default ("") partition.
    SchedulingRequest schedulingRequest =
        SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(10L).priority(Priority.newInstance(1))
            .placementConstraintExpression(PlacementConstraints
                .targetNotIn(PlacementConstraints.NODE,
                    PlacementConstraints.PlacementTargets
                        .allocationTag("mapper", "reducer"),
                    PlacementConstraints.PlacementTargets.nodePartition(""))
                .build()).resourceSizing(
                ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
            .build();
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest, false);
    allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNode("host1", "/rack1", 123, 1024));
    // canAllocate() must query tag cardinality for the candidate node.
    verify(spyAllocationTagsManager, Mockito.times(1)).getNodeCardinalityByOp(
        eq(NodeId.fromString("host1:123")), any(AllocationTags.class),
        any(LongBinaryOperator.class));
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    // Same request but targeting partition "x".
    schedulingRequest = SchedulingRequest.newBuilder().executionType(
        ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
        .allocationRequestId(10L).priority(Priority.newInstance(1))
        .placementConstraintExpression(PlacementConstraints
            .targetNotIn(PlacementConstraints.NODE,
                PlacementConstraints.PlacementTargets
                    .allocationTag("mapper", "reducer"),
                PlacementConstraints.PlacementTargets.nodePartition("x"))
            .build()).resourceSizing(
            ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
        .build();
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest, false);
    allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNode("host1", "/rack1", 123, 1024));
    verify(spyAllocationTagsManager, Mockito.atLeast(1)).getNodeCardinalityByOp(
        eq(NodeId.fromString("host1:123")), any(AllocationTags.class),
        any(LongBinaryOperator.class));
    // Node in partition "x" matches the request's target partition.
    SchedulerNode node1 = mock(SchedulerNode.class);
    when(node1.getPartition()).thenReturn("x");
    when(node1.getNodeID()).thenReturn(NodeId.fromString("host1:123"));
    Assert.assertTrue(allocator
        .precheckNode(node1, SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY));
    // BUG FIX: the two stubs below previously targeted node1 again, leaving
    // node2 an unstubbed mock (getPartition() == null), so the negative
    // check passed for the wrong reason. Stub node2 so the check genuinely
    // exercises a node in the default partition.
    SchedulerNode node2 = mock(SchedulerNode.class);
    when(node2.getPartition()).thenReturn("");
    when(node2.getNodeID()).thenReturn(NodeId.fromString("host2:123"));
    Assert.assertFalse(allocator
        .precheckNode(node2, SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY));
}
@Test
public void testNodeAttributesFunctionality() {
    // 1. Simple java=1.8 (EQ) constraint against a node that has java=1.8.
    SchedulingRequest schedulingRequest =
        SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(10L).priority(Priority.newInstance(1))
            .placementConstraintExpression(PlacementConstraints
                .targetNodeAttribute(PlacementConstraints.NODE,
                    NodeAttributeOpCode.EQ,
                    PlacementConstraints.PlacementTargets
                        .nodeAttribute("java", "1.8"),
                    PlacementConstraints.PlacementTargets.nodePartition(""))
                .build()).resourceSizing(
                ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
            .build();
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest, false);
    Set<NodeAttribute> attributes = new HashSet<>();
    attributes.add(
        NodeAttribute.newInstance("java", NodeAttributeType.STRING, "1.8"));
    boolean result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert.assertTrue("Allocation should be success for java=1.8", result);
    // 2. python!=3 (NE) constraint against a node with no attributes at all.
    SchedulingRequest schedulingRequest2 =
        SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(10L).priority(Priority.newInstance(1))
            .placementConstraintExpression(PlacementConstraints
                .targetNodeAttribute(PlacementConstraints.NODE,
                    NodeAttributeOpCode.NE,
                    PlacementConstraints.PlacementTargets
                        .nodeAttribute("python", "3"),
                    PlacementConstraints.PlacementTargets.nodePartition(""))
                .build()).resourceSizing(
                ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
            .build();
    // Fresh allocator so state from step 1 cannot interfere.
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest2, false);
    attributes = new HashSet<>();
    result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert.assertTrue("Allocation should be success as python doesn't exist",
        result);
    // 3. python!=3 constraint against a node with python=2 (NE satisfied).
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest2, false);
    attributes = new HashSet<>();
    attributes.add(
        NodeAttribute.newInstance("python", NodeAttributeType.STRING, "2"));
    result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert.assertTrue(
        "Allocation should be success as python=3 doesn't exist in node",
        result);
    // 4. python!=3 constraint against a node with python=3 (NE violated).
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest2, false);
    attributes = new HashSet<>();
    attributes.add(
        NodeAttribute.newInstance("python", NodeAttributeType.STRING, "3"));
    result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert.assertFalse("Allocation should fail as python=3 exist in node",
        result);
}
@Test
public void testConjunctionNodeAttributesFunctionality() {
    // 1. and(python!=3, java=1.8) against a node with python=3 and java=1.8:
    //    the AND must fail because the NE branch is violated.
    SchedulingRequest schedulingRequest1 =
        SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(10L).priority(Priority.newInstance(1))
            .placementConstraintExpression(
                PlacementConstraints.and(
                    PlacementConstraints
                        .targetNodeAttribute(PlacementConstraints.NODE,
                            NodeAttributeOpCode.NE,
                            PlacementConstraints.PlacementTargets
                                .nodeAttribute("python", "3")),
                    PlacementConstraints
                        .targetNodeAttribute(PlacementConstraints.NODE,
                            NodeAttributeOpCode.EQ,
                            PlacementConstraints.PlacementTargets
                                .nodeAttribute("java", "1.8")))
                .build()).resourceSizing(
                ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
            .build();
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest1, false);
    Set<NodeAttribute> attributes = new HashSet<>();
    attributes.add(
        NodeAttribute.newInstance("python", NodeAttributeType.STRING, "3"));
    attributes.add(
        NodeAttribute.newInstance("java", NodeAttributeType.STRING, "1.8"));
    boolean result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert.assertFalse("Allocation should fail as python=3 exists in node",
        result);
    // 2. Same AND constraint against python=2 and java=1.8: both branches
    //    hold, so allocation succeeds.
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest1, false);
    attributes = new HashSet<>();
    attributes.add(
        NodeAttribute.newInstance("python", NodeAttributeType.STRING, "2"));
    attributes.add(
        NodeAttribute.newInstance("java", NodeAttributeType.STRING, "1.8"));
    result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert.assertTrue("Allocation should be success as python=2 exists in node",
        result);
    // 3. or(python!=3, java=1.8) against python=3 and java=1.8: the EQ
    //    branch holds, so the OR succeeds.
    SchedulingRequest schedulingRequest2 =
        SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(10L).priority(Priority.newInstance(1))
            .placementConstraintExpression(
                PlacementConstraints.or(
                    PlacementConstraints
                        .targetNodeAttribute(PlacementConstraints.NODE,
                            NodeAttributeOpCode.NE,
                            PlacementConstraints.PlacementTargets
                                .nodeAttribute("python", "3")),
                    PlacementConstraints
                        .targetNodeAttribute(PlacementConstraints.NODE,
                            NodeAttributeOpCode.EQ,
                            PlacementConstraints.PlacementTargets
                                .nodeAttribute("java", "1.8")))
                .build()).resourceSizing(
                ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)))
            .build();
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest2, false);
    attributes = new HashSet<>();
    attributes.add(
        NodeAttribute.newInstance("python", NodeAttributeType.STRING, "3"));
    attributes.add(
        NodeAttribute.newInstance("java", NodeAttributeType.STRING, "1.8"));
    result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert.assertTrue("Allocation should be success as java=1.8 exists in node",
        result);
    // 4. Same OR constraint against python=3 and java=1.7: neither branch
    //    holds, so allocation fails.
    allocator = new SingleConstraintAppPlacementAllocator();
    allocator.initialize(appSchedulingInfo, schedulerRequestKey, rmContext);
    allocator.updatePendingAsk(schedulerRequestKey, schedulingRequest2, false);
    attributes = new HashSet<>();
    attributes.add(
        NodeAttribute.newInstance("python", NodeAttributeType.STRING, "3"));
    attributes.add(
        NodeAttribute.newInstance("java", NodeAttributeType.STRING, "1.7"));
    result = allocator.canAllocate(NodeType.NODE_LOCAL,
        TestUtils.getMockNodeWithAttributes("host1", "/rack1", 123, 1024,
            attributes));
    Assert
        .assertFalse("Allocation should fail as java=1.8 doesnt exist in node",
            result);
}
}
| |
package uk.co.itmoore.intellisubsteps.ui;
/**
* Created by ian on 06/09/15.
*/
import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.testframework.PoolOfTestIcons;
import com.intellij.execution.testframework.Printer;
import com.intellij.execution.testframework.TestTreeView;
import com.intellij.execution.testframework.ui.TestResultsPanel;
import com.intellij.execution.testframework.ui.TestStatusLine;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.Alarm;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeCellRenderer;
import java.awt.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
public class ConsolePanel extends TestResultsPanel {
@NonNls private static final String PROPORTION_PROPERTY = "test_tree_console_proprtion";
private static final float DEFAULT_PROPORTION = 0.2f;
private SubstepsStatusLine myStatusLine;
// private StatisticsPanel myStatisticsPanel;
private SubstepsTestTreeView myTreeView;
private TestsOutputConsolePrinter myPrinter;
private StartingProgress myStartingProgress;
/**
 * Builds the panel around an existing console component.
 *
 * @param console Console output component placed on the right side.
 * @param printer Printer that feeds test output into the console.
 * @param properties Console properties for this run configuration.
 * @param environment Execution environment (currently unused here; kept for
 *        API symmetry with other test-runner panels).
 * @param consoleActions Toolbar actions attached to the console.
 */
public ConsolePanel(final JComponent console,
    final TestsOutputConsolePrinter printer,
    final SubstepsConsoleProperties properties,
    final ExecutionEnvironment environment,
    AnAction[] consoleActions) {
    super(console, consoleActions, properties, PROPORTION_PROPERTY, DEFAULT_PROPORTION);
    myPrinter = printer;
}
/**
 * Initializes the UI, then attaches the animated "starting..." placeholder
 * to the tree view created by {@link #createTestTreeView()} during super.initUI().
 */
@Override
public void initUI() {
    super.initUI();
    myStartingProgress = new StartingProgress(myTreeView);
}
// @Override
// protected JComponent createStatisticsPanel() {
// myStatisticsPanel = new StatisticsPanel();
// return myStatisticsPanel;
// }
// @Override
// protected ToolbarPanel createToolbarPanel() {
// return new JUnitToolbarPanel(myProperties, myEnvironment, this);
// }
/** Creates and remembers the Substeps status line shown above the tree. */
@Override
protected TestStatusLine createStatusLine() {
    myStatusLine = new SubstepsStatusLine();
    return myStatusLine;
}
/** Creates and remembers the test tree view shown on the left side. */
@Override
protected JComponent createTestTreeView() {
    myTreeView = new SubstepsTestTreeView();
    return myTreeView;
}
/**
 * Notifies the status line that the test process started and, when the
 * starting-progress placeholder is still alive, starts its elapsed-time
 * animation for the given process.
 *
 * @param process Handler of the launched test process.
 */
public void onProcessStarted(final ProcessHandler process) {
    myStatusLine.onProcessStarted(process);
    final StartingProgress progress = myStartingProgress;
    if (progress != null) {
        progress.start(process);
    }
}
/**
 * Swaps the "starting..." placeholder for the real test tree of the running
 * model and propagates the model to the toolbar and status line.
 *
 * @param model Running model whose tree replaces the placeholder.
 */
public void setModel(final SubstepsRunningModel model) {
    stopStartingProgress();
    final TestTreeView treeView = model.getTreeView();
    // Large model mode: required for fixed-height rows / lazy UI updates on
    // big trees.
    treeView.setLargeModel(true);
    setLeftComponent(treeView);
    myToolbarPanel.setModel(model);
    myStatusLine.setModel(model);
    //
    // adding this listener results in the console being cleared after every test
    // model.addListener(new SubstepsAdapter() {
    // @Override
    // public void onTestSelected(final SubstepsTestProxy test) {
    // if (myPrinter != null) myPrinter.updateOnTestSelected(test);
    // }
    // });
    // myStatisticsPanel.attachTo(model);
}
/** Stops the starting-progress animation (if any) and drops the reference. */
private void stopStartingProgress() {
    if (myStartingProgress != null) myStartingProgress.doStop();
    myStartingProgress = null;
}
/** @return Tree view created in {@link #createTestTreeView()}. */
public SubstepsTestTreeView getTreeView() {
    return myTreeView;
}
/** @return Console printer, or {@code null} after {@link #dispose()}. */
public Printer getPrinter() {
    return myPrinter;
}
/**
 * Disposes the panel: stops the starting-progress animation and releases the
 * printer so it can be garbage collected.
 */
@Override
public void dispose() {
    stopStartingProgress();
    myPrinter = null;
}
/**
 * Animated placeholder shown in the test tree while the test process is
 * starting: a single busy row whose label re-renders every 300 ms with the
 * elapsed time. Stops itself when the process terminates or when a real
 * tree model is installed.
 */
private static class StartingProgress implements Runnable {
    // Drives the periodic repaint requests.
    private final Alarm myAlarm = new Alarm();
    private final Tree myTree;
    private final DefaultTreeModel myModel;
    private final DefaultMutableTreeNode myRootNode = new DefaultMutableTreeNode();
    // True once the first repaint has been scheduled from the renderer.
    private boolean myStarted = false;
    // True once doStop() ran; suppresses further repaint scheduling.
    private boolean myStopped = false;
    // Shared renderer component, reconfigured on every cell paint.
    private final SimpleColoredComponent myStartingLabel;
    private ProcessHandler myProcess;
    private long myStartedAt = System.currentTimeMillis();
    // Stops the animation (on the EDT) when the process terminates.
    private final ProcessAdapter myProcessListener = new ProcessAdapter() {
        @Override
        public void processTerminated(ProcessEvent event) {
            ApplicationManager.getApplication().invokeLater(new Runnable() {
                @Override
                public void run() {
                    doStop();
                }
            });
        }
    };
    public StartingProgress(final Tree tree) {
        myTree = tree;
        myModel = new DefaultTreeModel(myRootNode);
        myTree.setModel(myModel);
        myStartingLabel = new SimpleColoredComponent();
        myTree.setPaintBusy(true);
        //myStartingLabel.setBackground(UIManager.getColor("Tree.background"));
        myTree.setCellRenderer(new TreeCellRenderer() {
            @NotNull
            @Override
            public Component getTreeCellRendererComponent(@NotNull final JTree tree, final Object value,
                final boolean selected, final boolean expanded,
                final boolean leaf, final int row, final boolean hasFocus) {
                myStartingLabel.clear();
                myStartingLabel.setIcon(PoolOfTestIcons.LOADING_ICON);
                myStartingLabel.append(getProgressText(), SimpleTextAttributes.REGULAR_ATTRIBUTES);
                // Kick off the repaint cycle lazily, on first paint.
                if (!myStarted) postRepaint();
                return myStartingLabel;
            }
        });
        // A real model being installed means tests are running: remove this
        // one-shot listener and stop the placeholder animation.
        myTree.addPropertyChangeListener(JTree.TREE_MODEL_PROPERTY, new PropertyChangeListener() {
            @Override
            public void propertyChange(@NotNull final PropertyChangeEvent evt) {
                myTree.removePropertyChangeListener(JTree.TREE_MODEL_PROPERTY, this);
                doStop();
            }
        });
    }
    // Stops the animation, cancels pending repaints and detaches from the process.
    private void doStop() {
        myStopped = true;
        myTree.setPaintBusy(false);
        // One last node refresh so the "not started" text is shown.
        myModel.nodeChanged(myRootNode);
        myAlarm.cancelAllRequests();
        if (myProcess != null) myProcess.removeProcessListener(myProcessListener);
        myProcess = null;
    }
    // Alarm callback: refresh the row and schedule the next tick.
    @Override
    public void run() {
        myModel.nodeChanged(myRootNode);
        postRepaint();
    }
    // Schedules the next repaint tick unless already stopped.
    private void postRepaint() {
        if (myStopped) return;
        myStarted = true;
        myAlarm.cancelAllRequests();
        myAlarm.addRequest(this, 300, ModalityState.NON_MODAL);
    }
    /**
     * Binds the placeholder to the launched process so termination stops the
     * animation; no-op when the process already finished.
     */
    public void start(final ProcessHandler process) {
        if (process.isProcessTerminated()) return;
        myProcess = process;
        myStartedAt = System.currentTimeMillis();
        process.addProcessListener(myProcessListener);
    }
    // Builds the label text: phase name plus elapsed milliseconds.
    private String getProgressText() {
        if (myStopped) return ExecutionBundle.message("test.not.started.progress.text");
        final long millis = System.currentTimeMillis() - myStartedAt;
        final String phaseName = myProcess == null ? ExecutionBundle.message("starting.jvm.progress.text") : ExecutionBundle.message("instantiating.tests.progress.text");
        //return phaseName + Formatters.printMinSec(millis);
        return phaseName + millis + " msec";
    }
}
}
| |
/**
* $RCSfile$
* $Revision: $
* $Date: $
*
* Copyright (C) 2005-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.sip.tester.stack;
import org.jivesoftware.openfire.sip.tester.comm.CommunicationsException;
import org.jivesoftware.openfire.sip.tester.comm.CommunicationsListener;
import org.jivesoftware.openfire.sip.tester.Log;
import org.jivesoftware.openfire.sip.tester.security.UserCredentials;
import org.jivesoftware.openfire.sip.tester.security.SipSecurityManager;
import javax.sip.*;
import javax.sip.address.Address;
import javax.sip.address.AddressFactory;
import javax.sip.address.SipURI;
import javax.sip.header.*;
import javax.sip.message.MessageFactory;
import javax.sip.message.Request;
import javax.sip.message.Response;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.TooManyListenersException;
/**
* Title: SIP Register Tester
*
* @author Thiago Rocha Camargo (thiago@jivesoftware.com)
*/
public class SipManager implements SipListener {
protected static final int RETRY_OBJECT_DELETES = 10;
protected static final long RETRY_OBJECT_DELETES_AFTER = 500;
protected static final String DEFAULT_TRANSPORT = "udp";
protected InetAddress localAddress = null;
public SipFactory sipFactory;
public AddressFactory addressFactory;
public HeaderFactory headerFactory;
public MessageFactory messageFactory;
SipStack sipStack;
public boolean isBusy = true;
ListeningPoint listeningPoint;
public SipProvider sipProvider;
private InetSocketAddress publicIpAddress = null;
protected String sipStackPath = "gov.nist";
protected String currentlyUsedURI = null;
protected String displayName = null;
protected String transport = null;
protected String registrarAddress = null;
protected int localPort = -1;
protected int registrarPort = -1;
protected int registrationsExpiration = -1;
protected String registrarTransport = null;
//private int registerRetries = 0;
protected String stackAddress = null;
protected String stackName = "JiveSIP";
protected FromHeader fromHeader = null;
protected ContactHeader contactHeader = null;
protected ArrayList<ViaHeader> viaHeaders = null;
protected static final int MAX_FORWARDS = 70;
protected MaxForwardsHeader maxForwardsHeader = null;
protected long registrationTransaction = -1;
protected ArrayList<CommunicationsListener> listeners = new ArrayList<CommunicationsListener>();
protected boolean isStarted = false;
private RegisterProcessing registerProcessing = null;
public SipSecurityManager sipSecurityManager = null;
/**
* Constructor. It only creates a SipManager instance without initializing
* the stack itself.
*
* @param localAddress localAddress
*/
public SipManager(InetAddress localAddress) {
    // Only wires up collaborators; the JAIN-SIP stack itself is created in start().
    this.localAddress = localAddress;
    registerProcessing = new RegisterProcessing(this);
    sipSecurityManager = new SipSecurityManager();
    //registerRetries = 0;
}
/**
* Creates and initializes JAIN SIP objects (factories, stack, listening
* point and provider). Once this method is called the application is ready
* to handle (incoming and outgoing) sip messages.
*
* @throws CommunicationsException if an axception should occur during the initialization
* process
*/
public void start() throws CommunicationsException {
    // Pull configuration into fields, then push it into system properties for the stack.
    initProperties();
    SIPConfig.setSystemProperties();
    this.sipFactory = SipFactory.getInstance();
    sipFactory.setPathName(sipStackPath);
    try {
        addressFactory = sipFactory.createAddressFactory();
        headerFactory = sipFactory.createHeaderFactory();
        messageFactory = sipFactory.createMessageFactory();
    }
    catch (PeerUnavailableException ex) {
        Log.error("start", ex);
        throw new CommunicationsException(
                "Could not create factories!", ex);
    }
    try {
        sipStack = sipFactory.createSipStack(System.getProperties());
        ((SipCommRouter) sipStack.getRouter())
                .setOutboundProxy(SIPConfig.getOutboundProxy());
    }
    catch (PeerUnavailableException ex) {
        Log.error("start", ex);
        throw new CommunicationsException(
                "Cannot connect!\n"
                        + "Cannot reach proxy.\nCheck your connection."
                        + "(Syntax:<proxy_address:port/transport>)", ex);
    }
    try {
        boolean successfullyBound = false;
        // Retry with random ports until a listening point binds.
        // NOTE(review): this loop has no retry limit — it can spin forever
        // (1 s per attempt) if no port is ever accepted; confirm acceptable.
        while (!successfullyBound) {
            try {
                publicIpAddress = new InetSocketAddress(localAddress, localPort);
                listeningPoint = sipStack.createListeningPoint(
                        localPort, transport);
            }
            catch (InvalidArgumentException ex) {
                // choose another port between 1024 and 65000
                localPort = (int) ((65000 - 1024) * Math.random()) + 1024;
                try {
                    Thread.sleep(1000);
                }
                catch (Exception e) {
                    // Do Nothing
                }
                continue;
            }
            successfullyBound = true;
        }
    }
    catch (TransportNotSupportedException ex) {
        throw new CommunicationsException(
                "Transport "
                        + transport
                        + " is not suppported by the stack!\n Try specifying another"
                        + " transport in Mais property files.\n", ex);
    }
    try {
        sipProvider = sipStack.createSipProvider(listeningPoint);
    }
    catch (ObjectInUseException ex) {
        Log.error("start", ex);
        throw new CommunicationsException(
                "Could not create factories!\n", ex);
    }
    try {
        sipProvider.addSipListener(this);
    }
    catch (TooManyListenersException exc) {
        throw new CommunicationsException(
                "Could not register SipManager as a sip listener!", exc);
    }
    sipSecurityManager.setHeaderFactory(headerFactory);
    sipSecurityManager.setTransactionCreator(sipProvider);
    sipSecurityManager.setSipManCallback(this);
    // Make sure prebuilt headers are nulled so that they get reinited
    // if this is a restart
    contactHeader = null;
    fromHeader = null;
    viaHeaders = null;
    maxForwardsHeader = null;
    isStarted = true;
}
/**
* Unregisters listening points, deletes sip providers, and generally
* prepares the stack for a re-start(). This method is meant to be used when
* properties are changed and should be reread by the stack.
*
* @throws CommunicationsException CommunicationsException
*/
synchronized public void stop() throws CommunicationsException {
    // NOTE(review): the repeated "sipStack == null" checks below suggest this
    // once raced with another teardown path; the method is synchronized, so
    // they look vestigial — confirm before simplifying.
    if (sipStack == null)
        return;
    // Delete SipProvider
    int tries;
    for (tries = 0; tries < SipManager.RETRY_OBJECT_DELETES; tries++) {
        try {
            sipStack.deleteSipProvider(sipProvider);
        }
        catch (ObjectInUseException ex) {
            // Provider still busy: back off and retry.
            SipManager.sleep(SipManager.RETRY_OBJECT_DELETES_AFTER);
            continue;
        }
        break;
    }
    if (sipStack == null)
        return;
    if (tries >= SipManager.RETRY_OBJECT_DELETES)
        throw new CommunicationsException(
                "Failed to delete the sipProvider!");
    if (sipStack == null)
        return;
    // Delete RI ListeningPoint
    for (tries = 0; tries < SipManager.RETRY_OBJECT_DELETES; tries++) {
        try {
            sipStack.deleteListeningPoint(listeningPoint);
        }
        catch (ObjectInUseException ex) {
            // Log.debug("Retrying delete of riListeningPoint!");
            SipManager.sleep(SipManager.RETRY_OBJECT_DELETES_AFTER);
            continue;
        }
        break;
    }
    if (sipStack != null) {
        // Best-effort removal of any providers still registered with the stack.
        for (Iterator<SipProvider> it = sipStack.getSipProviders(); it.hasNext();) {
            SipProvider element = it.next();
            try {
                sipStack.deleteSipProvider(element);
            }
            catch (Exception e) {
                // Do nothing
            }
        }
    }
    if (tries >= SipManager.RETRY_OBJECT_DELETES)
        throw new CommunicationsException(
                "Failed to delete a listeningPoint!");
    // Null everything so a later start() rebuilds the stack from scratch.
    listeningPoint = null;
    addressFactory = null;
    messageFactory = null;
    headerFactory = null;
    sipStack = null;
    registrarAddress = null;
    viaHeaders = null;
    contactHeader = null;
    fromHeader = null;
}
/**
* Waits during _no_less_ than sleepFor milliseconds. Had to implement it on
* top of Thread.sleep() to guarantee minimum sleep time.
*
* @param sleepFor the number of miliseconds to wait
*/
/**
 * Waits during _no_less_ than sleepFor milliseconds. Had to implement it on
 * top of Thread.sleep() to guarantee minimum sleep time.
 *
 * @param sleepFor the number of milliseconds to wait
 */
protected static void sleep(long sleepFor) {
    long startTime = System.currentTimeMillis();
    long haveBeenSleeping = 0;
    boolean interrupted = false;
    while (haveBeenSleeping < sleepFor) {
        try {
            Thread.sleep(sleepFor - haveBeenSleeping);
        }
        catch (InterruptedException ex) {
            // Bug fix: the original swallowed the interrupt entirely.  Remember
            // it and keep looping to honour the minimum-sleep guarantee; the
            // flag is restored after the loop (restoring it inside the loop
            // would make Thread.sleep throw immediately and busy-spin).
            interrupted = true;
        }
        haveBeenSleeping = (System.currentTimeMillis() - startTime);
    }
    if (interrupted) {
        // Let callers observe that an interrupt occurred while we slept.
        Thread.currentThread().interrupt();
    }
}
/**
* @param uri the currentlyUsedURI to set.
*/
public void setCurrentlyUsedURI(String uri) {
    // Overrides the URI later used to build From/Contact headers.
    this.currentlyUsedURI = uri;
}
/**
 * Sends a REGISTER for the given public address, prefixing "sip:" when missing.
 * A null/blank address is logged and ignored; all failures are logged, not thrown.
 */
public void register(String publicAddress) {
    try {
        if (publicAddress == null || publicAddress.trim().length() == 0) {
            Log.debug("PUBLIC NOT FOUND!");
            return; // maybe throw an exception?
        }
        if (!publicAddress.trim().toLowerCase().startsWith("sip:")) {
            publicAddress = "sip:" + publicAddress;
        }
        this.currentlyUsedURI = publicAddress;
        registerProcessing.register(registrarAddress, registrarPort,
                registrarTransport, registrationsExpiration);
    }
    catch (Exception e) {
        Log.error("register", e);
    }
}
/**
 * Kicks off registration: stores the user names in SIPConfig, sends an initial
 * REGISTER, then caches the (possibly server-corrected) credentials for
 * answering authentication challenges.
 */
public void startRegisterProcess(String userName, String authUserName,
String password) throws CommunicationsException {
    try {
        checkIfStarted();
        // Obtain initial credentials
        String realm = SIPConfig.getAuthenticationRealm();
        realm = realm == null ? "" : realm;
        // put the returned user name in the properties file
        // so that it appears as a default one next time user is prompted
        // for pass
        SIPConfig.setUserName(userName);
        SIPConfig.setAuthUserName(authUserName);
        UserCredentials initialCredentials = new UserCredentials();
        initialCredentials.setUserName(userName);
        initialCredentials.setAuthUserName(authUserName);
        initialCredentials.setPassword(password.toCharArray());
        register(initialCredentials.getUserName() + "@" + realm);
        // at this point a simple register request has been sent and the
        // global
        // from header in SipManager has been set to a valid value by the
        // RegisterProcesing
        // class. Use it to extract the valid user name that needs to be
        // cached by
        // the security manager together with the user provided password.
        initialCredentials.setUserName(((SipURI) getFromHeader()
                .getAddress().getURI()).getUser());
        // JOptionPane.showMessageDialog(null,( (SipURI)
        // getFromHeader().getAddress().getURI()).getUser());
        cacheCredentials(realm, initialCredentials);
    }
    catch (Exception ee) {
        // NOTE(review): the declared CommunicationsException is swallowed here,
        // so callers never observe failures — confirm this is intended.
        Log.error("startRegisterProcess", ee);
    }
}
/**
* Causes the PresenceAgent object to notify all subscribers of our brand
* new offline status and the RegisterProcessing object to send a
* registration request with a 0 "expires" interval to the registrar defined
* in net.java.mais.sip.REGISTRAR_ADDRESS.
*
* @throws CommunicationsException if an exception is thrown by the underlying stack. The
* exception that caused this CommunicationsException may be
* extracted with CommunicationsException.getCause()
*/
public void unregister() throws CommunicationsException {
    try {
        checkIfStarted();
        registerProcessing.unregister();
        fireUnregistered(registrarAddress == null ? "" : registrarAddress);
    }
    catch (Exception e) {
        // NOTE(review): failures (including the declared CommunicationsException)
        // are only logged; callers cannot observe them — confirm intended.
        Log.error("unregister", e);
    }
}
/**
 * Notifies all listeners that registration failed for the given reason.
 *
 * @param type the kind of registration failure (NotFound, WrongPass, Forbidden, ...)
 */
private void registrationFailed(RegistrationEvent.Type type) {
    try {
        fireRegistrationFailed(registrarAddress == null ? "" : registrarAddress, type);
    }
    catch (Exception e) {
        // Bug fix: the log tag was "unregister" — copy-pasted from unregister()
        // and misleading when tracing failures.
        Log.error("registrationFailed", e);
    }
}
/**
* Queries the RegisterProcessing object whether the application is
* registered with a registrar.
*
* @return true if the application is registered with a registrar.
*/
/** Reports whether the registrar currently considers this client registered. */
public boolean isRegistered() {
    if (registerProcessing == null) {
        return false;
    }
    return registerProcessing.isRegistered();
}
/**
* Determines whether the SipManager was started.
*
* @return true if the SipManager was started.
*/
public boolean isStarted() {
    // Set to true at the end of start(); reset is not performed by stop().
    return isStarted;
}
/**
* Sends a NOT_IMPLEMENTED response through the specified transaction.
*
* @param serverTransaction the transaction to send the response through.
* @param request the request that is being answered.
* @throws InvalidArgumentException
*/
/**
 * Builds and sends a 501 NOT_IMPLEMENTED response for the given request.
 * Failures are reported via fireCommunicationsError rather than thrown.
 *
 * @param serverTransaction the transaction to send the response through
 * @param request           the request being answered
 * @throws InvalidArgumentException propagated from the JAIN-SIP layer
 */
void sendNotImplemented(ServerTransaction serverTransaction, Request request) throws InvalidArgumentException {
    Response notImplemented;
    try {
        notImplemented = messageFactory.createResponse(
                Response.NOT_IMPLEMENTED, request);
        attachToTag(notImplemented, serverTransaction.getDialog());
    }
    catch (ParseException ex) {
        fireCommunicationsError(new CommunicationsException(
                "Failed to create a NOT_IMPLEMENTED response to a "
                        + request.getMethod() + " request!", ex));
        return;
    }
    try {
        serverTransaction.sendResponse(notImplemented);
    }
    catch (SipException ex) {
        // Bug fix: this failure happens while SENDING the response, but the
        // original message said "create" (copy-paste from the branch above).
        fireCommunicationsError(new CommunicationsException(
                "Failed to send a NOT_IMPLEMENTED response to a "
                        + request.getMethod() + " request!", ex));
    }
}
/** No-op in this tester: communications errors reported here are silently dropped. */
public void fireCommunicationsError(Throwable throwable) {
}
/** Returns the cached From header, building it on first use (same as getFromHeader(false)). */
public FromHeader getFromHeader() throws CommunicationsException {
    return this.getFromHeader(false);
}
/**
 * Returns the From header, rebuilding it when isNew is true or nothing is
 * cached. The URI is currentlyUsedURI plus the listening point's transport
 * and port; the from-tag is derived from this manager's hashCode.
 */
public FromHeader getFromHeader(boolean isNew)
throws CommunicationsException {
    if (fromHeader != null && !isNew) {
        return fromHeader;
    }
    try {
        SipURI fromURI = (SipURI) addressFactory
                .createURI(currentlyUsedURI);
        fromURI.setTransportParam(listeningPoint.getTransport());
        fromURI.setPort(listeningPoint.getPort());
        Address fromAddress = addressFactory.createAddress(fromURI);
        if (displayName != null && displayName.trim().length() > 0) {
            fromAddress.setDisplayName(displayName);
        } else {
            // Fall back to the credentials' display name when none is configured.
            fromAddress
                    .setDisplayName(UserCredentials.getUserDisplay());// UserCredentials.getUser());
            // JOptionPane.showMessageDialog(null,currentlyUsedURI);
        }
        fromHeader = headerFactory.createFromHeader(fromAddress,
                Integer.toString(hashCode()));
    }
    catch (ParseException ex) {
        throw new CommunicationsException(
                "A ParseException occurred while creating From Header!",
                ex);
    }
    return fromHeader;
}
/**
* Same as calling getContactHeader(true)
*
* @return the result of getContactHeader(true)
* @throws CommunicationsException if an exception is thrown while calling
* getContactHeader(false)
*/
public ContactHeader getContactHeader() throws CommunicationsException {
    // Delegates with useLocalHostAddress=true; see getContactHeader(boolean).
    return getContactHeader(true);
}
/**
* Same as calling getContactHeader(true).
*
* @return the result of calling getContactHeader(true).
* @throws CommunicationsException if an exception occurs while executing
* getContactHeader(true).
*/
ContactHeader getRegistrationContactHeader() throws CommunicationsException {
    // Same contact header as for other requests; kept as a named alias for REGISTER use.
    return getContactHeader(true);
}
/**
* Initialises SipManager's contactHeader field in accordance with
* javax.sip.IP_ADDRESS net.java.mais.sip.DISPLAY_NAME
* net.java.mais.sip.TRANSPORT net.java.mais.sip.PREFERRED_LOCAL_PORT and
* returns a reference to it.
*
* @param useLocalHostAddress specifies whether the SipURI in the contact header should
* contain the value of javax.sip.IP_ADDRESS (true) or that of
* net.java.mais.sip.PUBLIC_ADDRESS (false).
* @return a reference to SipManager's contactHeader field.
* @throws CommunicationsException if a ParseException occurs while initially composing the
* FromHeader.
*/
/**
 * Lazily builds and caches the Contact header placed in outgoing requests.
 *
 * @param useLocalHostAddress when true the SipURI is built from the user's
 *                            display name at the public local address;
 *                            otherwise from currentlyUsedURI
 * @return the cached ContactHeader
 * @throws CommunicationsException if the contact URI or address cannot be parsed
 */
public ContactHeader getContactHeader(boolean useLocalHostAddress)
        throws CommunicationsException {
    if (contactHeader != null) {
        return contactHeader;
    }
    try {
        SipURI contactURI;
        if (useLocalHostAddress) {
            contactURI = addressFactory.createSipURI(null,
                    UserCredentials.getUserDisplay()
                            + "@"
                            + publicIpAddress.getAddress()
                            .getHostAddress());
        } else {
            contactURI = (SipURI) addressFactory
                    .createURI(currentlyUsedURI);
        }
        contactURI.setPort(publicIpAddress.getPort());
        Address contactAddress = addressFactory
                .createAddress(contactURI);
        if (displayName != null && displayName.trim().length() > 0) {
            contactAddress.setDisplayName(displayName);
        }
        contactHeader = headerFactory
                .createContactHeader(contactAddress);
    }
    catch (ParseException ex) {
        // Bug fix: the message previously said "From Header" — copy-pasted
        // from getFromHeader() and misleading when diagnosing failures here.
        throw new CommunicationsException(
                "A ParseException occurred while creating Contact Header!",
                ex);
    }
    return contactHeader;
}
/**
* Initializes (if null) and returns an ArrayList with a single ViaHeader
* containing localhost's address. This ArrayList may be used when sending
* requests.
*
* @return ViaHeader-s list to be used when sending requests.
* @throws CommunicationsException if a ParseException is to occur while initializing the array
* list.
*/
/**
 * Initializes (if null) and returns an ArrayList with a single ViaHeader
 * containing localhost's address, with an empty "rport" parameter so the
 * server can report the source port back. This list may be used when
 * sending requests.
 *
 * @return ViaHeader-s list to be used when sending requests
 * @throws CommunicationsException if the via header cannot be created
 */
public ArrayList<ViaHeader> getLocalViaHeaders() throws CommunicationsException {
    if (viaHeaders != null) {
        return viaHeaders;
    }
    ListeningPoint lp = sipProvider.getListeningPoint();
    viaHeaders = new ArrayList<ViaHeader>();
    try {
        ViaHeader viaHeader = headerFactory.createViaHeader(SIPConfig
                .getIPAddress(), lp.getPort(), lp.getTransport(), null);
        viaHeader.setParameter("rport", null);
        viaHeaders.add(viaHeader);
        return viaHeaders;
    }
    catch (ParseException ex) {
        // Bug fix: the original dropped the ParseException; keep it as the
        // cause so the real parse failure is visible to callers.
        throw new CommunicationsException(
                "A ParseException occurred while creating Via Headers!", ex);
    }
    catch (InvalidArgumentException ex) {
        throw new CommunicationsException(
                "Unable to create a via header for port "
                        + lp.getPort(), ex);
    }
}
/**
* Initializes and returns SipManager's maxForwardsHeader field using the
* value specified by MAX_FORWARDS.
*
* @return an instance of a MaxForwardsHeader that can be used when sending
* requests
* @throws CommunicationsException if MAX_FORWARDS has an invalid value.
*/
/**
 * Lazily creates and caches the Max-Forwards header (value MAX_FORWARDS)
 * used on outgoing requests.
 *
 * @return the cached MaxForwardsHeader
 * @throws CommunicationsException if MAX_FORWARDS is rejected by the factory
 */
public MaxForwardsHeader getMaxForwardsHeader()
        throws CommunicationsException {
    if (maxForwardsHeader == null) {
        try {
            maxForwardsHeader = headerFactory
                    .createMaxForwardsHeader(SipManager.MAX_FORWARDS);
        }
        catch (InvalidArgumentException ex) {
            throw new CommunicationsException(
                    "A problem occurred while creating MaxForwardsHeader",
                    ex);
        }
    }
    return maxForwardsHeader;
}
/**
* Returns the user used to create the From Header URI.
*
* @return the user used to create the From Header URI.
*/
/**
 * Extracts the user part of the From-header URI, or "" when the From header
 * cannot be built.
 */
public String getLocalUser() {
    try {
        final SipURI uri = (SipURI) getFromHeader().getAddress().getURI();
        return uri.getUser();
    }
    catch (CommunicationsException ex) {
        return "";
    }
}
/**
* Generates a ToTag (the containingDialog's hashCode())and attaches it to
* response's ToHeader.
*
* @param response the response that is to get the ToTag.
* @param containingDialog the Dialog instance that is to extract a unique Tag value
* (containingDialog.hashCode())
*/
/**
 * Generates a to-tag (the containing dialog's hashCode, or the current time
 * when no dialog exists) and attaches it to the response's To header if one
 * is not already present.
 *
 * @param response         the response that is to get the to-tag
 * @param containingDialog the dialog whose hashCode supplies the tag value (may be null)
 */
public void attachToTag(Response response, Dialog containingDialog) {
    ToHeader to = (ToHeader) response.getHeader(ToHeader.NAME);
    if (to == null) {
        fireCommunicationsError(new CommunicationsException(
                "No TO header found in, attaching a to tag is therefore impossible"));
        // Bug fix: the original fell through and dereferenced the null header
        // below, throwing a NullPointerException right after reporting the error.
        return;
    }
    try {
        if (to.getTag() == null || to.getTag().trim().length() == 0) {
            int toTag = containingDialog != null ? containingDialog
                    .hashCode() : (int) System.currentTimeMillis();
            to.setTag(Integer.toString(toTag));
        }
    }
    catch (ParseException ex) {
        fireCommunicationsError(new CommunicationsException(
                "Failed to attach a TO tag to an outgoing response"));
    }
}
/**
 * Reads configuration from SIPConfig into this manager's fields, applying
 * defaults: port 5060, expiration 3600 s, "gov.nist" stack path and "udp"
 * transport. Any unexpected failure is logged and leaves partially-set fields.
 */
protected void initProperties() {
    try {
        stackAddress = getLocalHostAddress();
        // Add the host address to the properties that will pass the stack
        SIPConfig.setIPAddress(stackAddress);
        SIPConfig.setSystemProperties();
        // ensure IPv6 address compliance
        if (stackAddress.indexOf(':') != stackAddress.lastIndexOf(':')
                && stackAddress.charAt(0) != '[') {
            stackAddress = '[' + stackAddress.trim() + ']';
        }
        stackName = SIPConfig.getStackName();
        if (stackName == null) {
            stackName = "SIPark@" + Integer.toString(hashCode());
        }
        currentlyUsedURI = SIPConfig.getPublicAddress();
        if (currentlyUsedURI == null) {
            currentlyUsedURI = SIPConfig.getUserName() + "@" + stackAddress;
        }
        if (!currentlyUsedURI.trim().toLowerCase().startsWith("sip:")) {
            currentlyUsedURI = "sip:" + currentlyUsedURI.trim();
        }
        registrarAddress = SIPConfig.getRegistrarAddress();
        try {
            registrarPort = SIPConfig.getRegistrarPort();
        }
        catch (NumberFormatException ex) {
            registrarPort = 5060;
        }
        registrarTransport = SIPConfig.getRegistrarTransport();
        if (registrarTransport == null) {
            registrarTransport = SipManager.DEFAULT_TRANSPORT;
        }
        try {
            registrationsExpiration = SIPConfig.getRegistrationExpiration();
        }
        catch (NumberFormatException ex) {
            registrationsExpiration = 3600;
        }
        sipStackPath = SIPConfig.getStackPath();
        if (sipStackPath == null) {
            sipStackPath = "gov.nist";
        }
        transport = SIPConfig.getTransport();
        // Bug fix: other SIPConfig getters can return null (handled above for
        // registrarTransport); transport.equals("") alone would then throw a
        // NullPointerException into the catch-all below.
        if (transport == null || transport.equals("")) {
            transport = SipManager.DEFAULT_TRANSPORT;
        }
        try {
            localPort = SIPConfig.getLocalPort();
        }
        catch (NumberFormatException exc) {
            localPort = 5060;
        }
        displayName = SIPConfig.getDisplayName();
    }
    catch (Exception e) {
        Log.error(e.getMessage(), e);
    }
}
/**
* Adds the specified credentials to the security manager's credentials
* cache so that they get tried next time they're needed.
*
* @param realm the realm these credentials should apply for.
* @param credentials a set of credentials (username and pass)
*/
public void cacheCredentials(String realm, UserCredentials credentials) {
    // Delegates to the security manager's per-realm credential cache.
    sipSecurityManager.cacheCredentials(realm, credentials);
}
/**
* Adds a CommunicationsListener to SipManager.
*
* @param listener The CommunicationsListener to be added.
*/
public void addCommunicationsListener(CommunicationsListener listener) {
    try {
        listeners.add(listener);
    }
    catch (Exception e) {
        // NOTE(review): ArrayList.add cannot throw here; this guard looks vestigial.
        Log.error("addCommunicationsListener", e);
    }
}
// ------------ registerred
/** Notifies listeners (most recently added first) that registration succeeded. */
void fireRegistered(String address) {
    final RegistrationEvent event = new RegistrationEvent(address);
    int i = listeners.size();
    while (--i >= 0) {
        listeners.get(i).registered(event);
    }
}
// ------------ registering
/** Notifies listeners (most recently added first) that a registration attempt began. */
void fireRegistering(String address) {
    final RegistrationEvent event = new RegistrationEvent(address);
    int i = listeners.size();
    while (--i >= 0) {
        listeners.get(i).registering(event);
    }
}
// ------------ unregistered
/** Notifies listeners (most recently added first) that unregistration completed. */
public void fireUnregistered(String address) {
    final RegistrationEvent event = new RegistrationEvent(address);
    int i = listeners.size();
    while (--i >= 0) {
        listeners.get(i).unregistered(event);
    }
}
/** Notifies listeners (most recently added first) that registration failed with the given reason. */
void fireRegistrationFailed(String address, RegistrationEvent.Type type) {
    final RegistrationEvent event = new RegistrationEvent(address, type);
    int i = listeners.size();
    while (--i >= 0) {
        listeners.get(i).registrationFailed(event);
    }
}
/** Notifies listeners (most recently added first) that unregistration is starting. */
void fireUnregistering(String address) {
    final RegistrationEvent event = new RegistrationEvent(address);
    int i = listeners.size();
    while (--i >= 0) {
        listeners.get(i).unregistering(event);
    }
}
/** No-op: incoming requests are ignored by this register tester (only responses are handled). */
public void processRequest(RequestEvent requestEvent) {
}
// -------------------- PROCESS RESPONSE
/**
 * Dispatches REGISTER-related responses: 200 OK to processOK, 404 to an
 * unregister + NotFound event, 501 to processNotImplemented, 401/407 to an
 * authentication challenge (first attempt) or WrongPass, 403 to Forbidden.
 */
public void processResponse(ResponseEvent responseReceivedEvent) {
    Log.debug("RESPONSE [" + responseReceivedEvent.getResponse().getStatusCode() + "]");
    ClientTransaction clientTransaction = responseReceivedEvent
            .getClientTransaction();
    if (clientTransaction == null) {
        // Stray/retransmitted response with no matching transaction: ignore.
        return;
    }
    Response response = responseReceivedEvent.getResponse();
    String method = ((CSeqHeader) response.getHeader(CSeqHeader.NAME))
            .getMethod();
    // OK
    if (response.getStatusCode() == Response.OK) {
        // REGISTER
        if (method.equals(Request.REGISTER)) {
            registerProcessing.processOK(clientTransaction, response);
        }
    }
    // NOT_FOUND
    else if (response.getStatusCode() == Response.NOT_FOUND) {
        if (method.equals(Request.REGISTER)) {
            try {
                unregister();
                registrationFailed(RegistrationEvent.Type.NotFound);
            }
            catch (CommunicationsException e) {
                Log.error("NOT FOUND", e);
            }
            Log.debug("REGISTER NOT FOUND");
        }
    }
    // NOT_IMPLEMENTED
    else if (response.getStatusCode() == Response.NOT_IMPLEMENTED) {
        if (method.equals(Request.REGISTER)) {
            // Fixed typo issues - Reported by pizarro
            registerProcessing.processNotImplemented(clientTransaction,
                    response);
        }
    }
    // REQUEST_TERMINATED
    // 401 UNAUTHORIZED
    else if (response.getStatusCode() == Response.UNAUTHORIZED
            || response.getStatusCode() == Response.PROXY_AUTHENTICATION_REQUIRED) {
        if (method.equals(Request.REGISTER)) {
            CSeqHeader cseq = (CSeqHeader) response.getHeader(CSeqHeader.NAME);
            // First challenge is answered with cached credentials; a second
            // challenge means those credentials were rejected.
            if (cseq.getSequenceNumber() < 2)
                registerProcessing.processAuthenticationChallenge(
                        clientTransaction, response);
            else
                registrationFailed(RegistrationEvent.Type.WrongPass);
        }
    }
    // 403 Wrong Authorization user for this account
    else if(response.getStatusCode() == Response.FORBIDDEN){
        registrationFailed(RegistrationEvent.Type.Forbidden);
    }
} // process response
/** No-op: transaction timeouts are ignored by this tester. */
public void processTimeout(TimeoutEvent timeoutEvent) {
}
/** @return the textual IP address of the configured local interface. */
String getLocalHostAddress() {
    return localAddress.getHostAddress();
}
/**
 * Ensures start() completed before any stack-dependent operation runs.
 *
 * @throws CommunicationsException if the SIP stack has not been initialised
 */
protected void checkIfStarted() throws CommunicationsException {
    if (!isStarted) {
        // Bug fix: the concatenated literals previously produced
        // "...had not beenproperly initialised!" (missing space).
        throw new CommunicationsException(
                "The underlying SIP Stack had not been "
                        + "properly initialised! Impossible to continue");
    }
}
/**
 * Manual smoke test: starts the stack on the local host and registers then
 * unregisters user "7512" against a hard-coded "apollo" registrar.
 */
public static void main(String args[]) {
    SIPConfig.setRegistrarAddress("apollo");
    SIPConfig.setAuthenticationRealm("apollo");
    SIPConfig.setDefaultDomain("apollo");
    InetAddress address = null;
    try {
        address = InetAddress.getLocalHost();
    } catch (UnknownHostException e) {
        e.printStackTrace();
    }
    SipManager sipManager = new SipManager(address);
    try {
        sipManager.start();
    } catch (CommunicationsException e) {
        e.printStackTrace();
    }
    try {
        sipManager.startRegisterProcess("7512", "7512", "7512");
    } catch (CommunicationsException e) {
        e.printStackTrace();
    }
    try {
        sipManager.unregister();
    } catch (CommunicationsException e) {
        e.printStackTrace();
    }
}
@Override
public void processDialogTerminated(DialogTerminatedEvent arg0) {
    // No-op: dialog lifecycle events are not used by this tester.
}
@Override
public void processIOException(IOExceptionEvent arg0) {
    // No-op: transport I/O errors are not surfaced here.
}
@Override
public void processTransactionTerminated(TransactionTerminatedEvent arg0) {
    // No-op: transaction termination events are not used by this tester.
}
}
| |
/*
* Copyright 2013 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.fuzzing;
import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import com.google.javascript.jscomp.CommandLineRunner;
import com.google.javascript.jscomp.CompilationLevel;
import com.google.javascript.jscomp.Compiler;
import com.google.javascript.jscomp.CompilerInput;
import com.google.javascript.jscomp.CompilerOptions;
import com.google.javascript.jscomp.JSModule;
import com.google.javascript.jscomp.Result;
import com.google.javascript.jscomp.SourceFile;
import com.google.javascript.jscomp.SyntheticAst;
import com.google.javascript.jscomp.VariableRenamingPolicy;
import com.google.javascript.rhino.Node;
import org.json.JSONException;
import org.json.JSONObject;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* UNDER DEVELOPMENT. DO NOT USE!
* @author zplin@google.com (Zhongpeng Lin)
*/
public class Driver {
@Option(name = "--number_of_runs",
usage = "The number of runs of the fuzzer. "
+ "If this option is missing, the driver will run forever")
private int numberOfRuns = -1;
@Option(name = "--max_ast_size",
    usage = "The max number of nodes in the generated ASTs. Default: 100")
// Bug fix: the field previously defaulted to 0 while the usage string
// advertises 100, producing degenerate (empty) ASTs when the flag was omitted.
private int maxASTSize = 100;
@Option(name = "--compilation_level",
usage = "Specifies the compilation level to use. " +
"Default: SIMPLE_OPTIMIZATIONS")
private CompilationLevel compilationLevel =
CompilationLevel.SIMPLE_OPTIMIZATIONS;
@Option(name = "--seed",
usage = "Specifies the seed for the fuzzer. "
+ "It will override --number_of_runs to 1. "
+ "If not given, System.currentTimeMillis() will be used")
private long seed = -1;
@Option(name = "--logging_level",
usage = "Specifies the logging level for the driver. "
+ "Default: INFO")
private LoggingLevel level = LoggingLevel.INFO;
@Option(name = "--config",
required = true,
usage = "Specifies the configuration file")
private String configFileName;
@Option(name = "--execute",
usage = "Whether to execute the generated JavaScript")
private boolean execute = false;
@Option(name = "--stop_on_error",
usage = "Whether to stop fuzzing once an error is found")
private boolean stopOnError = false;
private Logger logger;
private JSONObject config;
/** Compiles raw JS source against the default externs using the configured options. */
public Result compile(String code) throws IOException {
    Compiler.setLoggingLevel(level.getLevel());
    Compiler compiler = new Compiler();
    return compiler.compile(CommandLineRunner.getDefaultExterns(),
            Arrays.asList(SourceFile.fromCode("[fuzzedCode]", code)), getOptions());
}
/**
 * Compiles an already-built AST, wrapped in a synthetic single-input module,
 * single-threaded with a 30 s timeout.
 */
public Result compile(Node script) throws IOException {
    CompilerInput input = new CompilerInput(new SyntheticAst(script));
    JSModule jsModule = new JSModule("fuzzedModule");
    jsModule.add(input);
    Compiler.setLoggingLevel(level.getLevel());
    Compiler compiler = new Compiler();
    compiler.setTimeout(30);
    compiler.disableThreads();
    return compiler.compileModules(
            CommandLineRunner.getDefaultExterns(),
            Arrays.asList(jsModule), getOptions());
}
/**
 * Builds compiler options for the configured compilation level, with variable
 * renaming disabled so original and compiled output stay comparable.
 */
private CompilerOptions getOptions() {
    final CompilerOptions compilerOptions = new CompilerOptions();
    compilationLevel.setOptionsForCompilationLevel(compilerOptions);
    compilerOptions.variableRenaming = VariableRenamingPolicy.OFF;
    return compilerOptions;
}
/** Lazily loads the JSON configuration file named by --config (UTF-8). */
private JSONObject getConfig() {
    if (config == null) {
        File file = new File(configFileName);
        try {
            config = new JSONObject(Files.toString(
                    file, StandardCharsets.UTF_8));
        } catch (JSONException | IOException e) {
            // NOTE(review): parse/IO failures are only printed, and callers
            // then receive null — confirm this is the intended contract.
            e.printStackTrace();
        }
    }
    return config;
}
/** Lazily creates the driver logger at the configured level, with all handlers passing everything through. */
private Logger getLogger() {
    if (logger != null) {
        return logger;
    }
    logger = Logger.getLogger(Driver.class.getName());
    logger.setLevel(level.getLevel());
    for (Handler handler : logger.getHandlers()) {
        handler.setLevel(Level.ALL);
    }
    return logger;
}
/** Generates a random script AST with at most maxASTSize nodes. */
private Node fuzz(FuzzingContext context) {
    ScriptFuzzer fuzzer = new ScriptFuzzer(context);
    return fuzzer.generate(maxASTSize);
}
/**
 * Runs both JavaScript versions concurrently under node with a timeout and
 * compares their outputs. Returns false when the outputs diverge, or when a
 * runtime error occurred and the log level would otherwise hide the warning.
 */
private boolean executeJS(String js1, String js2) {
    ExecutorService executor = Executors.newCachedThreadPool();
    NodeRunner node1 = new NodeRunner(js1);
    NodeRunner node2 = new NodeRunner(js2);
    String[] output1 = null, output2 = null;
    try {
        // set the timeout to maxASTSize milliseconds
        List<Future<String[]>> futures = executor.invokeAll(
                Lists.newArrayList(node1, node2), maxASTSize, TimeUnit.MILLISECONDS);
        Future<String[]> future1 = futures.get(0);
        if (!future1.isCancelled()) {
            output1 = future1.get();
        }
        Future<String[]> future2 = futures.get(1);
        if (!future2.isCancelled()) {
            output2 = future2.get();
        }
    } catch (InterruptedException e) {
        getLogger().log(Level.INFO, "Timeout in executing JavaScript", e);
    } catch (ExecutionException e) {
        getLogger().log(Level.SEVERE, "Error in executing JavaScript", e);
    } finally {
        // Always kill the node processes, even after a timeout.
        node1.process.destroy();
        node2.process.destroy();
    }
    if (output1 == null && output2 == null) {
        // Both timed out: treated as an infinite loop in the fuzzed code, not a bug.
        getLogger().info("Infinite loop!");
        return true;
    } else if (NodeRunner.isSame(output1, output2)) {
        // Same output; still flag runtime errors when the log level would hide them.
        boolean hasError = false;
        if (output1 != null && output1[1].length() > 0) {
            getLogger().warning("First JavaScript has a runtime error: " +
                    output1[1]);
            hasError = true;
        }
        if (output2 != null && output2[1].length() > 0) {
            getLogger().warning("Second JavaScript has a runtime error: " +
                    output2[1]);
            hasError = true;
        }
        if (hasError &&
                getLogger().getLevel().intValue() < Level.WARNING.intValue()) {
            return false;
        } else {
            return true;
        }
    } else {
        // Divergence between original and compiled code: log both outputs.
        StringBuilder sb =
                new StringBuilder("Different outputs!");
        sb.append("\nOutput 1:");
        if (output1 != null) {
            sb.append(output1[0]).append(output1[1]);
        } else {
            sb.append("null");
        }
        sb.append("\nOutput 2:");
        if (output2 != null) {
            sb.append(output2[0]).append(output2[1]);
        } else {
            sb.append("null");
        }
        getLogger().severe(sb.toString());
        return false;
    }
}
private void run() {
if (seed != -1) {
// When user specifies seed, only run once
numberOfRuns = 1;
}
long currentSeed;
for (int i = 0; numberOfRuns == -1 || i < numberOfRuns; i++) {
currentSeed = seed == -1 ? System.currentTimeMillis() : seed;
getLogger().info("Running fuzzer [" + i + " of " +
numberOfRuns + "]");
Random random = currentSeed == -1 ? new Random(currentSeed) :
new Random(currentSeed);
FuzzingContext context = new FuzzingContext(random, getConfig(), execute);
Node script = null;
try {
script = fuzz(context);
} catch (RuntimeException e) {
getLogger().log(Level.SEVERE, "Fuzzer error: ", e);
if (stopOnError) {
break;
} else {
continue;
}
}
String code1 = ScriptFuzzer.getPrettyCode(script);
StringBuffer debugInfo = new StringBuffer("Seed: ").append(currentSeed);
debugInfo.append("\nJavaScript: ").append(code1);
try {
Result result = compile(script);
if (result.success) {
if (result.warnings.length == 0) {
getLogger().info(debugInfo.toString());
} else {
getLogger().warning(debugInfo.toString());
}
} else {
getLogger().severe(debugInfo.toString());
if (stopOnError) {
break;
}
}
} catch (Exception e) {
getLogger().log(Level.SEVERE, "Compiler Crashed!", e);
getLogger().severe(debugInfo.toString());
if (stopOnError) {
break;
}
}
String code2 = ScriptFuzzer.getPrettyCode(script);
debugInfo.append("\nCompiled Code: " + code2);
String setUpCode = getSetupCode(context.scopeManager);
// System.out.print(setUpCode);
if (execute) {
if (!executeJS(setUpCode + code1, setUpCode + code2)) {
getLogger().severe(debugInfo.toString());
if (stopOnError) {
break;
}
}
}
getLogger().info(debugInfo.toString());
}
}
  /**
   * Builds the JavaScript prelude prepended to every fuzzed program: a
   * toString helper, an uncaughtException handler that prints the error
   * class, and an exit handler that dumps the values of the fuzzer's local
   * variables so the two runs can be compared.
   *
   * @param scopeManager supplies the local-scope symbols to dump on exit
   * @return the prelude source code
   */
  private String getSetupCode(ScopeManager scopeManager) {
    // Each symbol s becomes the string 'name'=name.
    Collection<String> vars = Collections2.transform(
        Lists.newArrayList(scopeManager.localScope().symbols),
        new Function<Symbol, String>() {
          @Override
          public String apply(Symbol s) {
            return "'" + s.name + "'=" + s.name;
          }
        });
    // NOTE(review): "var allvars = " + vars embeds the Java Collection's
    // toString, e.g. ['x'=x, 'y'=y]. 'x'=x is not valid JavaScript, so the
    // whole prelude would fail to parse — confirm whether this is intended
    // (both runs would fail identically) or a latent bug in variable dumping.
    String setUpCode = "function toString(value) {\n" +
        " if (value instanceof Array) {\n" +
        " var string = \"[\";\n" +
        " for (var i in value) {\n" +
        " string += toString(value[i]) + \",\";\n" +
        " }\n" +
        " string += ']';\n" +
        " return string;\n" +
        " } else if (value instanceof Function) {\n" +
        " return value.length;\n" +
        " } else {\n" +
        " return value;\n" +
        " }\n" +
        "}\n" +
        "\n" +
        "process.on('uncaughtException', function(e) {\n" +
        " console.log(\"Errors: \");\n" +
        " if (e instanceof Error) {\n" +
        " console.log(e.name);\n" +
        " } else {\n" +
        " console.log(typeof(e));\n" +
        " }\n" +
        "});\n" +
        "\n" +
        "process.on(\"exit\", function(e) {\n" +
        " console.log(\"Variables:\");\n" +
        " var allvars = " + vars + ";\n" +
        " console.log(toString(allvars));\n" +
        "});\n" +
        "";
    return setUpCode;
  }
public static void main(String[] args) throws Exception {
Driver driver = new Driver();
CmdLineParser parser = new CmdLineParser(driver);
try {
parser.parseArgument(args);
} catch (CmdLineException e) {
// handling of wrong arguments
System.err.println(e.getMessage());
parser.printUsage(System.err);
System.exit(1);
}
driver.run();
System.exit(0);
}
enum LoggingLevel {
OFF(Level.OFF),
SEVERE(Level.SEVERE),
WARNING(Level.WARNING),
INFO(Level.INFO),
CONFIG(Level.CONFIG),
FINE(Level.FINE),
FINER(Level.FINER),
FINEST(Level.FINEST),
ALL(Level.ALL);
private Level level;
private LoggingLevel(Level l) {
level = l;
}
/**
* @return the level
*/
public Level getLevel() {
return level;
}
}
  /**
   * Callable that runs a JavaScript program with the {@code node} binary via
   * {@code node -e <source>} and captures its output.
   * Result array layout: [0] = stdout, [1] = stderr.
   */
  static class NodeRunner implements Callable<String[]> {
    // Source code handed to node with the -e flag.
    private String js;
    // Spawned node process; null until call() runs. The owner destroys it.
    private Process process;
    NodeRunner(String js) {
      this.js = js;
    }
    /* (non-Javadoc)
     * @see java.util.concurrent.Callable#call()
     */
    @Override
    public String[] call() throws IOException {
      String[] command = {"node", "-e", js};
      Runtime runtime = Runtime.getRuntime();
      process = runtime.exec(command);
      String[] results = new String[2];
      // NOTE(review): stdout is drained fully before stderr; if the process
      // fills the stderr pipe buffer first this can deadlock — confirm the
      // fuzzed scripts keep stderr output small. Decoding also uses the
      // platform default charset.
      results[0] = CharStreams.toString(
          new InputStreamReader(process.getInputStream()));
      results[1] = CharStreams.toString(
          new InputStreamReader(process.getErrorStream()));
      return results;
    }
    /**
     * Compares two runs: equal when both are null (both timed out), unequal
     * when only one is null, and otherwise compares stdout ([0]) only —
     * stderr is deliberately ignored here.
     */
    public static boolean isSame(String[] output1, String[] output2) {
      if (output1 == null && output2 == null) {
        return true;
      } else if (output1 == null || output2 == null) {
        return false;
      } else {
        return output1[0].equals(output2[0]);
      }
    }
  }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.resolve.processors;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.util.VolatileNotNullLazyValue;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.util.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.codeInspection.utils.ControlFlowUtils;
import org.jetbrains.plugins.groovy.lang.psi.GrControlFlowOwner;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
import org.jetbrains.plugins.groovy.lang.psi.api.signatures.GrClosureSignature;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrClassInitializer;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.branch.GrReturnStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.branch.GrThrowStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrAssignmentExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.path.GrIndexProperty;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinitionBody;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrGdkMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrClosureParameter;
import org.jetbrains.plugins.groovy.lang.psi.impl.signatures.GrClosureSignatureUtil;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil;
import org.jetbrains.plugins.groovy.lang.psi.typeEnhancers.ClosureToSamConverter;
import org.jetbrains.plugins.groovy.lang.psi.util.GdkMethodUtil;
import org.jetbrains.plugins.groovy.lang.psi.util.GroovyCommonClassNames;
import org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil;
import java.util.Collection;
import static com.intellij.util.containers.ContainerUtil.emptyList;
import static com.intellij.util.containers.ContainerUtil.newHashSet;
/**
* @author Max Medvedev
*/
/**
 * Computes {@link PsiSubstitutor}s for Groovy method candidates: applies the
 * call site's explicit type arguments when their count matches the method's
 * type parameters, otherwise infers them from the argument types and, for
 * parameters still unresolved, from the expected type of the surrounding
 * context (enclosing return, assignment l-value or variable declaration).
 *
 * @author Max Medvedev
 */
public class SubstitutorComputer {
  private static final Logger LOG = Logger.getInstance(SubstitutorComputer.class);
  // Call site of the method being resolved.
  protected final PsiElement myPlace;
  // Type of the qualifier / implicit receiver at the call site.
  private final PsiType myThisType;
  // Types of the call arguments; null when they are unknown.
  @Nullable private final PsiType[] myArgumentTypes;
  // Explicit type arguments written at the call site (possibly empty).
  private final PsiType[] myTypeArguments;
  // Element whose surrounding context supplies an expected type for inference.
  private final PsiElement myPlaceToInferContext;
  // Lazily-collected return points of the enclosing control-flow owner.
  private final NotNullLazyValue<Collection<PsiElement>> myExitPoints;
  private final PsiResolveHelper myHelper;
  public SubstitutorComputer(PsiType thisType,
                             @Nullable PsiType[] argumentTypes,
                             PsiType[] typeArguments,
                             PsiElement place,
                             PsiElement placeToInferContext) {
    myThisType = thisType;
    myArgumentTypes = argumentTypes;
    myTypeArguments = typeArguments;
    myPlace = place;
    myPlaceToInferContext = placeToInferContext;
    // Exit points are only needed for return-type context inference, so they
    // are computed lazily (and thread-safely via the volatile lazy value).
    myExitPoints = VolatileNotNullLazyValue.createValue(() -> {
      if (canBeExitPoint(place)) {
        GrControlFlowOwner flowOwner = ControlFlowUtils.findControlFlowOwner(place);
        return newHashSet(ControlFlowUtils.collectReturns(flowOwner));
      }
      else {
        return emptyList();
      }
    });
    myHelper = JavaPsiFacade.getInstance(myPlace.getProject()).getResolveHelper();
  }
  /**
   * Derives the type expected by the context around
   * {@code myPlaceToInferContext}: the enclosing method's return type for
   * (implicit or explicit) returns, the l-value's nominal type for plain
   * assignments, or a variable's declared type. Returns null when no
   * expected type can be determined.
   */
  @Nullable
  protected PsiType inferContextType() {
    final PsiElement parent = myPlaceToInferContext.getParent();
    if (parent instanceof GrReturnStatement || myExitPoints.getValue().contains(myPlaceToInferContext)) {
      // Closures are passed as the stop class so a return inside a closure
      // does not pick up the outer method's return type.
      final GrMethod method = PsiTreeUtil.getParentOfType(parent, GrMethod.class, true, GrClosableBlock.class);
      if (method != null) {
        return method.getReturnType();
      }
    }
    else if (parent instanceof GrAssignmentExpression && myPlaceToInferContext.equals(((GrAssignmentExpression)parent).getRValue())) {
      PsiElement lValue = PsiUtil.skipParentheses(((GrAssignmentExpression)parent).getLValue(), false);
      // Index assignments (a[b] = c) are excluded: the l-value's nominal
      // type is not the expected type of the r-value there.
      if ((lValue instanceof GrExpression) && !(lValue instanceof GrIndexProperty)) {
        return ((GrExpression)lValue).getNominalType();
      }
      else {
        return null;
      }
    }
    else if (parent instanceof GrVariable) {
      return ((GrVariable)parent).getDeclaredType();
    }
    return null;
  }
  /**
   * Walks up from {@code place}: returns true once a method, closure or
   * class initializer is reached (the place can be a return point), false
   * once a throw statement, type-definition body or file is reached first.
   */
  private static boolean canBeExitPoint(PsiElement place) {
    while (place != null) {
      if (place instanceof GrMethod || place instanceof GrClosableBlock || place instanceof GrClassInitializer) return true;
      if (place instanceof GrThrowStatement || place instanceof GrTypeDefinitionBody || place instanceof GroovyFile) return false;
      place = place.getParent();
    }
    return false;
  }
  /**
   * Produces the substitutor for {@code method} at this call site: explicit
   * type arguments win when their count matches; otherwise type parameters
   * are inferred from the argument types. For GDK methods the receiver type
   * is prepended so inference runs against the static form.
   *
   * @param substitutor    base substitutor to extend
   * @param method         candidate method
   * @param resolveContext element the candidate was resolved against (used
   *                       to detect "with" contexts for GDK methods)
   */
  public PsiSubstitutor obtainSubstitutor(@NotNull PsiSubstitutor substitutor,
                                          @NotNull PsiMethod method,
                                          @Nullable PsiElement resolveContext) {
    final PsiTypeParameter[] typeParameters = method.getTypeParameters();
    if (myTypeArguments.length == typeParameters.length) {
      for (int i = 0; i < typeParameters.length; i++) {
        PsiTypeParameter typeParameter = typeParameters[i];
        final PsiType typeArgument = myTypeArguments[i];
        substitutor = substitutor.put(typeParameter, typeArgument);
      }
      return substitutor;
    }
    if (myArgumentTypes != null && method.hasTypeParameters()) {
      PsiType[] argTypes = myArgumentTypes;
      if (method instanceof GrGdkMethod) {
        //type inference should be performed from static method
        // The receiver becomes the first argument of the static form.
        PsiType[] newArgTypes = PsiType.createArray(argTypes.length + 1);
        if (GdkMethodUtil.isInWithContext(resolveContext)) {
          newArgTypes[0] = ((GrExpression)resolveContext).getType();
        }
        else {
          newArgTypes[0] = myThisType;
        }
        System.arraycopy(argTypes, 0, newArgTypes, 1, argTypes.length);
        argTypes = newArgTypes;
        method = ((GrGdkMethod)method).getStaticMethod();
        LOG.assertTrue(method.isValid());
      }
      return inferMethodTypeParameters(method, substitutor, typeParameters, argTypes);
    }
    return substitutor;
  }
  /**
   * Infers {@code typeParameters} from the mapping of argument types to the
   * method's parameters: vararg/multi-arg parameters are flattened, argument
   * types are adjusted via {@link #handleConversion}, and parameters still
   * unresolved afterwards fall back to context inference and finally to a
   * raw (null) substitution.
   */
  private PsiSubstitutor inferMethodTypeParameters(@NotNull PsiMethod method,
                                                   @NotNull PsiSubstitutor partialSubstitutor,
                                                   @NotNull PsiTypeParameter[] typeParameters,
                                                   @NotNull PsiType[] argTypes) {
    if (typeParameters.length == 0 || myArgumentTypes == null) return partialSubstitutor;
    final GrClosureSignature erasedSignature = GrClosureSignatureUtil.createSignature(method, partialSubstitutor, true);
    final GrClosureSignature signature = GrClosureSignatureUtil.createSignature(method, partialSubstitutor);
    final GrClosureParameter[] params = signature.getParameters();
    final GrClosureSignatureUtil.ArgInfo<PsiType>[] argInfos = GrClosureSignatureUtil.mapArgTypesToParameters(erasedSignature, argTypes, myPlace, true);
    if (argInfos == null) return partialSubstitutor;
    int max = Math.max(params.length, argTypes.length);
    PsiType[] parameterTypes = PsiType.createArray(max);
    PsiType[] argumentTypes = PsiType.createArray(max);
    int i = 0;
    for (int paramIndex = 0; paramIndex < argInfos.length; paramIndex++) {
      PsiType paramType = params[paramIndex].getType();
      GrClosureSignatureUtil.ArgInfo<PsiType> argInfo = argInfos[paramIndex];
      if (argInfo != null) {
        if (argInfo.isMultiArg) {
          // Several arguments map onto one vararg parameter: match each
          // against the array's component type.
          if (paramType instanceof PsiArrayType) paramType = ((PsiArrayType)paramType).getComponentType();
        }
        for (PsiType type : argInfo.args) {
          argumentTypes[i] = handleConversion(paramType, type);
          parameterTypes[i] = paramType;
          i++;
        }
      }
      else {
        // No argument supplied for this parameter (e.g. default value).
        parameterTypes[i] = paramType;
        argumentTypes[i] = PsiType.NULL;
        i++;
      }
    }
    PsiSubstitutor substitutor = myHelper.inferTypeArguments(typeParameters, parameterTypes, argumentTypes, LanguageLevel.JDK_1_7);
    for (PsiTypeParameter typeParameter : typeParameters) {
      if (!substitutor.getSubstitutionMap().containsKey(typeParameter)) {
        substitutor = inferFromContext(typeParameter, PsiUtil.getSmartReturnType(method), substitutor);
        if (!substitutor.getSubstitutionMap().containsKey(typeParameter)) {
          // Still unresolved: pin to a raw substitution.
          substitutor = substitutor.put(typeParameter, null);
        }
      }
    }
    return partialSubstitutor.putAll(substitutor);
  }
  /**
   * Adjusts an argument type for inference: closures may be SAM-converted to
   * the parameter's functional type; otherwise the argument type is kept
   * when assignable to the erased parameter type, replaced by the parameter
   * type when only method-call conversion applies, and dropped (null) when
   * incompatible.
   */
  @Nullable
  private PsiType handleConversion(@Nullable PsiType paramType, @Nullable PsiType argType) {
    if (argType instanceof PsiClassType &&
        ClosureToSamConverter.isSamConversionAllowed(myPlace) &&
        InheritanceUtil.isInheritor(argType, GroovyCommonClassNames.GROOVY_LANG_CLOSURE) &&
        !TypesUtil.isClassType(paramType, GroovyCommonClassNames.GROOVY_LANG_CLOSURE)) {
      PsiType converted = handleConversionOfSAMType(paramType, (PsiClassType)argType);
      if (converted != null) {
        return converted;
      }
      return argType;
    }
    if (!TypesUtil.isAssignable( TypeConversionUtil.erasure(paramType), argType, myPlace)) {
      if (TypesUtil.isAssignableByMethodCallConversion(paramType, argType, myPlace)) {
        return paramType;
      }
      return null;
    }
    return argType;
  }
  /**
   * For a generic SAM-type parameter receiving a Closure argument, infers
   * the SAM class's type parameters by matching its single abstract method's
   * return type against the closure's type argument, returning the resulting
   * parameterized SAM type or null when nothing could be inferred.
   */
  @Nullable
  private PsiType handleConversionOfSAMType(@Nullable PsiType samType, @NotNull PsiClassType closure) {
    if (samType instanceof PsiClassType) {
      PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)samType).resolveGenerics();
      PsiClass samClass = resolveResult.getElement();
      if (samClass != null && samClass.getTypeParameters().length != 0) {
        MethodSignature samSignature = ClosureToSamConverter.findSingleAbstractMethod(samClass, PsiSubstitutor.EMPTY);
        if (samSignature != null) {
          PsiMethod samMethod = MethodSignatureUtil.findMethodBySignature(samClass, samSignature, true);
          if (samMethod != null) {
            PsiType[] closureArgs = closure.getParameters();
            if (closureArgs.length == 1 && samMethod.getReturnType() != null) {
              PsiSubstitutor substitutor = myHelper.inferTypeArguments(samClass.getTypeParameters(),
                                                                       new PsiType[]{samMethod.getReturnType()},
                                                                       closureArgs,
                                                                       LanguageLevel.JDK_1_7);
              if (!substitutor.getSubstitutionMap().isEmpty()) {
                return JavaPsiFacade.getElementFactory(myPlace.getProject()).createType(samClass, substitutor);
              }
            }
          }
        }
      }
    }
    return null;
  }
  /**
   * Last-resort inference of a single type parameter from the expected type
   * of the surrounding context (see {@link #inferContextType()}).
   */
  private PsiSubstitutor inferFromContext(@NotNull PsiTypeParameter typeParameter,
                                          @Nullable PsiType lType,
                                          @NotNull PsiSubstitutor substitutor) {
    if (myPlace == null) return substitutor;
    final PsiType inferred = myHelper.getSubstitutionForTypeParameter(typeParameter, lType, inferContextType(), false, LanguageLevel.JDK_1_7);
    if (inferred != PsiType.NULL) {
      return substitutor.put(typeParameter, inferred);
    }
    return substitutor;
  }
  /** @return the explicit type arguments supplied at the call site */
  public PsiType[] getTypeArguments() {
    return myTypeArguments;
  }
}
| |
/**
* Copyright (c) 2015-2016 Angelo ZERR.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Angelo Zerr <angelo.zerr@gmail.com> - initial API and implementation
*/
package ts.eclipse.ide.internal.ui.navigator;
import java.util.ArrayList;
import java.util.Collection;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.jface.viewers.AbstractTreeViewer;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.StructuredViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.widgets.Control;
import ts.eclipse.ide.core.TypeScriptCorePlugin;
import ts.eclipse.ide.core.resources.IIDETypeScriptProject;
import ts.eclipse.ide.core.resources.TypeScriptElementChangedListenerAdapater;
import ts.eclipse.ide.core.resources.buildpath.ITsconfigBuildPath;
import ts.eclipse.ide.core.resources.buildpath.ITypeScriptBuildPath;
import ts.eclipse.ide.core.utils.TypeScriptResourceUtil;
import ts.resources.ITypeScriptProject;
/**
* TypeScript navigator used to display for *.ts file, *.js and *.js.map files
* as children.
*
*/
/**
 * TypeScript navigator content provider: shows a TypeScript-project node
 * under each TypeScript-enabled {@link IProject}, tsconfig build paths under
 * that node, and the emitted *.js / *.js.map files as children of each *.ts
 * file. Listens to the TypeScript model to refresh the tree on version or
 * build-path changes.
 */
public class TypeScriptNavigatorContentProvider extends TypeScriptElementChangedListenerAdapater
        implements ITreeContentProvider {

    /** Shared empty result, returned whenever an element has no children. */
    public static final Object[] NO_CHILDREN = new Object[0];

    /**
     * Viewer this provider feeds; assigned in {@link #inputChanged} and may
     * still be null before the first input is set.
     */
    private Viewer viewer;

    public TypeScriptNavigatorContentProvider() {
        // Register for TypeScript model events (version / build path changes).
        TypeScriptCorePlugin.getDefault().addTypeScriptElementChangedListener(this);
    }

    @Override
    public Object[] getElements(Object element) {
        return NO_CHILDREN;
    }

    @Override
    public Object[] getChildren(Object element) {
        Object[] children = getChildrenOrNull(element);
        return children != null ? children : NO_CHILDREN;
    }

    /**
     * Computes the children of the given element, or null when it provides
     * none.
     */
    private Object[] getChildrenOrNull(Object element) {
        if (element instanceof IResource) {
            return getChildren((IResource) element);
        } else if ((element instanceof IIDETypeScriptProject)) {
            IIDETypeScriptProject tsProject = (IIDETypeScriptProject) element;
            return tsProject.getTypeScriptBuildPath().getTsconfigBuildPaths();
        } else if ((element instanceof ITsconfigBuildPath)) {
            return ((ITsconfigBuildPath) element).members();
        }
        return null;
    }

    /**
     * Children of a workspace resource: the TypeScript-project node for a
     * project, or the emitted files for a file.
     */
    private Object[] getChildren(IResource resource) {
        switch (resource.getType()) {
        case IResource.PROJECT:
            return getTypeScriptResources((IProject) resource);
        case IResource.FILE:
            // Renamed from getEmmitedFiles (typo fix).
            return getEmittedFiles((IFile) resource);
        }
        return null;
    }

    /**
     * Returns the single TypeScript-project node for the given project, or
     * null when the project is not TypeScript-enabled or cannot be loaded.
     */
    private Object[] getTypeScriptResources(IProject project) {
        if (TypeScriptResourceUtil.isTypeScriptProject(project)) {
            try {
                IIDETypeScriptProject tsProject = TypeScriptResourceUtil.getTypeScriptProject(project);
                return new Object[] { tsProject };
            } catch (CoreException e) {
                // Ignored on purpose: the TypeScript project could not be
                // loaded, so the project simply contributes no node.
            }
        }
        return null;
    }

    /**
     * Returns the *.js / *.js.map files emitted for the given *.ts file, or
     * null when they cannot be determined.
     */
    private Object[] getEmittedFiles(IFile file) {
        try {
            return TypeScriptResourceUtil.getEmittedFiles(file);
        } catch (CoreException e) {
            // Ignored on purpose: no emitted files can be shown for this file.
            return null;
        }
    }

    @Override
    public boolean hasChildren(Object element) {
        if (element instanceof IResource) {
            // for performance, returns true to avoid loading twice compiled
            // resources *.js and *.js.map
            return TypeScriptResourceUtil.isTsOrTsxFile(element);
        } else if (element instanceof ITypeScriptProject) {
            return true;
        } else if ((element instanceof ITsconfigBuildPath)) {
            return true;
        }
        return false;
    }

    @Override
    public void dispose() {
        TypeScriptCorePlugin.getDefault().removeTypeScriptElementChangedListener(this);
    }

    @Override
    public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
        this.viewer = viewer;
    }

    @Override
    public void typeScriptVersionChanged(IIDETypeScriptProject tsProject, String oldVersion, String newVersion) {
        // Update the TypeScript-project node label with the new version.
        updateTypeScriptResourcesLabel(tsProject);
    }

    /** Refreshes the given project node's label in the tree viewer. */
    private void updateTypeScriptResourcesLabel(IIDETypeScriptProject tsProject) {
        // The widget may have been destroyed (or never created) by the time
        // this is run. Check for this and do nothing if so.
        if (viewer == null) {
            return;
        }
        Control ctrl = viewer.getControl();
        if (ctrl == null || ctrl.isDisposed()) {
            return;
        }
        if (viewer instanceof AbstractTreeViewer) {
            AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer;
            treeViewer.refresh(tsProject, true);
        }
    }

    @Override
    public void buildPathChanged(IIDETypeScriptProject tsProject, ITypeScriptBuildPath oldBuildPath,
            ITypeScriptBuildPath newBuildPath) {
        if (viewer == null) {
            return;
        }
        Control ctrl = viewer.getControl();
        if (ctrl == null || ctrl.isDisposed()) {
            return;
        }
        final Collection<Runnable> runnables = new ArrayList<Runnable>();
        processChanged(tsProject, oldBuildPath, newBuildPath, runnables);
        if (runnables.isEmpty()) {
            return;
        }
        // Are we in the UIThread? If so spin it until we are done
        if (ctrl.getDisplay().getThread() == Thread.currentThread()) {
            runUpdates(runnables);
        } else {
            ctrl.getDisplay().asyncExec(new Runnable() {
                @Override
                public void run() {
                    // Abort if this happens after disposes
                    Control ctrl = viewer.getControl();
                    if (ctrl == null || ctrl.isDisposed()) {
                        return;
                    }
                    runUpdates(runnables);
                }
            });
        }
    }

    /**
     * Collects the viewer updates needed to reflect a build-path change:
     * removes the project node when no build path remains, adds it when one
     * appears, and refreshes it otherwise.
     */
    private void processChanged(final IIDETypeScriptProject tsProject, final ITypeScriptBuildPath oldBuildPath,
            final ITypeScriptBuildPath newBuildPath, Collection<Runnable> runnables) {
        // The widget may have been destroyed (or never created) by the time
        // this is run. Check for this and do nothing if so.
        if (viewer == null) {
            return;
        }
        Control ctrl = viewer.getControl();
        if (ctrl == null || ctrl.isDisposed()) {
            return;
        }
        final IProject project = tsProject.getProject();
        Runnable addAndRemove = new Runnable() {
            @Override
            public void run() {
                if (viewer instanceof AbstractTreeViewer) {
                    AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer;
                    if (!hasBuildPath(newBuildPath)) {
                        treeViewer.remove(project, new Object[] { tsProject });
                    } else {
                        if (!hasBuildPath(oldBuildPath)) {
                            treeViewer.add(project, new Object[] { tsProject });
                        } else {
                            treeViewer.refresh(tsProject);
                        }
                    }
                } else {
                    ((StructuredViewer) viewer).refresh(project);
                }
            }

            private boolean hasBuildPath(final ITypeScriptBuildPath buildPath) {
                return buildPath != null && buildPath.hasRootContainers();
            }
        };
        runnables.add(addAndRemove);
    }

    /**
     * Run all of the runnables that are the widget updates
     *
     * @param runnables
     */
    private void runUpdates(Collection<Runnable> runnables) {
        for (Runnable runnable : runnables) {
            runnable.run();
        }
    }

    @Override
    public Object getParent(Object element) {
        return null;
    }
}
| |
package com.github.shynixn.astraledit.bukkit.logic.business.nms.v1_8_R1;
import com.github.shynixn.astraledit.api.bukkit.business.entity.PacketArmorstand;
import com.github.shynixn.astraledit.bukkit.logic.business.nms.NMSRegistry;
import com.github.shynixn.astraledit.bukkit.logic.lib.ItemStackBuilder;
import net.minecraft.server.v1_8_R1.*;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.craftbukkit.v1_8_R1.CraftWorld;
import org.bukkit.craftbukkit.v1_8_R1.entity.CraftArmorStand;
import org.bukkit.craftbukkit.v1_8_R1.entity.CraftPlayer;
import org.bukkit.craftbukkit.v1_8_R1.inventory.CraftItemStack;
import org.bukkit.entity.ArmorStand;
import org.bukkit.entity.Player;
import org.bukkit.util.EulerAngle;
import java.util.Set;
/**
* Copyright 2017 Shynixn
* <p>
* Do not remove this header!
* <p>
* Version 1.0
* <p>
* MIT License
* <p>
* Copyright (c) 2017
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
public class DisplayArmorstand implements PacketArmorstand {
    // Owning player; receives every packet and is cleared on close().
    private Player player;
    // Backing NMS entity used only to build packets from.
    private final EntityArmorStand armorStand;
    // Legacy numeric id of the block this stand represents.
    private int storedId;
    // Legacy data value of the block this stand represents.
    private byte storedData;
    // Additional players that receive every packet besides the owner.
    private final Set<Player> watchers;
    /**
     * Initializes the armorstand
     *
     * @param player player
     * @param location location
     * @param id id
     * @param data data
     * @param watchers watchers
     */
    public DisplayArmorstand(Player player, Location location, int id, byte data, Set<Player> watchers) {
        super();
        this.watchers = watchers;
        this.player = player;
        // NOTE(review): the entity is created in the player's world, not in
        // location.getWorld() — confirm callers never pass a location from a
        // different world.
        this.armorStand = new EntityArmorStand(((CraftWorld) player.getWorld()).getHandle());
        final NBTTagCompound compound = new NBTTagCompound();
        // NOTE(review): vanilla NBT uses the capitalized key "Invulnerable";
        // the lowercase key below may be ignored by the entity loader — verify.
        compound.setBoolean("invulnerable", true);
        compound.setBoolean("Invisible", true);
        compound.setBoolean("PersistenceRequired", true);
        compound.setBoolean("NoBasePlate", true);
        // a(NBTTagCompound) applies the compound's state to the entity.
        this.armorStand.a(compound);
        this.armorStand.setLocation(location.getX(), location.getY(), location.getZ(), 0, 0);
        this.storedId = id;
        this.storedData = data;
        ItemStackBuilder stackBuilder = new ItemStackBuilder(Material.getMaterial(id), 1, data);
        this.getCraftEntity().setHelmet(stackBuilder.build());
        // ~3.15 rad (~180 degrees) folds body and legs away so only the
        // helmet item remains visible.
        this.getCraftEntity().setBodyPose(new EulerAngle(3.15, 0, 0));
        this.getCraftEntity().setLeftLegPose(new EulerAngle(3.15, 0, 0));
        this.getCraftEntity().setRightLegPose(new EulerAngle(3.15, 0, 0));
        if (((ArmorStand) this.armorStand.getBukkitEntity()).getHelmet().getType() == Material.AIR) {
            // The block id has no item form (e.g. liquids); fall back to a
            // textured player skull instead.
            stackBuilder = new ItemStackBuilder(Material.SKULL_ITEM, 1, (short) 3);
            if (id == Material.WATER.getId() || id == Material.STATIONARY_WATER.getId()) {
                stackBuilder.setSkin(NMSRegistry.WATER_HEAD);
            } else if (id == Material.LAVA.getId() || id == Material.STATIONARY_LAVA.getId()) {
                stackBuilder.setSkin(NMSRegistry.LAVA_HEAD);
            } else {
                stackBuilder.setSkin(NMSRegistry.NOT_FOUND);
            }
            ((ArmorStand) this.armorStand.getBukkitEntity()).setHelmet(stackBuilder.build());
        }
    }
    /**
     * Spawns the armorstand for the owner and all watchers by sending the
     * spawn and equipment packets.
     */
    @Override
    public void spawn() {
        final PacketPlayOutSpawnEntityLiving packetSpawn = new PacketPlayOutSpawnEntityLiving(this.armorStand);
        // NOTE(review): in 1.8 equipment packets slot 4 is the helmet and
        // slot 3 the chestplate — confirm slot 3 is the intended slot here.
        final PacketPlayOutEntityEquipment packetHead =
                new PacketPlayOutEntityEquipment(this.armorStand.getId(), 3, CraftItemStack.asNMSCopy(((ArmorStand) this.armorStand.getBukkitEntity()).getHelmet()));
        this.sendPacket(packetSpawn);
        this.sendPacket(packetHead);
    }
    /**
     * Teleports the armorstand to the given location
     *
     * @param location location
     */
    @Override
    public void teleport(Location location) {
        this.armorStand.setPositionRotation(location.getX(), location.getY(), location.getZ(), location.getYaw(), location.getPitch());
        final PacketPlayOutEntityTeleport teleportPacket = new PacketPlayOutEntityTeleport(this.armorStand);
        this.sendPacket(teleportPacket);
    }
    /**
     * Removes the armorstand by sending a destroy packet to all viewers.
     */
    @Override
    public void remove() {
        final PacketPlayOutEntityDestroy destroyPacket = new PacketPlayOutEntityDestroy(this.armorStand.getId());
        this.sendPacket(destroyPacket);
    }
    /**
     * Returns the location of the armorstand
     *
     * @return location
     */
    @Override
    public Location getLocation() {
        return this.armorStand.getBukkitEntity().getLocation();
    }
    /**
     * Sets the pose of the head
     *
     * @param angle angle
     */
    @Override
    public void setHeadPose(EulerAngle angle) {
        ((ArmorStand) this.armorStand.getBukkitEntity()).setHeadPose(angle);
    }
    /**
     * Returns the pose of the head
     *
     * @return angle
     */
    @Override
    public EulerAngle getHeadPose() {
        return ((ArmorStand) this.armorStand.getBukkitEntity()).getHeadPose();
    }
    /**
     * Returns the stored block id
     *
     * @return id
     */
    @Override
    public int getStoredBlockId() {
        return this.storedId;
    }
    /**
     * Sets the stored block id
     *
     * @param id id
     */
    @Override
    public void setStoreBlockId(int id) {
        this.storedId = id;
    }
    /**
     * Returns the stored block data
     *
     * @return data
     */
    @Override
    public byte getStoredBlockData() {
        return this.storedData;
    }
    /**
     * Sets the stored block data
     *
     * @param data data
     */
    @Override
    public void setStoredBlockData(byte data) {
        this.storedData = data;
    }
    /**
     * Sends the packet to the owning player and to every watcher
     *
     * @param packet packet
     */
    private void sendPacket(Packet packet) {
        this.sendPacket(packet, this.player);
        for (final Player player : this.watchers) {
            this.sendPacket(packet, player);
        }
    }
    /**
     * Sends the packet to a single player's connection
     *
     * @param packet packet
     * @param player player
     */
    private void sendPacket(Packet packet, Player player) {
        ((CraftPlayer) player).getHandle().playerConnection.sendPacket(packet);
    }
    /**
     * Returns the craftArmorstand
     *
     * @return stand
     */
    private CraftArmorStand getCraftEntity() {
        return (CraftArmorStand) this.armorStand.getBukkitEntity();
    }
    /**
     * Closes this resource, relinquishing any underlying resources.
     * This method is invoked automatically on objects managed by the
     * {@code try}-with-resources statement.
     *
     * @throws Exception if this resource cannot be closed
     */
    @Override
    public void close() throws Exception {
        // Destroy the client-side entity, then drop the player reference so
        // the closed instance does not pin the Player object.
        this.remove();
        this.player = null;
    }
}
| |
/*
* Copyright (C) 2012 Square, Inc.
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okhttp.internal;
import com.squareup.okhttp.Protocol;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import javax.net.ssl.SSLSocket;
import okio.ByteString;
/**
* Access to Platform-specific features necessary for SPDY and advanced TLS.
*
* <h3>ALPN and NPN</h3>
* This class uses TLS extensions ALPN and NPN to negotiate the upgrade from
* HTTP/1.1 (the default protocol to use with TLS on port 443) to either SPDY
* or HTTP/2.
*
* <p>NPN (Next Protocol Negotiation) was developed for SPDY. It is widely
* available and we support it on both Android (4.1+) and OpenJDK 7 (via the
* Jetty NPN-boot library). NPN is not yet available on Java 8.
*
* <p>ALPN (Application Layer Protocol Negotiation) is the successor to NPN. It
* has some technical advantages over NPN. ALPN first arrived in Android 4.4,
* but that release suffers a <a href="http://goo.gl/y5izPP">concurrency bug</a>
* so we don't use it. ALPN will be supported in the future.
*
* <p>On platforms that support both extensions, OkHttp will use both,
* preferring ALPN's result. Future versions of OkHttp will drop support for
* NPN.
*
* <h3>Deflater Sync Flush</h3>
* SPDY header compression requires a recent version of {@code
* DeflaterOutputStream} that is public API in Java 7 and callable via
* reflection in Android 4.1+.
*/
public class Platform {
  // Selected once at class load by findPlatform(); shared for the whole JVM.
  private static final Platform PLATFORM = findPlatform();
  // Cached reflective DeflaterOutputStream(OutputStream, Deflater, boolean)
  // constructor, resolved lazily by newDeflaterOutputStream.
  private Constructor<DeflaterOutputStream> deflaterConstructor;
public static Platform get() {
return PLATFORM;
}
/** Prefix used on custom headers. */
public String getPrefix() {
return "OkHttp";
}
public void logW(String warning) {
System.out.println(warning);
}
  // Platform hook: no-op here; subclasses may tag sockets (e.g. for Android
  // traffic accounting).
  public void tagSocket(Socket socket) throws SocketException {
  }
  // Platform hook: no-op here; subclasses undo whatever tagSocket applied.
  public void untagSocket(Socket socket) throws SocketException {
  }
public URI toUriLenient(URL url) throws URISyntaxException {
return url.toURI(); // this isn't as good as the built-in toUriLenient
}
/**
* Attempt a TLS connection with useful extensions enabled. This mode
* supports more features, but is less likely to be compatible with older
* HTTPS servers.
*/
public void enableTlsExtensions(SSLSocket socket, String uriHost) {
}
/**
* Attempt a secure connection with basic functionality to maximize
* compatibility. Currently this uses SSL 3.0.
*/
public void supportTlsIntolerantServer(SSLSocket socket) {
socket.setEnabledProtocols(new String[] {"SSLv3"});
}
/** Returns the negotiated protocol, or null if no protocol was negotiated. */
public ByteString getNpnSelectedProtocol(SSLSocket socket) {
return null;
}
/**
* Sets client-supported protocols on a socket to send to a server. The
* protocols are only sent if the socket implementation supports NPN.
*/
public void setNpnProtocols(SSLSocket socket, List<Protocol> npnProtocols) {
}
public void connectSocket(Socket socket, InetSocketAddress address,
int connectTimeout) throws IOException {
socket.connect(address, connectTimeout);
}
/**
* Returns a deflater output stream that supports SYNC_FLUSH for SPDY name
* value blocks. This throws an {@link UnsupportedOperationException} on
* Java 6 and earlier where there is no built-in API to do SYNC_FLUSH.
*/
public OutputStream newDeflaterOutputStream(OutputStream out, Deflater deflater,
boolean syncFlush) {
try {
Constructor<DeflaterOutputStream> constructor = deflaterConstructor;
if (constructor == null) {
constructor = deflaterConstructor = DeflaterOutputStream.class.getConstructor(
OutputStream.class, Deflater.class, boolean.class);
}
return constructor.newInstance(out, deflater, syncFlush);
} catch (NoSuchMethodException e) {
throw new UnsupportedOperationException("Cannot SPDY; no SYNC_FLUSH available");
} catch (InvocationTargetException e) {
throw e.getCause() instanceof RuntimeException ? (RuntimeException) e.getCause()
: new RuntimeException(e.getCause());
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new AssertionError();
}
}
/** Attempt to match the host runtime to a capable Platform implementation. */
private static Platform findPlatform() {
// Attempt to find Android 2.3+ APIs.
Class<?> openSslSocketClass;
Method setUseSessionTickets;
Method setHostname;
try {
try {
openSslSocketClass = Class.forName("com.android.org.conscrypt.OpenSSLSocketImpl");
} catch (ClassNotFoundException ignored) {
// Older platform before being unbundled.
openSslSocketClass = Class.forName(
"org.apache.harmony.xnet.provider.jsse.OpenSSLSocketImpl");
}
setUseSessionTickets = openSslSocketClass.getMethod("setUseSessionTickets", boolean.class);
setHostname = openSslSocketClass.getMethod("setHostname", String.class);
// Attempt to find Android 4.1+ APIs.
Method setNpnProtocols = null;
Method getNpnSelectedProtocol = null;
try {
setNpnProtocols = openSslSocketClass.getMethod("setNpnProtocols", byte[].class);
getNpnSelectedProtocol = openSslSocketClass.getMethod("getNpnSelectedProtocol");
} catch (NoSuchMethodException ignored) {
}
return new Android(openSslSocketClass, setUseSessionTickets, setHostname, setNpnProtocols,
getNpnSelectedProtocol);
} catch (ClassNotFoundException ignored) {
// This isn't an Android runtime.
} catch (NoSuchMethodException ignored) {
// This isn't Android 2.3 or better.
}
// Attempt to find the Jetty's NPN extension for OpenJDK.
try {
String npnClassName = "org.eclipse.jetty.npn.NextProtoNego";
Class<?> nextProtoNegoClass = Class.forName(npnClassName);
Class<?> providerClass = Class.forName(npnClassName + "$Provider");
Class<?> clientProviderClass = Class.forName(npnClassName + "$ClientProvider");
Class<?> serverProviderClass = Class.forName(npnClassName + "$ServerProvider");
Method putMethod = nextProtoNegoClass.getMethod("put", SSLSocket.class, providerClass);
Method getMethod = nextProtoNegoClass.getMethod("get", SSLSocket.class);
return new JdkWithJettyNpnPlatform(
putMethod, getMethod, clientProviderClass, serverProviderClass);
} catch (ClassNotFoundException ignored) {
// NPN isn't on the classpath.
} catch (NoSuchMethodException ignored) {
// The NPN version isn't what we expect.
}
return new Platform();
}
/**
* Android 2.3 or better. Version 2.3 supports TLS session tickets and server
* name indication (SNI). Versions 4.1 supports NPN.
*/
private static class Android extends Platform {
// Non-null.
protected final Class<?> openSslSocketClass;
private final Method setUseSessionTickets;
private final Method setHostname;
// Non-null on Android 4.1+.
private final Method setNpnProtocols;
private final Method getNpnSelectedProtocol;
private Android(Class<?> openSslSocketClass, Method setUseSessionTickets, Method setHostname,
Method setNpnProtocols, Method getNpnSelectedProtocol) {
this.openSslSocketClass = openSslSocketClass;
this.setUseSessionTickets = setUseSessionTickets;
this.setHostname = setHostname;
this.setNpnProtocols = setNpnProtocols;
this.getNpnSelectedProtocol = getNpnSelectedProtocol;
}
@Override public void connectSocket(Socket socket, InetSocketAddress address,
int connectTimeout) throws IOException {
try {
socket.connect(address, connectTimeout);
} catch (SecurityException se) {
// Before android 4.3, socket.connect could throw a SecurityException
// if opening a socket resulted in an EACCES error.
IOException ioException = new IOException("Exception in connect");
ioException.initCause(se);
throw ioException;
}
}
@Override public void enableTlsExtensions(SSLSocket socket, String uriHost) {
super.enableTlsExtensions(socket, uriHost);
if (!openSslSocketClass.isInstance(socket)) return;
try {
setUseSessionTickets.invoke(socket, true);
setHostname.invoke(socket, uriHost);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new AssertionError(e);
}
}
@Override public void setNpnProtocols(SSLSocket socket, List<Protocol> npnProtocols) {
if (setNpnProtocols == null) return;
if (!openSslSocketClass.isInstance(socket)) return;
try {
Object[] parameters = { concatLengthPrefixed(npnProtocols) };
setNpnProtocols.invoke(socket, parameters);
} catch (IllegalAccessException e) {
throw new AssertionError(e);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
}
}
@Override public ByteString getNpnSelectedProtocol(SSLSocket socket) {
if (getNpnSelectedProtocol == null) return null;
if (!openSslSocketClass.isInstance(socket)) return null;
try {
byte[] npnResult = (byte[]) getNpnSelectedProtocol.invoke(socket);
if (npnResult == null) return null;
return ByteString.of(npnResult);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new AssertionError(e);
}
}
}
/** OpenJDK 7 plus {@code org.mortbay.jetty.npn/npn-boot} on the boot class path. */
private static class JdkWithJettyNpnPlatform extends Platform {
private final Method getMethod;
private final Method putMethod;
private final Class<?> clientProviderClass;
private final Class<?> serverProviderClass;
public JdkWithJettyNpnPlatform(Method putMethod, Method getMethod, Class<?> clientProviderClass,
Class<?> serverProviderClass) {
this.putMethod = putMethod;
this.getMethod = getMethod;
this.clientProviderClass = clientProviderClass;
this.serverProviderClass = serverProviderClass;
}
@Override public void setNpnProtocols(SSLSocket socket, List<Protocol> npnProtocols) {
try {
List<String> names = new ArrayList<String>(npnProtocols.size());
for (int i = 0, size = npnProtocols.size(); i < size; i++) {
names.add(npnProtocols.get(i).name.utf8());
}
Object provider = Proxy.newProxyInstance(Platform.class.getClassLoader(),
new Class[] { clientProviderClass, serverProviderClass }, new JettyNpnProvider(names));
putMethod.invoke(null, socket, provider);
} catch (InvocationTargetException e) {
throw new AssertionError(e);
} catch (IllegalAccessException e) {
throw new AssertionError(e);
}
}
@Override public ByteString getNpnSelectedProtocol(SSLSocket socket) {
try {
JettyNpnProvider provider =
(JettyNpnProvider) Proxy.getInvocationHandler(getMethod.invoke(null, socket));
if (!provider.unsupported && provider.selected == null) {
Logger logger = Logger.getLogger("com.squareup.okhttp.OkHttpClient");
logger.log(Level.INFO,
"NPN callback dropped so SPDY is disabled. Is npn-boot on the boot class path?");
return null;
}
return provider.unsupported ? null : ByteString.encodeUtf8(provider.selected);
} catch (InvocationTargetException e) {
throw new AssertionError();
} catch (IllegalAccessException e) {
throw new AssertionError();
}
}
}
/**
* Handle the methods of NextProtoNego's ClientProvider and ServerProvider
* without a compile-time dependency on those interfaces.
*/
private static class JettyNpnProvider implements InvocationHandler {
/** This peer's supported protocols. */
private final List<String> protocols;
/** Set when remote peer notifies NPN is unsupported. */
private boolean unsupported;
/** The protocol the client selected. */
private String selected;
public JettyNpnProvider(List<String> protocols) {
this.protocols = protocols;
}
@Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
Class<?> returnType = method.getReturnType();
if (args == null) {
args = Util.EMPTY_STRING_ARRAY;
}
if (methodName.equals("supports") && boolean.class == returnType) {
return true; // Client supports NPN.
} else if (methodName.equals("unsupported") && void.class == returnType) {
this.unsupported = true; // Remote peer doesn't support NPN.
return null;
} else if (methodName.equals("protocols") && args.length == 0) {
return protocols; // Server advertises these protocols.
} else if (methodName.equals("selectProtocol") // Called when client.
&& String.class == returnType
&& args.length == 1
&& (args[0] == null || args[0] instanceof List)) {
List<String> serverProtocols = (List) args[0];
// Pick the first protocol the server advertises and client knows.
for (int i = 0, size = serverProtocols.size(); i < size; i++) {
if (protocols.contains(serverProtocols.get(i))) {
return selected = serverProtocols.get(i);
}
}
// On no intersection, try client's first protocol.
return selected = protocols.get(0);
} else if (methodName.equals("protocolSelected") && args.length == 1) {
this.selected = (String) args[0]; // Client selected this protocol.
return null;
} else {
return method.invoke(this, args);
}
}
}
/**
* Concatenation of 8-bit, length prefixed protocol names.
*
* http://tools.ietf.org/html/draft-agl-tls-nextprotoneg-04#page-4
*/
static byte[] concatLengthPrefixed(List<Protocol> protocols) {
int size = 0;
for (Protocol protocol : protocols) {
size += protocol.name.size() + 1; // add a byte for 8-bit length prefix.
}
byte[] result = new byte[size];
int pos = 0;
for (Protocol protocol : protocols) {
int nameSize = protocol.name.size();
result[pos++] = (byte) nameSize;
// toByteArray allocates an array, but this is only called on new connections.
System.arraycopy(protocol.name.toByteArray(), 0, result, pos, nameSize);
pos += nameSize;
}
return result;
}
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.ofbiz.service.mail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.mail.Address;
import javax.mail.BodyPart;
import javax.mail.MessagingException;
import javax.mail.Multipart;
import javax.mail.Part;
import javax.mail.internet.MimeMessage;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.service.GenericServiceException;
import org.ofbiz.service.LocalDispatcher;
import org.ofbiz.service.ServiceUtil;
import org.w3c.dom.Element;
@SuppressWarnings("serial")
public class ServiceMcaCondition implements java.io.Serializable {
    private static final Debug.OfbizLogger module = Debug.getOfbizLogger(java.lang.invoke.MethodHandles.lookup().lookupClass());

    public static final int CONDITION_FIELD = 1;
    public static final int CONDITION_HEADER = 2;
    public static final int CONDITION_SERVICE = 3;

    protected String serviceName = null;
    protected String headerName = null;
    protected String fieldName = null;
    protected String operator = null;
    protected String value = null;

    /**
     * Builds a mail condition from its XML definition.
     *
     * @param condElement the condition element carrying the attributes
     * @param condType one of {@link #CONDITION_FIELD}, {@link #CONDITION_HEADER},
     *        {@link #CONDITION_SERVICE}; any other value only logs a warning
     */
    public ServiceMcaCondition(Element condElement, int condType) {
        switch (condType) {
            case CONDITION_FIELD:
                // fields: from|to|subject|body|sent-date|received-date
                this.fieldName = condElement.getAttribute("field-name");
                // operators: equals|not-equals|empty|not-empty|matches|not-matches
                this.operator = condElement.getAttribute("operator");
                // value to compare
                this.value = condElement.getAttribute("value");
                break;
            case CONDITION_HEADER:
                // free form header name
                this.headerName = condElement.getAttribute("header-name");
                // operators: equals|not-equals|empty|not-empty|matches|not-matches
                this.operator = condElement.getAttribute("operator");
                // value to compare
                this.value = condElement.getAttribute("value");
                break;
            case CONDITION_SERVICE:
                this.serviceName = condElement.getAttribute("service-name");
                break;
            default:
                Debug.logWarning("There was an error in the switch-case in ServiceMcaCondition", module);
        }
    }

    /**
     * Evaluates this condition against a mail message. Exactly one of
     * serviceName/headerName/fieldName drives the evaluation, checked in that order.
     *
     * @param dispatcher dispatcher used to run a condition service
     * @param messageWrapper the incoming mail message
     * @param userLogin user passed through to the condition service
     * @return true when the condition passes; false on any error
     */
    public boolean eval(LocalDispatcher dispatcher, MimeMessageWrapper messageWrapper, GenericValue userLogin) {
        if (serviceName != null) {
            // Delegate the decision to a service that returns a "conditionReply" Boolean.
            Map<String, Object> result = null;
            try {
                result = dispatcher.runSync(serviceName, UtilMisc.<String, Object>toMap("messageWrapper", messageWrapper, "userLogin", userLogin));
            } catch (GenericServiceException e) {
                Debug.logError(e, module);
                return false;
            }
            if (result == null) {
                Debug.logError("Service MCA Condition Service [" + serviceName + "] returned null!", module);
                return false;
            }
            if (ServiceUtil.isError(result)) {
                Debug.logError(ServiceUtil.getErrorMessage(result), module);
                return false;
            }
            Boolean reply = (Boolean) result.get("conditionReply");
            // Missing reply defaults to false.
            return reply != null && reply;
        } else if (headerName != null) {
            // Compare against every value of the named header.
            String[] headerValues = null;
            try {
                headerValues = messageWrapper.getMessage().getHeader(headerName);
            } catch (MessagingException e) {
                Debug.logError(e, module);
            }
            return checkValues(headerValues);
        } else if (fieldName != null) {
            // Compare against the named message field (to/from/subject/body/...).
            String[] fieldValues = null;
            try {
                fieldValues = this.getFieldValue(messageWrapper.getMessage(), fieldName);
            } catch (MessagingException | IOException e) {
                Debug.logError(e, module);
            }
            return checkValues(fieldValues);
        }
        // No condition configured: never passes.
        return false;
    }

    /**
     * Applies {@code operator}/{@code value} to the extracted values. Mirrors the
     * original duplicated loops for header and field conditions exactly:
     * "equals"/"matches"/"not-empty" short-circuit on the first hit, while
     * "not-equals"/"not-matches" let the last examined value decide; "empty"
     * passes only when no values were extracted at all.
     *
     * @param values extracted header or field values; may be null
     * @return true when the operator accepts the values
     */
    private boolean checkValues(String[] values) {
        boolean passedCondition = false;
        if (values != null) {
            for (String candidate : values) {
                if ("equals".equals(operator)) {
                    if (candidate.equals(value)) {
                        passedCondition = true;
                        break;
                    }
                } else if ("not-equals".equals(operator)) {
                    passedCondition = !candidate.equals(value);
                } else if ("matches".equals(operator)) {
                    if (candidate.matches(value)) {
                        passedCondition = true;
                        break;
                    }
                } else if ("not-matches".equals(operator)) {
                    passedCondition = !candidate.matches(value);
                } else if ("not-empty".equals(operator)) {
                    passedCondition = true;
                    break;
                }
            }
        } else if ("empty".equals(operator)) {
            passedCondition = true;
        }
        return passedCondition;
    }

    /**
     * Converts an address array to its string forms.
     *
     * @param addrs addresses; may be null
     * @return one string per address, or null when {@code addrs} is null
     */
    private static String[] addressesToStrings(Address[] addrs) {
        if (addrs == null) {
            return null;
        }
        String[] values = new String[addrs.length];
        for (int i = 0; i < addrs.length; i++) {
            values[i] = addrs[i].toString();
        }
        return values;
    }

    /**
     * Extracts the values of a named message field.
     *
     * @param message the mail message
     * @param fieldName one of to|cc|bcc|from|subject|send-date|received-date|body
     * @return the extracted values, or null for an unknown field or absent addresses
     * @throws MessagingException on mail access errors
     * @throws IOException on body content errors
     */
    protected String[] getFieldValue(MimeMessage message, String fieldName) throws MessagingException, IOException {
        if ("to".equals(fieldName)) {
            return addressesToStrings(message.getRecipients(MimeMessage.RecipientType.TO));
        } else if ("cc".equals(fieldName)) {
            return addressesToStrings(message.getRecipients(MimeMessage.RecipientType.CC));
        } else if ("bcc".equals(fieldName)) {
            return addressesToStrings(message.getRecipients(MimeMessage.RecipientType.BCC));
        } else if ("from".equals(fieldName)) {
            return addressesToStrings(message.getFrom());
        } else if ("subject".equals(fieldName)) {
            return new String[] { message.getSubject() };
        } else if ("send-date".equals(fieldName)) {
            // NOTE(review): getSentDate() may return null per JavaMail, which would NPE
            // here — behavior preserved from the original; confirm upstream guarantees.
            return new String[] { message.getSentDate().toString() };
        } else if ("received-date".equals(fieldName)) {
            // NOTE(review): same null-date caveat as send-date above.
            return new String[] { message.getReceivedDate().toString() };
        } else if ("body".equals(fieldName)) {
            List<String> bodyParts = this.getBodyText(message);
            return bodyParts.toArray(new String[bodyParts.size()]);
        }
        return null;
    }

    /**
     * Recursively collects all String body parts of a message part.
     *
     * @param part the part to inspect
     * @return text content of this part and all nested parts; empty for non-text content
     * @throws MessagingException on mail access errors
     * @throws IOException on content access errors
     */
    private List<String> getBodyText(Part part) throws MessagingException, IOException {
        Object c = part.getContent();
        if (c instanceof String) {
            return UtilMisc.toList((String) c);
        } else if (c instanceof Multipart) {
            List<String> textContent = new ArrayList<>(); // SCIPIO: switched to ArrayList
            int count = ((Multipart) c).getCount();
            for (int i = 0; i < count; i++) {
                BodyPart bp = ((Multipart) c).getBodyPart(i);
                textContent.addAll(this.getBodyText(bp));
            }
            return textContent;
        } else {
            return new ArrayList<>(); // SCIPIO: switched to ArrayList
        }
    }
}
| |
package org.basex.data;
import static org.basex.query.func.Function.*;
import java.util.*;
import java.util.List;
import org.basex.*;
import org.basex.core.*;
import org.basex.core.cmd.*;
import org.junit.*;
import org.junit.Test;
import org.junit.runner.*;
import org.junit.runners.*;
import org.junit.runners.Parameterized.*;
/**
* This class tests the {@link MainOptions#UPDINDEX} and {@link MainOptions#AUTOOPTIMIZE} options.
*
* @author BaseX Team 2005-20, BSD License
* @author Christian Gruen
*/
@RunWith(Parameterized.class)
public final class IndexTest extends SandboxTest {
  /** Main memory flag. */
  @Parameter
  public Object mainmem;

  /**
   * Mainmem parameters: each test runs once with MAINMEM off and once with it on.
   * @return parameters
   */
  @Parameters
  public static Collection<Object[]> params() {
    final List<Object[]> params = new ArrayList<>();
    params.add(new Object[] { false });
    params.add(new Object[] { true });
    return params;
  }

  /**
   * Initializes a test: applies the main-memory flag for this run.
   */
  @Before public void before() {
    set(MainOptions.MAINMEM, mainmem);
  }

  /**
   * Finalize test: drops the database and resets all toggled options to defaults.
   */
  @After public void after() {
    execute(new DropDB(NAME));
    set(MainOptions.TOKENINDEX, false);
    set(MainOptions.UPDINDEX, false);
    set(MainOptions.AUTOOPTIMIZE, false);
    set(MainOptions.MAINMEM, false);
  }

  /**
   * Repeated replaces with UPDINDEX keep the text index valid and up to date.
   */
  @Test public void updindexText() {
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME));
    for(int i = 0; i < 5; i++) {
      execute(new Replace("x.xml", "<x><a>A</a><a>B</a></x>"));
    }
    query(_DB_TEXT.args(NAME, "A"), "A");
    query(_DB_TEXT.args(NAME, "B"), "B");
    query(_DB_INFO.args(NAME) + "//textindex/text()", true);
  }

  /**
   * Alternating replaces: stale text entries ("B") must disappear from the index.
   */
  @Test public void updindexText2() {
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME));
    for(int i = 0; i < 5; i++) {
      execute(new Replace("x.xml", "<x><a>A</a><a>B</a></x>"));
      execute(new Replace("x.xml", "<x><a>A</a><a>C</a></x>"));
    }
    query(_DB_TEXT.args(NAME, "A"), "A");
    query(_DB_TEXT.args(NAME, "C"), "C");
    query(_DB_TEXT.args(NAME, "B"), "");
  }

  /**
   * Text index keeps entries of different lengths across repeated replaces.
   */
  @Test public void updindexText3() {
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME));
    for(int i = 0; i < 5; i++) {
      execute(new Replace("x.xml", "<x><a>A</a><a>BC</a><a>DEF</a></x>"));
    }
    query(_DB_TEXT.args(NAME, "A"), "A");
    query(_DB_TEXT.args(NAME, "BC"), "BC");
    query(_DB_TEXT.args(NAME, "DEF"), "DEF");
  }

  /**
   * After replacing documents with attribute-free ones, the attribute index is empty.
   */
  @Test public void updindexAttribute() {
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME));
    for(int i = 0; i < 5; i++) {
      execute(new Add("a", "<x c='c'/>"));
      execute(new Add("a", "<x a='a' b='b'/>"));
      execute(new Replace("a", "<x/>"));
    }
    query(_DB_ATTRIBUTE.args(NAME, "a"), "");
    query(_DB_ATTRIBUTE.args(NAME, "b"), "");
    query(_DB_ATTRIBUTE.args(NAME, "c"), "");
  }

  /**
   * Incremental token index: lookups reflect adds and replaces, and the index survives.
   */
  @Test public void updindexToken() {
    set(MainOptions.UPDINDEX, true);
    set(MainOptions.TOKENINDEX, true);
    execute(new CreateDB(NAME));
    execute(new Add("a", "<x c='c'/>"));
    query(_DB_TOKEN.args(NAME, "a"), "");
    query("data(" + _DB_TOKEN.args(NAME, "c") + ')', "c");
    for(int i = 0; i < 5; i++) {
      execute(new Add("a", "<x c='c'/>"));
      execute(new Add("a", "<x a='a' b='b'/>"));
      execute(new Replace("a", "<x/>"));
    }
    query(_DB_TOKEN.args(NAME, "a"), "");
    query(_DB_TOKEN.args(NAME, "b"), "");
    query(_DB_TOKEN.args(NAME, "c"), "");
    query(_DB_INFO.args(NAME) + "//tokenindex/text()", true);
  }

  /**
   * Two replace-node updates in a single query must succeed with UPDINDEX on.
   */
  @Test public void updindexReplace1() {
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME, "<X><A>q</A><B>q</B></X>"));
    query("replace node /X/A with 'x', replace node /X/B with 'y'", "");
  }

  /**
   * Repeated document replaces with shifting attribute values must not fail.
   */
  @Test public void updindexReplace2() {
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME));
    execute(new Replace("A", "<X a='?' b='a' c='1'/>"));
    execute(new Replace("A", "<X a='?' b='b' c='2'/>"));
    execute(new Replace("A", "<X/>"));
  }

  /**
   * Incremental index survives close/reopen cycles between updates (disk databases only).
   */
  @Test public void updindexOpenClose1() {
    // Main-memory databases cannot be reopened, so skip the open/close steps there.
    final boolean openClose = !(Boolean) mainmem;
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME));
    for(int i = 0; i < 5; i++) {
      if(openClose) execute(new Open(NAME));
      execute(new Replace("x.xml", "<x><a>A</a><a>BC</a></x>"));
      if(openClose) execute(new Close());
    }
    query(_DB_TEXT.args(NAME, "A"), "A");
    query(_DB_TEXT.args(NAME, "BC"), "BC");
  }

  /**
   * Deleting a document after a close/reopen must not corrupt the incremental index.
   */
  @Test public void updindexOpenClose2() {
    // Main-memory databases cannot be reopened, so skip the open/close steps there.
    final boolean openClose = !(Boolean) mainmem;
    set(MainOptions.UPDINDEX, true);
    execute(new CreateDB(NAME));
    execute(new Replace("A", "<a/>"));
    execute(new Replace("B", "<a a='1'/>"));
    execute(new Replace("C", "<a a='1'/>"));
    execute(new Replace("A", "<a a='1'/>"));
    if(openClose) {
      execute(new Close());
      execute(new Open(NAME));
    }
    execute(new Delete("A"));
  }

  /**
   * AUTOOPTIMIZE keeps indexes up to date after each update; with it off,
   * updates invalidate the index until an explicit Optimize.
   */
  @Test public void autooptimize() {
    set(MainOptions.AUTOOPTIMIZE, true);
    execute(new CreateDB(NAME));
    query(_DB_INFO.args(NAME) + "//textindex/text()", true);
    execute(new Replace("x.xml", "<a>A</a>"));
    query(_DB_INFO.args(NAME) + "//textindex/text()", true);
    query(_DB_REPLACE.args(NAME, "x.xml", " <a>B</a>"));
    query(_DB_INFO.args(NAME) + "//textindex/text()", true);
    set(MainOptions.AUTOOPTIMIZE, false);
    execute(new Optimize());
    execute(new Replace("x.xml", "<a>C</a>"));
    query(_DB_INFO.args(NAME) + "//textindex/text()", false);
    execute(new Optimize());
    query(_DB_INFO.args(NAME) + "//textindex/text()", true);
    query(_DB_REPLACE.args(NAME, "x.xml", " <a>D</a>"));
    query(_DB_INFO.args(NAME) + "//textindex/text()", false);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Name.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: SNAPSHOT Built on : Dec 21, 2007 (04:03:30 LKT)
*/
package org.apache.axis2.databinding.types.xsd;
import javax.xml.stream.XMLStreamWriter;
/**
* Name bean class
*/
public class Name
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = Name
Namespace URI = http://www.w3.org/2001/XMLSchema
Namespace Prefix = ns1
*/
/**
 * Returns the prefix to bind to {@code namespace}: the conventional "xsd"
 * prefix for the XML Schema namespace, otherwise a freshly generated
 * unique prefix.
 */
private static java.lang.String generatePrefix(java.lang.String namespace) {
    return namespace.equals("http://www.w3.org/2001/XMLSchema")
            ? "xsd"
            : org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
* field for Name
*/
protected org.apache.axis2.databinding.types.Name localName ;
/**
 * Returns the wrapped {@link org.apache.axis2.databinding.types.Name} value.
 *
 * @return the current value; null when never set
 */
public org.apache.axis2.databinding.types.Name getName() {
    return this.localName;
}
/**
 * Stores {@code param} as the wrapped Name value.
 *
 * @param param the new value; may be null
 */
public void setName(org.apache.axis2.databinding.types.Name param) {
    localName = param;
}
/**
 * Returns the string form of the wrapped value. Throws
 * NullPointerException when no value has been set (preserved behavior).
 */
@Override
public java.lang.String toString() {
    return this.localName.toString();
}
/**
 * isReaderMTOMAware
 * @return true if the reader supports MTOM
 */
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
    try {
        return java.lang.Boolean.TRUE.equals(
                reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
    } catch (java.lang.IllegalArgumentException e) {
        // The reader does not recognize the property at all.
        return false;
    }
}
/**
 * Wraps this bean in an OMElement backed by an ADB data source.
 *
 * @param parentQName the element name to serialize under
 * @param factory factory used to create the element
 * @return org.apache.axiom.om.OMElement
 */
public org.apache.axiom.om.OMElement getOMElement(
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException {
    return factory.createOMElement(
            new org.apache.axis2.databinding.ADBDataSource(this, parentQName), parentQName);
}
/** Serializes this bean under {@code parentQName} without an xsi:type attribute. */
public void serialize(final javax.xml.namespace.QName parentQName, XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
    // Delegate to the full form; serializeType=false suppresses xsi:type.
    this.serialize(parentQName, xmlWriter, false);
}
/**
 * Serializes this bean as an element named {@code parentQName}, optionally
 * decorated with an {@code xsi:type="…:Name"} attribute, with the wrapped
 * value written as character content.
 *
 * @param parentQName the element name to write
 * @param xmlWriter target writer
 * @param serializeType when true, also write the xsi:type attribute
 * @throws javax.xml.stream.XMLStreamException on writer errors
 * @throws org.apache.axis2.databinding.ADBException when the value is null
 */
public void serialize(final javax.xml.namespace.QName parentQName,
        XMLStreamWriter xmlWriter,
        boolean serializeType)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
    java.lang.String prefix = null;
    java.lang.String namespace = null;
    prefix = parentQName.getPrefix();
    namespace = parentQName.getNamespaceURI();
    if ((namespace != null) && (namespace.trim().length() > 0)) {
        java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
        if (writerPrefix != null) {
            // Namespace already bound in scope: let the writer pick the prefix.
            xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
        } else {
            // Unbound namespace: choose a prefix and declare it on this element.
            if (prefix == null) {
                prefix = generatePrefix(namespace);
            }
            xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
    } else {
        // No namespace: write an unqualified element.
        xmlWriter.writeStartElement(parentQName.getLocalPart());
    }
    if (serializeType) {
        // Write xsi:type, qualifying "Name" with the schema namespace prefix if any.
        java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://www.w3.org/2001/XMLSchema");
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    namespacePrefix+":Name",
                    xmlWriter);
        } else {
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    "Name",
                    xmlWriter);
        }
    }
    if (localName==null){
        // A null value cannot be serialized; this element is not nillable here.
        throw new org.apache.axis2.databinding.ADBException("Name cannot be null!!");
    }else{
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localName));
    }
    xmlWriter.writeEndElement();
}
/**
 * Util method to write an attribute with the ns prefix: binds {@code prefix}
 * to {@code namespace} first if the writer has no binding for it yet.
 */
private void writeAttribute(java.lang.String prefix, java.lang.String namespace,
        java.lang.String attName, java.lang.String attValue,
        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final boolean namespaceUnbound = xmlWriter.getPrefix(namespace) == null;
    if (namespaceUnbound) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace, attName, attValue);
}
/**
 * Util method to write an attribute without a caller-supplied prefix: a
 * non-empty namespace is registered (and assigned a prefix) before writing.
 */
private void writeAttribute(java.lang.String namespace, java.lang.String attName,
        java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
    if (!namespace.equals("")) {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attValue);
    } else {
        xmlWriter.writeAttribute(attName, attValue);
    }
}
/**
 * Writes an attribute whose value is a QName, first making sure the QName's
 * own namespace has a prefix bound on the writer.
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
        javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
    java.lang.String qnameNamespace = qname.getNamespaceURI();
    java.lang.String qnamePrefix = xmlWriter.getPrefix(qnameNamespace);
    if (qnamePrefix == null) {
        qnamePrefix = registerPrefix(xmlWriter, qnameNamespace);
    }
    // Qualify the local part only when a non-empty prefix is in effect.
    java.lang.String attributeValue = qnamePrefix.trim().length() > 0
            ? qnamePrefix + ":" + qname.getLocalPart()
            : qname.getLocalPart();
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}
/**
 * Writes a QName as character data, declaring a prefix for its namespace on
 * the writer first when none is bound.
 */
private void writeQName(javax.xml.namespace.QName qname,
        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI == null) {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        return;
    }
    java.lang.String boundPrefix = xmlWriter.getPrefix(namespaceURI);
    if (boundPrefix == null) {
        boundPrefix = generatePrefix(namespaceURI);
        xmlWriter.writeNamespace(boundPrefix, namespaceURI);
        xmlWriter.setPrefix(boundPrefix, namespaceURI);
    }
    java.lang.String text = org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname);
    // An empty/blank prefix means the default namespace: write the bare value.
    xmlWriter.writeCharacters(boundPrefix.trim().length() > 0 ? boundPrefix + ":" + text : text);
}
/**
 * Writes an array of QNames as a single space-separated character-data run,
 * binding a prefix for each QName's namespace before any text is emitted.
 */
private void writeQNames(javax.xml.namespace.QName[] qnames,
        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the character data
        java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
        java.lang.String namespaceURI = null;
        java.lang.String prefix = null;
        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                // Separate successive QNames with a single space.
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    // No usable binding yet: declare a fresh prefix on the writer now.
                    prefix = generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix,namespaceURI);
                }
                if (prefix.trim().length() > 0){
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    // Default namespace in effect: no prefix on the value.
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}
/**
 * Ensures a prefix is registered for {@code namespace} on the writer.
 * If none is bound yet, a candidate prefix is generated and retried until it
 * does not collide with an existing binding, then declared and bound.
 *
 * @return the prefix now associated with the namespace
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix != null) {
        return prefix;
    }
    prefix = generatePrefix(namespace);
    javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();
    // Keep trying candidates until one is not already bound to some URI.
    for (java.lang.String uri = nsContext.getNamespaceURI(prefix);
         uri != null && uri.length() != 0;
         uri = nsContext.getNamespaceURI(prefix)) {
        prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }
    xmlWriter.writeNamespace(prefix, namespace);
    xmlWriter.setPrefix(prefix, namespace);
    return prefix;
}
/**
 * Databinding method returning a pull-parser (StAX reader) view of this
 * object's XML representation: a single element named {@code qName} whose text
 * content is the converted {@code localName} value.
 *
 * @throws org.apache.axis2.databinding.ADBException when the value is null
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException{
    if (localName == null) {
        throw new org.apache.axis2.databinding.ADBException("Name cannot be null!!");
    }
    java.util.ArrayList elementList = new java.util.ArrayList();
    java.util.ArrayList attribList = new java.util.ArrayList();
    // ELEMENT_TEXT marker followed by the text value => one text node.
    elementList.add(org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT);
    elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localName));
    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
 * Factory class that keeps the parse method for the {@code Name} simple type.
 * Generated Axis2 ADB code — structure intentionally mirrors other generated
 * factories in this file.
 */
public static class Factory{
    /**
     * Builds a {@code Name} from its lexical string form.
     * NOTE(review): the namespaceURI parameter is accepted for symmetry with
     * other generated factories but is not used for this simple type.
     */
    public static Name fromString(java.lang.String value,
                                  java.lang.String namespaceURI){
        Name returnValue = new Name();
        returnValue.setName(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToName(value));
        return returnValue;
    }
    /**
     * Builds a {@code Name} from element content, resolving a "prefix:value"
     * form against the reader's namespace context; content without a colon is
     * treated as having an empty namespace.
     */
    public static Name fromString(javax.xml.stream.XMLStreamReader xmlStreamReader,
                                  java.lang.String content) {
        if (content.indexOf(":") > -1){
            java.lang.String prefix = content.substring(0,content.indexOf(":"));
            java.lang.String namespaceUri = xmlStreamReader.getNamespaceContext().getNamespaceURI(prefix);
            return Name.Factory.fromString(content,namespaceUri);
        } else {
            return Name.Factory.fromString(content,"");
        }
    }
    /**
     * static method to create the object
     * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
     * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
     * Postcondition: If this object is an element, the reader is positioned at its end element
     * If this object is a complex type, the reader is positioned at the end element of its outer element
     */
    public static Name parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
        Name object =
                new Name();
        // NOTE(review): the locals below are part of the generated template and
        // are unused in this particular type's parse method.
        int event;
        java.lang.String nillableValue = null;
        java.lang.String prefix ="";
        java.lang.String namespaceuri ="";
        try {
            // Skip ignorable events until the first start/end element.
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();
            // An xsi:type attribute may redirect parsing to a derived type via
            // the extension mapper.
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                        "type");
                if (fullTypeName!=null){
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.indexOf(":") > -1){
                        nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix==null?"":nsPrefix;
                    java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                    if (!"Name".equals(type)){
                        //find namespace for the prefix
                        java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (Name)org.apache.axis2.databinding.types.xsd.ExtensionMapper.getTypeObject(
                                nsUri,type,reader);
                    }
                }
            }
            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            java.util.Vector handledAttributes = new java.util.Vector();
            // Consume the element's text content; convertToName stores the value.
            while(!reader.isEndElement()) {
                if (reader.isStartElement() || reader.hasText()){
                    if (reader.isStartElement() || reader.hasText()){
                        java.lang.String content = reader.getElementText();
                        object.setName(
                                org.apache.axis2.databinding.utils.ConverterUtil.convertToName(content));
                    } // End of if for expected property start element
                    else{
                        // A start element we are not expecting indicates an invalid parameter was passed
                        throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                    }
                } else {
                    reader.next();
                }
            } // end of while loop
        } catch (javax.xml.stream.XMLStreamException e) {
            // Wrap stream problems in a plain Exception, per generated-code convention.
            throw new java.lang.Exception(e);
        }
        return object;
    }
}//end of factory class
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.projectView.impl.nodes;
import com.intellij.codeInsight.navigation.NavigationUtil;
import com.intellij.icons.AllIcons;
import com.intellij.ide.bookmarks.Bookmark;
import com.intellij.ide.bookmarks.BookmarkManager;
import com.intellij.ide.projectView.PresentationData;
import com.intellij.ide.projectView.ProjectViewNode;
import com.intellij.ide.projectView.ProjectViewNodeDecorator;
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.ValidateableNode;
import com.intellij.navigation.NavigationItem;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vfs.VFileProperty;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.StatePreservingNavigatable;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.AstLoadingFilter;
import consulo.ide.IconDescriptor;
import consulo.ide.IconDescriptorUpdaters;
import consulo.logging.Logger;
import consulo.ui.image.Image;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Objects;
/**
 * Class for node descriptors based on PsiElements. Subclasses should define
 * method that extract PsiElement from Value.
 * <p>
 * All PSI access in this class is guarded: children and presentation updates
 * are computed under {@link AstLoadingFilter#disallowTreeLoading} and (for
 * updates) inside a read action, and every entry point re-checks PSI validity.
 *
 * @param <Value> Value of node descriptor
 */
public abstract class AbstractPsiBasedNode<Value> extends ProjectViewNode<Value> implements ValidateableNode, StatePreservingNavigatable {
  private static final Logger LOG = Logger.getInstance(AbstractPsiBasedNode.class);

  protected AbstractPsiBasedNode(final Project project, @Nonnull Value value, final ViewSettings viewSettings) {
    super(project, value, viewSettings);
  }

  /** Returns the PsiElement this node represents, or null if none is available. */
  @Nullable
  protected abstract PsiElement extractPsiFromValue();

  /** Computes the child nodes; null is treated as "no children" by {@link #getChildren()}. */
  @Nullable
  protected abstract Collection<AbstractTreeNode> getChildrenImpl();

  /** Subclass hook that fills in the node's presentation (text, icons, etc.). */
  protected abstract void updateImpl(@Nonnull PresentationData data);

  @Override
  @Nonnull
  public final Collection<? extends AbstractTreeNode> getChildren() {
    // Forbid AST loading while building the tree to keep the project view responsive.
    return AstLoadingFilter.disallowTreeLoading(this::doGetChildren);
  }

  @Nonnull
  private Collection<? extends AbstractTreeNode> doGetChildren() {
    final PsiElement psiElement = extractPsiFromValue();
    if (psiElement == null) {
      return new ArrayList<>();
    }
    if (!psiElement.isValid()) {
      // Invalid PSI here indicates a stale node; log loudly but degrade gracefully.
      LOG.error(new IllegalStateException("Node contains invalid PSI: " + "\n" + getClass() + " [" + this + "]" + "\n" + psiElement.getClass() + " [" + psiElement + "]"));
      return Collections.emptyList();
    }
    final Collection<AbstractTreeNode> children = getChildrenImpl();
    return children != null ? children : Collections.emptyList();
  }

  @Override
  public boolean isValid() {
    final PsiElement psiElement = extractPsiFromValue();
    return psiElement != null && psiElement.isValid();
  }

  /**
   * Whether the read-only badge should be shown for this node's icon.
   * True only when the parent represents a directory (or a module).
   */
  protected boolean isMarkReadOnly() {
    final AbstractTreeNode<?> parent = getParent();
    if (parent == null) {
      return false;
    }
    if (parent instanceof AbstractPsiBasedNode) {
      final PsiElement psiElement = ((AbstractPsiBasedNode<?>)parent).extractPsiFromValue();
      return psiElement instanceof PsiDirectory;
    }
    final Object parentValue = parent.getValue();
    return parentValue instanceof PsiDirectory || parentValue instanceof Module;
  }

  @Override
  public FileStatus getFileStatus() {
    return computeFileStatus(getVirtualFileForValue(), Objects.requireNonNull(getProject()));
  }

  /** Returns the VCS status for the file, or NOT_CHANGED when there is no file. */
  protected static FileStatus computeFileStatus(@Nullable VirtualFile virtualFile, @Nonnull Project project) {
    if (virtualFile == null) {
      return FileStatus.NOT_CHANGED;
    }
    return FileStatusManager.getInstance(project).getStatus(virtualFile);
  }

  @Nullable
  private VirtualFile getVirtualFileForValue() {
    PsiElement psiElement = extractPsiFromValue();
    if (psiElement == null) {
      return null;
    }
    return PsiUtilCore.getVirtualFile(psiElement);
  }

  // Should be called in atomic action
  @Override
  public void update(@Nonnull final PresentationData data) {
    AstLoadingFilter.disallowTreeLoading(() -> doUpdate(data));
  }

  private void doUpdate(@Nonnull PresentationData data) {
    // PSI may only be queried inside a read action.
    ApplicationManager.getApplication().runReadAction(() -> {
      if (!validate()) {
        return;
      }
      final PsiElement value = extractPsiFromValue();
      LOG.assertTrue(value.isValid());
      int flags = getIconableFlags();
      try {
        Image icon = IconDescriptorUpdaters.getIcon(value, flags);
        data.setIcon(icon);
      }
      catch (IndexNotReadyException ignored) {
        // Indices unavailable (dumb mode) — skip the icon for now.
      }
      data.setPresentableText(myName);
      try {
        if (isDeprecated()) {
          data.setAttributesKey(CodeInsightColors.DEPRECATED_ATTRIBUTES);
        }
      }
      catch (IndexNotReadyException ignored) {
        // Deprecation check needs indices; ignore in dumb mode.
      }
      updateImpl(data);
      // Layer bookmark/lock/symlink decorations on top of the base icon.
      data.setIcon(patchIcon(myProject, data.getIcon(), getVirtualFile()));
      for (ProjectViewNodeDecorator decorator : ProjectViewNodeDecorator.EP_NAME.getExtensionList(myProject)) {
        decorator.decorate(AbstractPsiBasedNode.this, data);
      }
    });
  }

  /**
   * Adds file-state decorations to an icon: bookmark badge, lock overlay for
   * read-only files, and a symlink overlay.
   */
  @Nullable
  public static Image patchIcon(@Nonnull Project project, @Nullable Image original, @Nullable VirtualFile file) {
    if (file == null || original == null) return original;
    IconDescriptor iconDescriptor = new IconDescriptor(original);
    final Bookmark bookmarkAtFile = BookmarkManager.getInstance(project).findFileBookmark(file);
    if (bookmarkAtFile != null) {
      iconDescriptor.setRightIcon(bookmarkAtFile.getIcon(false));
    }
    if (!file.isWritable()) {
      iconDescriptor.addLayerIcon(AllIcons.Nodes.Locked);
    }
    if (file.is(VFileProperty.SYMLINK)) {
      iconDescriptor.addLayerIcon(AllIcons.Nodes.Symlink);
    }
    return iconDescriptor.toIcon();
  }

  @Iconable.IconFlags
  protected int getIconableFlags() {
    int flags = 0;
    if (isMarkReadOnly()) {
      flags |= Iconable.ICON_FLAG_READ_STATUS;
    }
    return flags;
  }

  /** Subclasses may override to render the node with deprecated-element styling. */
  protected boolean isDeprecated() {
    return false;
  }

  @Override
  public boolean contains(@Nonnull final VirtualFile file) {
    final PsiElement psiElement = extractPsiFromValue();
    if (psiElement == null || !psiElement.isValid()) {
      return false;
    }
    final PsiFile containingFile = psiElement.getContainingFile();
    if (containingFile == null) {
      return false;
    }
    final VirtualFile valueFile = containingFile.getVirtualFile();
    return file.equals(valueFile);
  }

  /** Returns the PSI element as a NavigationItem when it implements that interface. */
  @Nullable
  public NavigationItem getNavigationItem() {
    final PsiElement psiElement = extractPsiFromValue();
    return psiElement instanceof NavigationItem ? (NavigationItem)psiElement : null;
  }

  @Override
  public void navigate(boolean requestFocus, boolean preserveState) {
    if (canNavigate()) {
      if (requestFocus || preserveState) {
        NavigationUtil.openFileWithPsiElement(extractPsiFromValue(), requestFocus, requestFocus);
      }
      else {
        // canNavigate() returned true, so getNavigationItem() is non-null here.
        getNavigationItem().navigate(requestFocus);
      }
    }
  }

  @Override
  public void navigate(boolean requestFocus) {
    navigate(requestFocus, false);
  }

  @Override
  public boolean canNavigate() {
    final NavigationItem item = getNavigationItem();
    return item != null && item.canNavigate();
  }

  @Override
  public boolean canNavigateToSource() {
    final NavigationItem item = getNavigationItem();
    return item != null && item.canNavigateToSource();
  }

  /** Optional tooltip text for the node; null means no tooltip. */
  @Nullable
  protected String calcTooltip() {
    return null;
  }

  @Override
  public boolean validate() {
    // Drop the value when its PSI became invalid so stale nodes disappear.
    final PsiElement psiElement = extractPsiFromValue();
    if (psiElement == null || !psiElement.isValid()) {
      setValue(null);
    }
    return getValue() != null;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.groupby;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.druid.collections.CloseableDefaultBlockingPool;
import org.apache.druid.collections.CloseableStupidPool;
import org.apache.druid.collections.ReferenceCountingResourceHolder;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.DruidProcessingConfig;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryDataSource;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@RunWith(Parameterized.class)
public class GroupByQueryMergeBufferTest extends InitializedNullHandlingTest
{
  // Per-query timeout in milliseconds; applied via the query context.
  private static final long TIMEOUT = 5000;

  /**
   * Blocking pool that records the minimum number of buffers remaining at any
   * point, so tests can assert how many merge buffers a query actually used.
   */
  private static class TestBlockingPool extends CloseableDefaultBlockingPool<ByteBuffer>
  {
    // Low-water mark of available buffers observed across takeBatch calls.
    private int minRemainBufferNum;
    TestBlockingPool(Supplier<ByteBuffer> generator, int limit)
    {
      super(generator, limit);
      minRemainBufferNum = limit;
    }
    @Override
    public List<ReferenceCountingResourceHolder<ByteBuffer>> takeBatch(final int maxElements, final long timeout)
    {
      final List<ReferenceCountingResourceHolder<ByteBuffer>> holder = super.takeBatch(maxElements, timeout);
      final int poolSize = getPoolSize();
      if (minRemainBufferNum > poolSize) {
        minRemainBufferNum = poolSize;
      }
      return holder;
    }
    // Reset the low-water mark between tests (see setup()).
    void resetMinRemainBufferNum()
    {
      minRemainBufferNum = PROCESSING_CONFIG.getNumMergeBuffers();
    }
    int getMinRemainBufferNum()
    {
      return minRemainBufferNum;
    }
  }

  // Fixed processing config: 4 merge buffers, single-threaded, 10 MiB buffers.
  // The assertions below on pool usage depend on these exact values.
  private static final DruidProcessingConfig PROCESSING_CONFIG = new DruidProcessingConfig()
  {
    @Override
    public String getFormatString()
    {
      return null;
    }
    @Override
    public int intermediateComputeSizeBytes()
    {
      return 10 * 1024 * 1024;
    }
    @Override
    public int getNumMergeBuffers()
    {
      return 4;
    }
    @Override
    public int getNumThreads()
    {
      return 1;
    }
  };

  /** Builds a groupBy runner factory wired to the instrumented merge-buffer pool. */
  private static GroupByQueryRunnerFactory makeQueryRunnerFactory(
      final ObjectMapper mapper,
      final GroupByQueryConfig config
  )
  {
    final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
    final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
        configSupplier,
        new GroupByStrategyV1(
            configSupplier,
            new GroupByQueryEngine(configSupplier, BUFFER_POOL),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER,
            BUFFER_POOL
        ),
        new GroupByStrategyV2(
            PROCESSING_CONFIG,
            configSupplier,
            BUFFER_POOL,
            MERGE_BUFFER_POOL,
            mapper,
            QueryRunnerTestHelper.NOOP_QUERYWATCHER
        )
    );
    final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(strategySelector);
    return new GroupByQueryRunnerFactory(strategySelector, toolChest);
  }

  private static final CloseableStupidPool<ByteBuffer> BUFFER_POOL = new CloseableStupidPool<>(
      "GroupByQueryEngine-bufferPool",
      () -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes())
  );
  // The instrumented pool under test; shared by all parameterized runs.
  private static final TestBlockingPool MERGE_BUFFER_POOL = new TestBlockingPool(
      () -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()),
      PROCESSING_CONFIG.getNumMergeBuffers()
  );
  private static final GroupByQueryRunnerFactory FACTORY = makeQueryRunnerFactory(
      GroupByQueryRunnerTest.DEFAULT_MAPPER,
      new GroupByQueryConfig()
      {
        @Override
        public String getDefaultStrategy()
        {
          return "v2";
        }
      }
  );

  private final QueryRunner<ResultRow> runner;

  @AfterClass
  public static void teardownClass()
  {
    // Release the direct ByteBuffers held by both pools.
    BUFFER_POOL.close();
    MERGE_BUFFER_POOL.close();
  }

  // One test run per underlying segment runner.
  @Parameters(name = "{0}")
  public static Collection<Object[]> constructorFeeder()
  {
    final List<Object[]> args = new ArrayList<>();
    for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(FACTORY)) {
      args.add(new Object[]{runner});
    }
    return args;
  }

  public GroupByQueryMergeBufferTest(QueryRunner<ResultRow> runner)
  {
    this.runner = FACTORY.mergeRunners(Execs.directExecutor(), ImmutableList.of(runner));
  }

  @Before
  public void setup()
  {
    // Clear the low-water mark so each test observes only its own buffer usage.
    MERGE_BUFFER_POOL.resetMinRemainBufferNum();
  }

  @Test
  public void testSimpleGroupBy()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    Assert.assertEquals(0, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // A flat groupBy needs only the mergeRunners buffer: 4 - 1 = 3 remain.
    Assert.assertEquals(3, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }

  @Test
  public void testNestedGroupBy()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(
            new QueryDataSource(
                GroupByQuery.builder()
                            .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                            .setGranularity(Granularities.ALL)
                            .setDimensions(new DefaultDimensionSpec("quality", "alias"))
                            .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
                            .build()
            )
        )
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    Assert.assertEquals(1, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // 1 buffer for the nested query plus 1 for mergeRunners: 4 - 2 = 2 remain.
    Assert.assertEquals(2, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }

  @Test
  public void testDoubleNestedGroupBy()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(
            new QueryDataSource(
                GroupByQuery.builder()
                            .setDataSource(
                                GroupByQuery.builder()
                                            .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
                                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                                            .setGranularity(Granularities.ALL)
                                            .setDimensions(
                                                new DefaultDimensionSpec("quality", "alias"),
                                                new DefaultDimensionSpec("market", null)
                                            )
                                            .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
                                            .build()
                            )
                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                            .setGranularity(Granularities.ALL)
                            .setDimensions(new DefaultDimensionSpec("quality", "alias"))
                            .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
                            .build()
            )
        )
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    Assert.assertEquals(2, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // This should be 1 because the broker needs 2 buffers and the queryable node needs one.
    Assert.assertEquals(1, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }

  @Test
  public void testTripleNestedGroupBy()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(
            new QueryDataSource(
                GroupByQuery.builder()
                            .setDataSource(
                                GroupByQuery.builder()
                                            .setDataSource(
                                                GroupByQuery.builder()
                                                            .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
                                                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                                                            .setGranularity(Granularities.ALL)
                                                            .setDimensions(Lists.newArrayList(
                                                                new DefaultDimensionSpec("quality", "alias"),
                                                                new DefaultDimensionSpec("market", null),
                                                                new DefaultDimensionSpec("placement", null)
                                                            ))
                                                            .setAggregatorSpecs(Collections.singletonList(
                                                                QueryRunnerTestHelper.ROWS_COUNT))
                                                            .build()
                                            )
                                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                                            .setGranularity(Granularities.ALL)
                                            .setDimensions(
                                                new DefaultDimensionSpec("quality", "alias"),
                                                new DefaultDimensionSpec("market", null)
                                            )
                                            .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
                                            .build()
                            )
                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                            .setGranularity(Granularities.ALL)
                            .setDimensions(new DefaultDimensionSpec("quality", "alias"))
                            .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
                            .build()
            )
        )
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    // Merge-buffer need is capped at 2 regardless of additional nesting depth.
    Assert.assertEquals(2, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // This should be 1 because the broker needs 2 buffers and the queryable node needs one.
    Assert.assertEquals(1, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }

  @Test
  public void testSimpleGroupByWithSubtotals()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setDimensions(Arrays.asList(
            DefaultDimensionSpec.of(QueryRunnerTestHelper.MARKET_DIMENSION),
            DefaultDimensionSpec.of(QueryRunnerTestHelper.PLACEMENT_DIMENSION),
            DefaultDimensionSpec.of(QueryRunnerTestHelper.QUALITY_DIMENSION)
        ))
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setSubtotalsSpec(Arrays.asList(
            Arrays.asList(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.PLACEMENT_DIMENSION),
            Arrays.asList(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.PLACEMENT_DIMENSION, QueryRunnerTestHelper.QUALITY_DIMENSION)
        ))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    // Subtotal specs here are prefixes of the dimension list, so only 1 extra buffer is needed.
    Assert.assertEquals(1, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // 1 for subtotal and 1 for GroupByQueryRunnerFactory#mergeRunners
    Assert.assertEquals(2, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }

  @Test
  public void testSimpleGroupByWithSubtotalsWithoutPrefixMatch()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setDimensions(Arrays.asList(
            DefaultDimensionSpec.of(QueryRunnerTestHelper.MARKET_DIMENSION),
            DefaultDimensionSpec.of(QueryRunnerTestHelper.PLACEMENT_DIMENSION),
            DefaultDimensionSpec.of(QueryRunnerTestHelper.QUALITY_DIMENSION)
        ))
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setSubtotalsSpec(Arrays.asList(
            Arrays.asList(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.PLACEMENT_DIMENSION),
            Arrays.asList(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.QUALITY_DIMENSION)
        ))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    // A non-prefix subtotal spec forces an extra merge buffer.
    Assert.assertEquals(2, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // 2 needed by subtotal and 1 for GroupByQueryRunnerFactory#mergeRunners
    Assert.assertEquals(1, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }

  @Test
  public void testNestedGroupByWithSubtotals()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(
            new QueryDataSource(
                GroupByQuery.builder()
                            .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                            .setGranularity(Granularities.ALL)
                            .setDimensions(Arrays.asList(
                                DefaultDimensionSpec.of("quality"),
                                DefaultDimensionSpec.of("market"),
                                DefaultDimensionSpec.of("placement")
                            ))
                            .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
                            .build()
            )
        )
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(Arrays.asList(
            DefaultDimensionSpec.of("quality"),
            DefaultDimensionSpec.of("market")
        ))
        .setSubtotalsSpec(Arrays.asList(
            Collections.singletonList("quality"),
            Collections.singletonList("market")
        ))
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    Assert.assertEquals(3, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // 2 for subtotal, 1 for nested group by and 1 for GroupByQueryRunnerFactory#mergeRunners
    Assert.assertEquals(0, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }

  @Test
  public void testNestedGroupByWithNestedSubtotals()
  {
    final GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource(
            new QueryDataSource(
                GroupByQuery.builder()
                            .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
                            .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
                            .setGranularity(Granularities.ALL)
                            .setDimensions(Arrays.asList(
                                DefaultDimensionSpec.of("quality"),
                                DefaultDimensionSpec.of("market"),
                                DefaultDimensionSpec.of("placement")
                            ))
                            .setSubtotalsSpec(Arrays.asList(
                                Collections.singletonList("quality"),
                                Collections.singletonList("market")
                            ))
                            .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
                            .build()
            )
        )
        .setGranularity(Granularities.ALL)
        .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(Arrays.asList(
            DefaultDimensionSpec.of("quality"),
            DefaultDimensionSpec.of("market")
        ))
        .setSubtotalsSpec(Arrays.asList(
            Collections.singletonList("quality"),
            Collections.singletonList("market")
        ))
        .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"))
        .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT))
        .build();
    Assert.assertEquals(3, GroupByStrategyV2.countRequiredMergeBufferNum(query));
    GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query);
    // 2 for subtotal, 1 for nested group by and 1 for GroupByQueryRunnerFactory#mergeRunners
    Assert.assertEquals(0, MERGE_BUFFER_POOL.getMinRemainBufferNum());
    Assert.assertEquals(4, MERGE_BUFFER_POOL.getPoolSize());
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.management.impl;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanOperationInfo;
import javax.management.openmbean.CompositeData;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.JsonUtil;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.management.MessageCounterInfo;
import org.apache.activemq.artemis.api.core.management.QueueControl;
import org.apache.activemq.artemis.core.filter.Filter;
import org.apache.activemq.artemis.core.filter.impl.FilterImpl;
import org.apache.activemq.artemis.core.management.impl.openmbean.OpenTypeSupport;
import org.apache.activemq.artemis.core.message.impl.CoreMessage;
import org.apache.activemq.artemis.core.messagecounter.MessageCounter;
import org.apache.activemq.artemis.core.messagecounter.impl.MessageCounterHelper;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.postoffice.Binding;
import org.apache.activemq.artemis.core.postoffice.PostOffice;
import org.apache.activemq.artemis.core.security.CheckType;
import org.apache.activemq.artemis.core.security.SecurityAuth;
import org.apache.activemq.artemis.core.security.SecurityStore;
import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle;
import org.apache.activemq.artemis.core.server.Consumer;
import org.apache.activemq.artemis.core.server.MessageReference;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.server.ServerConsumer;
import org.apache.activemq.artemis.core.settings.HierarchicalRepository;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.utils.Base64;
import org.apache.activemq.artemis.utils.JsonLoader;
import org.apache.activemq.artemis.utils.collections.LinkedListIterator;
public class QueueControlImpl extends AbstractControl implements QueueControl {
public static final int FLUSH_LIMIT = 500;
// Constants -----------------------------------------------------
// Attributes ----------------------------------------------------
private final Queue queue;
private final String address;
private final PostOffice postOffice;
private final StorageManager storageManager;
private final SecurityStore securityStore;
private final HierarchicalRepository<AddressSettings> addressSettingsRepository;
private MessageCounter counter;
// Static --------------------------------------------------------
private static String toJSON(final Map<String, Object>[] messages) {
JsonArray array = toJSONMsgArray(messages);
return array.toString();
}
private static JsonArray toJSONMsgArray(final Map<String, Object>[] messages) {
JsonArrayBuilder array = JsonLoader.createArrayBuilder();
for (Map<String, Object> message : messages) {
array.add(JsonUtil.toJsonObject(message));
}
return array.build();
}
private static String toJSON(final Map<String, Map<String, Object>[]> messages) {
JsonArrayBuilder arrayReturn = JsonLoader.createArrayBuilder();
for (Map.Entry<String, Map<String, Object>[]> entry : messages.entrySet()) {
JsonObjectBuilder objectItem = JsonLoader.createObjectBuilder();
objectItem.add("consumerName", entry.getKey());
objectItem.add("elements", toJSONMsgArray(entry.getValue()));
arrayReturn.add(objectItem);
}
return arrayReturn.build().toString();
}
// Constructors --------------------------------------------------
public QueueControlImpl(final Queue queue,
final String address,
final PostOffice postOffice,
final StorageManager storageManager,
final SecurityStore securityStore,
final HierarchicalRepository<AddressSettings> addressSettingsRepository) throws Exception {
super(QueueControl.class, storageManager);
this.queue = queue;
this.address = address;
this.postOffice = postOffice;
this.storageManager = storageManager;
this.securityStore = securityStore;
this.addressSettingsRepository = addressSettingsRepository;
}
// Public --------------------------------------------------------
public void setMessageCounter(final MessageCounter counter) {
this.counter = counter;
}
// QueueControlMBean implementation ------------------------------
@Override
public String getName() {
clearIO();
try {
return queue.getName().toString();
} finally {
blockOnIO();
}
}
@Override
public String getAddress() {
checkStarted();
return address;
}
@Override
public String getFilter() {
checkStarted();
clearIO();
try {
Filter filter = queue.getFilter();
return filter != null ? filter.getFilterString().toString() : null;
} finally {
blockOnIO();
}
}
@Override
public boolean isDurable() {
checkStarted();
clearIO();
try {
return queue.isDurable();
} finally {
blockOnIO();
}
}
@Override
public String getUser() {
checkStarted();
clearIO();
try {
SimpleString user = queue.getUser();
return user == null ? null : user.toString();
} finally {
blockOnIO();
}
}
@Override
public String getRoutingType() {
checkStarted();
clearIO();
try {
return queue.getRoutingType().toString();
} finally {
blockOnIO();
}
}
@Override
public boolean isTemporary() {
checkStarted();
clearIO();
try {
return queue.isTemporary();
} finally {
blockOnIO();
}
}
@Override
public long getMessageCount() {
checkStarted();
clearIO();
try {
return queue.getMessageCount();
} finally {
blockOnIO();
}
}
@Override
public int getConsumerCount() {
checkStarted();
clearIO();
try {
return queue.getConsumerCount();
} finally {
blockOnIO();
}
}
@Override
public int getDeliveringCount() {
checkStarted();
clearIO();
try {
return queue.getDeliveringCount();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesAdded() {
checkStarted();
clearIO();
try {
return queue.getMessagesAdded();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesAcknowledged() {
checkStarted();
clearIO();
try {
return queue.getMessagesAcknowledged();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesExpired() {
checkStarted();
clearIO();
try {
return queue.getMessagesExpired();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesKilled() {
checkStarted();
clearIO();
try {
return queue.getMessagesKilled();
} finally {
blockOnIO();
}
}
@Override
public long getID() {
checkStarted();
clearIO();
try {
return queue.getID();
} finally {
blockOnIO();
}
}
@Override
public long getScheduledCount() {
checkStarted();
clearIO();
try {
return queue.getScheduledCount();
} finally {
blockOnIO();
}
}
@Override
public String getDeadLetterAddress() {
checkStarted();
clearIO();
try {
AddressSettings addressSettings = addressSettingsRepository.getMatch(address);
if (addressSettings != null && addressSettings.getDeadLetterAddress() != null) {
return addressSettings.getDeadLetterAddress().toString();
}
return null;
} finally {
blockOnIO();
}
}
@Override
public String getExpiryAddress() {
checkStarted();
clearIO();
try {
AddressSettings addressSettings = addressSettingsRepository.getMatch(address);
if (addressSettings != null && addressSettings.getExpiryAddress() != null) {
return addressSettings.getExpiryAddress().toString();
} else {
return null;
}
} finally {
blockOnIO();
}
}
@Override
public int getMaxConsumers() {
checkStarted();
clearIO();
try {
return queue.getMaxConsumers();
} finally {
blockOnIO();
}
}
@Override
public boolean isPurgeOnNoConsumers() {
checkStarted();
clearIO();
try {
return queue.isPurgeOnNoConsumers();
} finally {
blockOnIO();
}
}
@Override
public Map<String, Object>[] listScheduledMessages() throws Exception {
checkStarted();
clearIO();
try {
List<MessageReference> refs = queue.getScheduledMessages();
return convertMessagesToMaps(refs);
} finally {
blockOnIO();
}
}
@Override
public String listScheduledMessagesAsJSON() throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listScheduledMessages());
} finally {
blockOnIO();
}
}
/**
* @param refs
* @return
*/
private Map<String, Object>[] convertMessagesToMaps(List<MessageReference> refs) throws ActiveMQException {
Map<String, Object>[] messages = new Map[refs.size()];
int i = 0;
for (MessageReference ref : refs) {
Message message = ref.getMessage();
messages[i++] = message.toMap();
}
return messages;
}
@Override
public Map<String, Map<String, Object>[]> listDeliveringMessages() throws ActiveMQException {
checkStarted();
clearIO();
try {
Map<String, List<MessageReference>> msgs = queue.getDeliveringMessages();
Map<String, Map<String, Object>[]> msgRet = new HashMap<>();
for (Map.Entry<String, List<MessageReference>> entry : msgs.entrySet()) {
msgRet.put(entry.getKey(), convertMessagesToMaps(entry.getValue()));
}
return msgRet;
} finally {
blockOnIO();
}
}
@Override
public String listDeliveringMessagesAsJSON() throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listDeliveringMessages());
} finally {
blockOnIO();
}
}
@Override
public Map<String, Object>[] listMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
List<Map<String, Object>> messages = new ArrayList<>();
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
try {
while (iterator.hasNext()) {
MessageReference ref = iterator.next();
if (filter == null || filter.match(ref.getMessage())) {
Message message = ref.getMessage();
messages.add(message.toMap());
}
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
return messages.toArray(new Map[messages.size()]);
}
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public String listMessagesAsJSON(final String filter) throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listMessages(filter));
} finally {
blockOnIO();
}
}
protected Map<String, Object>[] getFirstMessage() throws Exception {
checkStarted();
clearIO();
try {
List<Map<String, Object>> messages = new ArrayList<>();
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
// returns just the first, as it's the first only
if (iterator.hasNext()) {
MessageReference ref = iterator.next();
Message message = ref.getMessage();
messages.add(message.toMap());
}
return messages.toArray(new Map[1]);
}
} finally {
blockOnIO();
}
}
@Override
public String getFirstMessageAsJSON() throws Exception {
return toJSON(getFirstMessage());
}
@Override
public Long getFirstMessageTimestamp() throws Exception {
Map<String, Object>[] _message = getFirstMessage();
if (_message == null || _message.length == 0 || _message[0] == null) {
return null;
}
Map<String, Object> message = _message[0];
if (!message.containsKey("timestamp")) {
return null;
}
return (Long) message.get("timestamp");
}
@Override
public Long getFirstMessageAge() throws Exception {
Long firstMessageTimestamp = getFirstMessageTimestamp();
if (firstMessageTimestamp == null) {
return null;
}
long now = new Date().getTime();
return now - firstMessageTimestamp.longValue();
}
@Override
public long countMessages() throws Exception {
return countMessages(null);
}
@Override
public long countMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
if (filter == null) {
return getMessageCount();
} else {
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
int count = 0;
try {
while (iterator.hasNext()) {
MessageReference ref = iterator.next();
if (filter.match(ref.getMessage())) {
count++;
}
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
return count;
}
}
} finally {
blockOnIO();
}
}
@Override
public boolean removeMessage(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.deleteReference(messageID);
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public int removeMessages(final String filterStr) throws Exception {
return removeMessages(FLUSH_LIMIT, filterStr);
}
@Override
public int removeMessages(final int flushLimit, final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.deleteMatchingReferences(flushLimit, filter);
} finally {
blockOnIO();
}
}
@Override
public int removeAllMessages() throws Exception {
return removeMessages(FLUSH_LIMIT, null);
}
@Override
public boolean expireMessage(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.expireReference(messageID);
} finally {
blockOnIO();
}
}
@Override
public int expireMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.expireReferences(filter);
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public boolean retryMessage(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
Filter singleMessageFilter = new Filter() {
@Override
public boolean match(Message message) {
return message.getMessageID() == messageID;
}
@Override
public SimpleString getFilterString() {
return new SimpleString("custom filter for MESSAGEID= messageID");
}
};
return queue.retryMessages(singleMessageFilter) > 0;
} finally {
blockOnIO();
}
}
@Override
public int retryMessages() throws Exception {
checkStarted();
clearIO();
try {
return queue.retryMessages(null);
} finally {
blockOnIO();
}
}
@Override
public boolean moveMessage(final long messageID, final String otherQueueName) throws Exception {
return moveMessage(messageID, otherQueueName, false);
}
@Override
public boolean moveMessage(final long messageID,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
checkStarted();
clearIO();
try {
Binding binding = postOffice.getBinding(new SimpleString(otherQueueName));
if (binding == null) {
throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName);
}
return queue.moveReference(messageID, binding.getAddress(), binding, rejectDuplicates);
} finally {
blockOnIO();
}
}
@Override
public int moveMessages(final String filterStr, final String otherQueueName) throws Exception {
return moveMessages(filterStr, otherQueueName, false);
}
@Override
public int moveMessages(final int flushLimit,
final String filterStr,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
Binding binding = postOffice.getBinding(new SimpleString(otherQueueName));
if (binding == null) {
throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName);
}
int retValue = queue.moveReferences(flushLimit, filter, binding.getAddress(), rejectDuplicates, binding);
return retValue;
} finally {
blockOnIO();
}
}
@Override
public int moveMessages(final String filterStr,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
return moveMessages(FLUSH_LIMIT, filterStr, otherQueueName, rejectDuplicates);
}
@Override
public int sendMessagesToDeadLetterAddress(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.sendMessagesToDeadLetterAddress(filter);
} finally {
blockOnIO();
}
}
@Override
public String sendMessage(final Map<String, String> headers,
final int type,
final String body,
boolean durable,
final String user,
final String password) throws Exception {
try {
securityStore.check(queue.getAddress(), queue.getName(), CheckType.SEND, new SecurityAuth() {
@Override
public String getUsername() {
return user;
}
@Override
public String getPassword() {
return password;
}
@Override
public RemotingConnection getRemotingConnection() {
return null;
}
});
CoreMessage message = new CoreMessage(storageManager.generateID(), 50);
if (headers != null) {
for (String header : headers.keySet()) {
message.putStringProperty(new SimpleString(header), new SimpleString(headers.get(header)));
}
}
message.setType((byte) type);
message.setDurable(durable);
message.setTimestamp(System.currentTimeMillis());
if (body != null) {
if (type == Message.TEXT_TYPE) {
message.getBodyBuffer().writeNullableSimpleString(new SimpleString(body));
} else {
message.getBodyBuffer().writeBytes(Base64.decode(body));
}
}
message.setAddress(queue.getAddress());
ByteBuffer buffer = ByteBuffer.allocate(8);
buffer.putLong(queue.getID());
message.putBytesProperty(Message.HDR_ROUTE_TO_IDS, buffer.array());
postOffice.route(message, true);
return "" + message.getMessageID();
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
}
}
@Override
public boolean sendMessageToDeadLetterAddress(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.sendMessageToDeadLetterAddress(messageID);
} finally {
blockOnIO();
}
}
@Override
public int changeMessagesPriority(final String filterStr, final int newPriority) throws Exception {
checkStarted();
clearIO();
try {
if (newPriority < 0 || newPriority > 9) {
throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority);
}
Filter filter = FilterImpl.createFilter(filterStr);
return queue.changeReferencesPriority(filter, (byte) newPriority);
} finally {
blockOnIO();
}
}
@Override
public boolean changeMessagePriority(final long messageID, final int newPriority) throws Exception {
checkStarted();
clearIO();
try {
if (newPriority < 0 || newPriority > 9) {
throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority);
}
return queue.changeReferencePriority(messageID, (byte) newPriority);
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounter() {
checkStarted();
clearIO();
try {
return MessageCounterInfo.toJSon(counter);
} catch (Exception e) {
throw new IllegalStateException(e);
} finally {
blockOnIO();
}
}
@Override
public void resetMessageCounter() {
checkStarted();
clearIO();
try {
counter.resetCounter();
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterAsHTML() {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterAsHTML(new MessageCounter[]{counter});
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterHistory() throws Exception {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterHistory(counter);
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterHistoryAsHTML() {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterHistoryAsHTML(new MessageCounter[]{counter});
} finally {
blockOnIO();
}
}
@Override
public void pause() {
checkStarted();
clearIO();
try {
queue.pause();
} finally {
blockOnIO();
}
}
@Override
public void pause(boolean persist) {
checkStarted();
clearIO();
try {
queue.pause(persist);
} finally {
blockOnIO();
}
}
@Override
public void resume() {
checkStarted();
clearIO();
try {
queue.resume();
} finally {
blockOnIO();
}
}
@Override
public boolean isPaused() throws Exception {
checkStarted();
clearIO();
try {
return queue.isPaused();
} finally {
blockOnIO();
}
}
@Override
public CompositeData[] browse(int page, int pageSize) throws Exception {
String filter = null;
checkStarted();
clearIO();
try {
long index = 0;
long start = (page - 1) * pageSize;
long end = Math.min((long)(page * pageSize), queue.getMessageCount());
ArrayList<CompositeData> c = new ArrayList<>();
Filter thefilter = FilterImpl.createFilter(filter);
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
try {
while (iterator.hasNext() && index < end) {
MessageReference ref = iterator.next();
if (thefilter == null || thefilter.match(ref.getMessage())) {
if (index >= start) {
c.add(OpenTypeSupport.convert(ref));
}
}
index++;
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
CompositeData[] rc = new CompositeData[c.size()];
c.toArray(rc);
return rc;
}
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public CompositeData[] browse() throws Exception {
return browse(null);
}
@Override
public CompositeData[] browse(String filter) throws Exception {
checkStarted();
clearIO();
try {
int pageSize = addressSettingsRepository.getMatch(queue.getName().toString()).getManagementBrowsePageSize();
int currentPageSize = 0;
ArrayList<CompositeData> c = new ArrayList<>();
Filter thefilter = FilterImpl.createFilter(filter);
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
try {
while (iterator.hasNext() && currentPageSize++ < pageSize) {
MessageReference ref = iterator.next();
if (thefilter == null || thefilter.match(ref.getMessage())) {
c.add(OpenTypeSupport.convert(ref));
}
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
CompositeData[] rc = new CompositeData[c.size()];
c.toArray(rc);
return rc;
}
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public void flushExecutor() {
checkStarted();
clearIO();
try {
queue.flushExecutor();
} finally {
blockOnIO();
}
}
@Override
public String listConsumersAsJSON() throws Exception {
checkStarted();
clearIO();
try {
Collection<Consumer> consumers = queue.getConsumers();
JsonArrayBuilder jsonArray = JsonLoader.createArrayBuilder();
for (Consumer consumer : consumers) {
if (consumer instanceof ServerConsumer) {
ServerConsumer serverConsumer = (ServerConsumer) consumer;
JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("consumerID", serverConsumer.getID()).add("connectionID", serverConsumer.getConnectionID().toString()).add("sessionID", serverConsumer.getSessionID()).add("browseOnly", serverConsumer.isBrowseOnly()).add("creationTime", serverConsumer.getCreationTime());
jsonArray.add(obj);
}
}
return jsonArray.build().toString();
} finally {
blockOnIO();
}
}
@Override
protected MBeanOperationInfo[] fillMBeanOperationInfo() {
return MBeanInfoHelper.getMBeanOperationsInfo(QueueControl.class);
}
@Override
protected MBeanAttributeInfo[] fillMBeanAttributeInfo() {
return MBeanInfoHelper.getMBeanAttributesInfo(QueueControl.class);
}
@Override
public void resetMessagesAdded() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesAdded();
} finally {
blockOnIO();
}
}
@Override
public void resetMessagesAcknowledged() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesAcknowledged();
} finally {
blockOnIO();
}
}
@Override
public void resetMessagesExpired() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesExpired();
} finally {
blockOnIO();
}
}
@Override
public void resetMessagesKilled() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesKilled();
} finally {
blockOnIO();
}
}
// Package protected ---------------------------------------------
// Protected -----------------------------------------------------
// Private -------------------------------------------------------
private void checkStarted() {
if (!postOffice.isStarted()) {
throw new IllegalStateException("Broker is not started. Queue can not be managed yet");
}
}
// Inner classes -------------------------------------------------
}
| |
/*
* Copyright 2015 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.cdn.traffic_control.traffic_router.core.router;
import java.io.IOException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.commons.pool.ObjectPool;
import org.apache.log4j.Logger;
import org.json.JSONException;
import org.json.JSONObject;
import org.xbill.DNS.Name;
import org.xbill.DNS.Zone;
import com.comcast.cdn.traffic_control.traffic_router.core.TrafficRouterException;
import com.comcast.cdn.traffic_control.traffic_router.core.cache.Cache;
import com.comcast.cdn.traffic_control.traffic_router.core.cache.CacheLocation;
import com.comcast.cdn.traffic_control.traffic_router.core.cache.CacheRegister;
import com.comcast.cdn.traffic_control.traffic_router.core.cache.InetRecord;
import com.comcast.cdn.traffic_control.traffic_router.core.dns.ZoneManager;
import com.comcast.cdn.traffic_control.traffic_router.core.dns.DNSAccessRecord;
import com.comcast.cdn.traffic_control.traffic_router.core.ds.DeliveryService;
import com.comcast.cdn.traffic_control.traffic_router.core.ds.Dispersion;
import com.comcast.cdn.traffic_control.traffic_router.core.hash.HashFunction;
import com.comcast.cdn.traffic_control.traffic_router.core.loc.FederationRegistry;
import com.comcast.cdn.traffic_control.traffic_router.core.loc.Geolocation;
import com.comcast.cdn.traffic_control.traffic_router.core.loc.GeolocationException;
import com.comcast.cdn.traffic_control.traffic_router.core.loc.GeolocationService;
import com.comcast.cdn.traffic_control.traffic_router.core.loc.NetworkNode;
import com.comcast.cdn.traffic_control.traffic_router.core.loc.NetworkNodeException;
import com.comcast.cdn.traffic_control.traffic_router.core.request.DNSRequest;
import com.comcast.cdn.traffic_control.traffic_router.core.request.HTTPRequest;
import com.comcast.cdn.traffic_control.traffic_router.core.request.Request;
import com.comcast.cdn.traffic_control.traffic_router.core.router.StatTracker.Track;
import com.comcast.cdn.traffic_control.traffic_router.core.router.StatTracker.Track.ResultType;
import com.comcast.cdn.traffic_control.traffic_router.core.router.StatTracker.Track.RouteType;
import com.comcast.cdn.traffic_control.traffic_router.core.util.TrafficOpsUtils;
import com.comcast.cdn.traffic_control.traffic_router.core.util.CidrAddress;
import com.comcast.cdn.traffic_control.traffic_router.core.router.StatTracker.Track.ResultDetails;
public class TrafficRouter {
public static final Logger LOGGER = Logger.getLogger(TrafficRouter.class);

// Snapshot of all known caches and delivery services.
private final CacheRegister cacheRegister;
private final ZoneManager zoneManager;
// IPv4 and IPv6 geolocation backends; chosen by address family in getLocation().
private final GeolocationService geolocationService;
private final GeolocationService geolocationService6;
// Pool of hash-function instances exposed via getHashFunctionPool();
// usage by callers is not visible in this file.
private final ObjectPool hashFunctionPool;
private final FederationRegistry federationRegistry;
// Seeded once at construction; used to shuffle DNS answers for load spreading.
private final Random random = new Random(System.nanoTime());
private Set<String> requestHeaders = new HashSet<String>();
/**
 * Builds a TrafficRouter over the given cache register and geolocation services,
 * and constructs its backing {@link ZoneManager}.
 *
 * @throws IOException / JSONException / TrafficRouterException propagated from
 *         ZoneManager construction
 */
public TrafficRouter(final CacheRegister cr,
        final GeolocationService geolocationService,
        final GeolocationService geolocationService6,
        final ObjectPool hashFunctionPool,
        final StatTracker statTracker,
        final TrafficOpsUtils trafficOpsUtils,
        final FederationRegistry federationRegistry) throws IOException, JSONException, TrafficRouterException {
    this.cacheRegister = cr;
    this.geolocationService = geolocationService;
    this.geolocationService6 = geolocationService6;
    this.hashFunctionPool = hashFunctionPool;
    this.federationRegistry = federationRegistry;
    // NOTE(review): 'this' escapes to ZoneManager before construction completes;
    // ZoneManager may observe a partially initialized router — confirm it only
    // stores the reference during its constructor.
    this.zoneManager = new ZoneManager(this, statTracker, trafficOpsUtils);
}
/** @return the DNS zone manager created at construction time */
public ZoneManager getZoneManager() {
    return zoneManager;
}
/**
 * Filters the supplied list down to the online caches that support the given
 * {@link DeliveryService}. The list is modified in place and also returned; if
 * no cache qualifies the result is an empty list.
 *
 * @param caches candidate caches (mutated in place)
 * @param ds the DeliveryService to check
 * @return the same list, now containing only supporting caches
 */
protected List<Cache> getSupportingCaches(final List<Cache> caches, final DeliveryService ds) {
    // Walk backwards so removals never shift the indices still to be visited.
    for (int index = caches.size() - 1; index >= 0; index--) {
        final Cache candidate = caches.get(index);
        // A cache we have no authority data for is treated as available.
        final boolean available = !candidate.hasAuthority() || candidate.isAvailable();

        if (!available || !candidate.hasDeliveryService(ds.getId())) {
            caches.remove(index);
        }
    }

    return caches;
}
/** @return the cache register snapshot this router was built with */
public CacheRegister getCacheRegister() {
    return cacheRegister;
}
/**
 * Resolves the {@link DeliveryService} matching the request, or {@code null}
 * when no cache register has been loaded yet or nothing matches.
 */
protected DeliveryService selectDeliveryService(final Request request, final boolean isHttp) {
    // Nothing to match against until a cache register snapshot is available.
    if (cacheRegister == null) {
        LOGGER.warn("no caches yet");
        return null;
    }

    final DeliveryService deliveryService = cacheRegister.getDeliveryService(request, isHttp);

    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Selected DeliveryService: " + deliveryService);
    }

    return deliveryService;
}
/**
 * Applies a health-state snapshot to caches and delivery services.
 * Expects top-level keys "caches" and "deliveryServices"; a missing key makes
 * the corresponding update a no-op (optJSONObject returns null).
 */
boolean setState(final JSONObject states) throws UnknownHostException {
    setCacheStates(states.optJSONObject("caches"));
    setDsStates(states.optJSONObject("deliveryServices"));
    return true;
}
/**
 * Pushes per-delivery-service state onto every registered DeliveryService.
 * A service with no entry in {@code dsStates} receives {@code null}
 * (optJSONObject's miss value), same as before.
 *
 * @return {@code false} when {@code dsStates} is null, {@code true} otherwise
 */
private boolean setDsStates(final JSONObject dsStates) {
    if (dsStates == null) {
        return false;
    }

    final Map<String, DeliveryService> dsMap = cacheRegister.getDeliveryServices();

    // entrySet() avoids the second map lookup that keySet()+get() performed.
    for (final Map.Entry<String, DeliveryService> entry : dsMap.entrySet()) {
        entry.getValue().setState(dsStates.optJSONObject(entry.getKey()));
    }

    return true;
}
/**
 * Pushes per-cache state onto every registered Cache. The monitor reports
 * caches without the "@suffix" qualifier, so the suffix is stripped from the
 * local cache name before looking up its state.
 *
 * @return {@code false} when the states object or the cache map is null
 */
private boolean setCacheStates(final JSONObject cacheStates) {
    if (cacheStates == null) {
        return false;
    }

    final Map<String, Cache> cacheMap = cacheRegister.getCacheMap();

    if (cacheMap == null) {
        return false;
    }

    // entrySet() avoids the second map lookup that keySet()+get() performed.
    for (final Map.Entry<String, Cache> entry : cacheMap.entrySet()) {
        final String monitorCacheName = entry.getKey().replaceFirst("@.*", "");
        final JSONObject state = cacheStates.optJSONObject(monitorCacheName);
        entry.getValue().setState(state);
    }

    return true;
}
// Error strings shared by the route(...) implementations.
protected static final String UNABLE_TO_ROUTE_REQUEST = "Unable to route request.";
protected static final String URL_ERR_STR = "Unable to create URL.";
/** @return the IPv4 geolocation backend (IPv6 lookups use geolocationService6) */
public GeolocationService getGeolocationService() {
    return geolocationService;
}
/**
 * Geolocates a client IP, dispatching to the IPv6 service when the address
 * contains a ':' and to the IPv4 service otherwise.
 *
 * @throws GeolocationException propagated from the underlying lookup
 */
public Geolocation getLocation(final String clientIP) throws GeolocationException {
    final GeolocationService service = clientIP.contains(":") ? geolocationService6 : geolocationService;
    return service.location(clientIP);
}
/**
 * Gets the shared hash-function pool.
 *
 * @return the hashFunctionPool supplied at construction
 */
public ObjectPool getHashFunctionPool() {
    return hashFunctionPool;
}
/**
 * Walks cache locations in proximity order to the client and returns the caches
 * of the first location that yields a usable selection. On success the chosen
 * location's lat/long are written into {@code resultLocation}. Returns
 * {@code null} when no location qualifies or the delivery service's location
 * limit is exhausted.
 */
public List<Cache> getCachesByGeo(final Request request, final DeliveryService ds, final Geolocation clientLocation, final Map<String, Double> resultLocation) throws GeolocationException {
    final String zoneId = null;
    // The specific use of the popularity zone manager was not understood and
    // not used, and was therefore eliminated:
    // final String zoneId = getZoneManager().getZone(request.getRequestedUrl());
    final int locationLimit = ds.getLocationLimit();
    int locationsTested = 0;
    final List<CacheLocation> cacheLocations = orderCacheLocations(request,
            getCacheRegister().getCacheLocations(zoneId), ds, clientLocation);

    for (final CacheLocation location : cacheLocations) {
        final List<Cache> caches = selectCache(location, ds);
        if (caches != null) {
            // Report where the answer came from so callers can record geo stats.
            resultLocation.put("latitude", location.getGeolocation().getLatitude());
            resultLocation.put("longitude", location.getGeolocation().getLongitude());
            return caches;
        }
        locationsTested++;
        // A locationLimit of 0 means "no limit".
        if (locationLimit != 0 && locationsTested >= locationLimit) {
            return null;
        }
    }

    return null;
}
/**
 * Selects caches for the request: coverage-zone match first, then geolocation
 * fallback unless the delivery service is coverage-zone-only (in which case the
 * track records a MISS and {@code null} is returned).
 */
protected List<Cache> selectCache(final Request request, final DeliveryService ds, final Track track) throws GeolocationException {
    final CacheLocation cacheLocation = getCoverageZoneCache(request.getClientIP());
    List<Cache> caches = selectCachesByCZ(ds, cacheLocation, track);

    if (caches != null) {
        return caches;
    }

    if (ds.isCoverageZoneOnly()) {
        // CZ-only services never fall back to geo; record the miss instead.
        track.setResult(ResultType.MISS);
        track.setResultDetails(ResultDetails.DS_CZ_ONLY);
    } else {
        caches = selectCachesByGeo(request, ds, cacheLocation, track);
    }

    return caches;
}
/**
 * Geolocation-based cache selection. Resolves the client's location (a coverage
 * zone hit short-circuits the lookup, see getClientLocation), then picks caches
 * by proximity. Returns {@code null} when the client cannot be located; note
 * that the track result is set to GEO even when no cache was found (only the
 * result details record GEO_NO_CACHE_FOUND).
 */
public List<Cache> selectCachesByGeo(final Request request, final DeliveryService deliveryService, final CacheLocation cacheLocation, final Track track) throws GeolocationException {
    Geolocation clientLocation = null;

    try {
        clientLocation = getClientLocation(request, deliveryService, cacheLocation);
    } catch (GeolocationException e) {
        // Best effort: a failed lookup degrades to "unsupported geo" below.
        LOGGER.warn("Failed looking up Client GeoLocation: " + e.getMessage());
    }

    if (clientLocation == null) {
        track.setResultDetails(ResultDetails.DS_CLIENT_GEO_UNSUPPORTED);
        return null;
    }

    final Map<String, Double> resultLocation = new HashMap<String, Double>();
    final List<Cache> caches = getCachesByGeo(request, deliveryService, clientLocation, resultLocation);

    if (caches == null || caches.isEmpty()) {
        track.setResultDetails(ResultDetails.GEO_NO_CACHE_FOUND);
    }

    track.setResult(ResultType.GEO);

    return caches;
}
/**
 * Routes a DNS request. Resolution order: delivery service availability →
 * coverage-zone caches → federation records (for CZ misses on non-CZ-only
 * services) → geolocation → failure response. The track records which stage
 * produced the answer.
 *
 * @return the addresses to answer with, or {@code null} when no delivery
 *         service matched (static route)
 */
public DNSRouteResult route(final DNSRequest request, final Track track) throws GeolocationException {
    track.setRouteType(RouteType.DNS, request.getHostname());

    final DeliveryService ds = selectDeliveryService(request, false);

    if (ds == null) {
        track.setResult(ResultType.STATIC_ROUTE);
        track.setResultDetails(ResultDetails.DS_NOT_FOUND);
        return null;
    }

    final DNSRouteResult result = new DNSRouteResult();

    if (!ds.isAvailable()) {
        // Unavailable service: answer with its configured failure response.
        result.setAddresses(ds.getFailureDnsResponse(request, track));
        return result;
    }

    final CacheLocation cacheLocation = getCoverageZoneCache(request.getClientIP());
    List<Cache> caches = selectCachesByCZ(ds, cacheLocation, track);

    if (caches != null) {
        track.setResult(ResultType.CZ);
        result.setAddresses(inetRecordsFromCaches(ds, caches));
        return result;
    }

    if (ds.isCoverageZoneOnly()) {
        track.setResult(ResultType.MISS);
        track.setResultDetails(ResultDetails.DS_CZ_ONLY);
        result.setAddresses(ds.getFailureDnsResponse(request, track));
        return result;
    }

    try {
        // Federated answers take precedence over geolocation.
        final List<InetRecord> inetRecords = federationRegistry.findInetRecords(ds.getId(), CidrAddress.fromString(request.getClientIP()));
        if (inetRecords != null && !inetRecords.isEmpty()) {
            result.setAddresses(inetRecords);
            track.setResult(ResultType.FED);
            return result;
        }
    } catch (NetworkNodeException e) {
        // Unparseable client address: log and fall through to geolocation.
        LOGGER.error("Bad client address: '" + request.getClientIP() + "'");
    }

    caches = selectCachesByGeo(request, ds, cacheLocation, track);

    if (caches != null) {
        track.setResult(ResultType.GEO);
        result.setAddresses(inetRecordsFromCaches(ds, caches));
    } else {
        track.setResult(ResultType.MISS);
        result.setAddresses(ds.getFailureDnsResponse(request, track));
    }

    return result;
}
/**
 * Collects the address records for the given caches, honoring the delivery
 * service's maxDnsIps limit.
 *
 * @param ds the delivery service providing TTLs, the IP limit and IPv6 routing flag
 * @param caches the caches to collect addresses from; shuffled in place when a
 *               positive maxDnsIps limit is configured (pre-existing behavior)
 * @return the collected address records, at most maxDnsIps caches' worth when the
 *         limit is positive, otherwise all of them
 */
private List<InetRecord> inetRecordsFromCaches(final DeliveryService ds, final List<Cache> caches) {
    final List<InetRecord> addresses = new ArrayList<InetRecord>();
    final int maxDnsIps = ds.getMaxDnsIps();
    /*
     * We also shuffle in NameServer when adding Records to the Message prior
     * to sending it out, as the Records are sorted later when we fill the
     * dynamic zone if DNSSEC is enabled. We shuffle here prior to pruning
     * for maxDnsIps so that we ensure we are spreading load across all caches
     * assigned to this delivery service.
     */
    if (maxDnsIps > 0) {
        Collections.shuffle(caches, random);
    }
    int i = 0;
    for (final Cache cache : caches) {
        // Fix: use the same "limit enabled" test (> 0) as the shuffle above. The
        // previous check (maxDnsIps != 0) made a negative maxDnsIps break on the
        // first iteration and return an empty answer; non-positive now means unlimited.
        if (maxDnsIps > 0 && i >= maxDnsIps) {
            break;
        }
        i++;
        addresses.addAll(cache.getIpAddresses(ds.getTtls(), zoneManager, ds.isIp6RoutingEnabled()));
    }
    return addresses;
}
/**
 * Resolves the geolocation to use for a client. If a coverage-zone cache location
 * is already known, its geolocation wins; otherwise the client IP is geolocated
 * and the result is passed through the delivery service's supported-location check.
 *
 * @param request the client's request
 * @param ds the delivery service handling the request
 * @param cacheLocation the coverage-zone location, or null when none was found
 * @return the geolocation to route by
 * @throws GeolocationException if the IP lookup fails
 */
public Geolocation getClientLocation(final Request request, final DeliveryService ds, final CacheLocation cacheLocation) throws GeolocationException {
    if (cacheLocation != null) {
        return cacheLocation.getGeolocation();
    }
    final Geolocation located = getLocation(request.getClientIP());
    return ds.supportLocation(located, request.getType());
}
/**
 * Selects caches for the delivery service from the client's coverage-zone
 * location, recording a CZ result on the tracker when something is found.
 *
 * @param ds the delivery service handling the request
 * @param cacheLocation the coverage-zone location, possibly null
 * @param track mutable tracking object updated on a successful selection
 * @return the selected caches, or null when the location is unknown, unavailable,
 *         or holds no supporting caches
 */
private List<Cache> selectCachesByCZ(final DeliveryService ds, final CacheLocation cacheLocation, final Track track) {
    // Coverage-zone routing requires a known, currently-available location.
    if (cacheLocation == null) {
        return null;
    }
    if (!ds.isLocationAvailable(cacheLocation)) {
        return null;
    }
    final List<Cache> found = selectCache(cacheLocation, ds);
    if (found == null) {
        return null;
    }
    track.setResult(ResultType.CZ);
    track.setResultLocation(cacheLocation.getGeolocation());
    return found;
}
/**
 * Routes an HTTP request to a redirect URL on a selected cache.
 *
 * @param request the HTTP request to route
 * @param track mutable tracking object recording how the result was obtained
 * @return the route result carrying the delivery service and target URL, or null
 *         when no delivery service matched the request
 * @throws MalformedURLException if the constructed redirect URI is not a valid URL
 * @throws GeolocationException if the geolocation lookup fails
 */
public HTTPRouteResult route(final HTTPRequest request, final Track track) throws MalformedURLException, GeolocationException {
    track.setRouteType(RouteType.HTTP, request.getHostname());
    final DeliveryService ds = selectDeliveryService(request, true);
    if (ds == null) {
        track.setResult(ResultType.DS_MISS);
        track.setResultDetails(ResultDetails.DS_NOT_FOUND);
        return null;
    }
    final HTTPRouteResult routeResult = new HTTPRouteResult();
    routeResult.setDeliveryService(ds);
    if (!ds.isAvailable()) {
        // Delivery service is down: redirect to its configured failure response.
        routeResult.setUrl(ds.getFailureHttpResponse(request, track));
        return routeResult;
    }
    final List<Cache> caches = selectCache(request, ds, track);
    if (caches == null) {
        routeResult.setUrl(ds.getFailureHttpResponse(request, track));
        return routeResult;
    }
    // Pick one cache from the consistent-hash result, honoring the DS's dispersion settings.
    final Dispersion dispersion = ds.getDispersion();
    final Cache cache = dispersion.getCache(consistentHash(caches, request.getPath()));
    routeResult.setUrl(new URL(ds.createURIString(request, cache)));
    return routeResult;
}
/**
 * Looks up the {@link CacheLocation} covering the given client IP via the
 * coverage-zone (network node) tree. If the node does not already carry a
 * location, its location id is resolved against the cache register and the
 * match is memoized on the node.
 *
 * @param ip the client IP address
 * @return the covering cache location, or null when the IP is not covered or
 *         no matching location exists
 */
protected CacheLocation getCoverageZoneCache(final String ip) {
    NetworkNode node = null;
    try {
        node = NetworkNode.getInstance().getNetwork(ip);
    } catch (NetworkNodeException e) {
        LOGGER.warn(e);
    }
    if (node == null) {
        return null;
    }
    final CacheLocation knownLocation = node.getCacheLocation();
    if (knownLocation != null) {
        return knownLocation;
    }
    final String locationId = node.getLoc();
    if (locationId == null) {
        return null;
    }
    // Resolve the location id against the register and remember the result on the node.
    final Collection<CacheLocation> candidates = getCacheRegister().getCacheLocations();
    for (final CacheLocation candidate : candidates) {
        if (candidate.getId().equals(locationId)) {
            node.setCacheLocation(candidate);
            return candidate;
        }
    }
    return null;
}
/**
 * Utilizes the hashValues stored with each cache to select the cache that
 * the specified hash should map to.
 *
 * @param caches
 *            the list of caches to choose from
 * @param request
 *            the request string from which the hash will be generated
 * @return a cache or null if no cache can be found to map to
 */
protected Cache consistentHashOld(final List<Cache> caches,
        final String request) {
    double hash = 0;
    HashFunction hashFunction = null;
    try {
        // Hash functions are pooled: borrow one, hash the request, return it.
        hashFunction = (HashFunction) hashFunctionPool.borrowObject();
        try {
            hash = hashFunction.hash(request);
        } catch (final Exception e) {
            LOGGER.debug(e.getMessage(), e);
        }
        hashFunctionPool.returnObject(hashFunction);
    } catch (final Exception e) {
        LOGGER.debug(e.getMessage(), e);
    }
    if (hash == 0) {
        // A zero hash means the pool or the hash computation failed above.
        LOGGER.warn("Problem with hashFunctionPool, request: " + request);
        return null;
    }
    return searchCacheOld(caches, hash);
}
/**
 * Finds the cache owning the closest hash value at or above {@code hash};
 * wraps around to the cache with the overall smallest hash value when no
 * value is greater than or equal to {@code hash}.
 *
 * @param caches the caches whose hash values are searched
 * @param hash the target hash value
 * @return the matching cache, or null when no cache has any hash values
 */
private Cache searchCacheOld(final List<Cache> caches, final double hash) {
    Cache wrapAroundCache = null;   // owner of the globally smallest hash value
    double smallestHash = Double.MAX_VALUE;
    Cache successorCache = null;    // owner of the closest hash value >= hash
    double smallestDiff = Double.MAX_VALUE;
    for (final Cache candidate : caches) {
        for (final double hashValue : candidate.getHashValues()) {
            if (hashValue < smallestHash) {
                smallestHash = hashValue;
                wrapAroundCache = candidate;
            }
            final double diff = hashValue - hash;
            if (diff >= 0 && diff < smallestDiff) {
                smallestDiff = diff;
                successorCache = candidate;
            }
        }
    }
    return successorCache != null ? successorCache : wrapAroundCache;
}
/**
 * Utilizes the hashValues stored with each cache to select the cache that
 * the specified hash should map to.
 *
 * @param caches
 *            the list of caches to choose from
 * @param request
 *            the request string from which the hash will be generated
 * @return a map of each cache keyed by its distance from the request hash
 *         (closest first), or null if hashing fails or any cache returns no
 *         usable closest hash
 */
protected SortedMap<Double, Cache> consistentHash(final List<Cache> caches,
        final String request) {
    double hash = 0;
    HashFunction hashFunction = null;
    try {
        // Hash functions are pooled: borrow one, hash the request, return it.
        hashFunction = (HashFunction) hashFunctionPool.borrowObject();
        try {
            hash = hashFunction.hash(request);
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        hashFunctionPool.returnObject(hashFunction);
    } catch (final Exception e) {
        LOGGER.error(e.getMessage(), e);
    }
    if (hash == 0) {
        // A zero hash means the pool or the hash computation failed above.
        LOGGER.warn("Problem with hashFunctionPool, request: " + request);
        return null;
    }
    final SortedMap<Double, Cache> cacheMap = new TreeMap<Double, Cache>();
    for (final Cache cache : caches) {
        final double r = cache.getClosestHash(hash);
        if (r == 0) {
            LOGGER.warn("Error: getClosestHash returned 0: " + cache);
            return null;
        }
        double diff = Math.abs(r - hash);
        if (cacheMap.containsKey(diff)) {
            // Two caches at the same distance: bump the key to the next representable
            // double (via its raw long bits) so neither cache is dropped from the map.
            LOGGER.warn("Error: cacheMap contains diff " + diff + "; incrementing to avoid collision");
            long bits = Double.doubleToLongBits(diff);
            while (cacheMap.containsKey(diff)) {
                bits++;
                diff = Double.longBitsToDouble(bits);
            }
        }
        cacheMap.put(diff, cache);
    }
    return cacheMap;
}
/**
 * Filters the given cache locations down to those available for the delivery
 * service and orders them by distance from the client's location. If the
 * client's location could not be determined, the comparator leaves the list
 * effectively unsorted.
 *
 * @param request the client's request
 * @param cacheLocations the collection of CacheLocations to filter and order
 * @param ds the delivery service whose availability check is applied
 * @param clientLocation the client's geolocation used for distance sorting
 * @return the filtered, ordered list of locations
 */
protected List<CacheLocation> orderCacheLocations(final Request request,
        final Collection<CacheLocation> cacheLocations,
        final DeliveryService ds,
        final Geolocation clientLocation) {
    final List<CacheLocation> available = new ArrayList<CacheLocation>();
    for (final CacheLocation candidate : cacheLocations) {
        if (ds.isLocationAvailable(candidate)) {
            available.add(candidate);
        }
    }
    Collections.sort(available, new CacheLocationComparator(clientLocation));
    return available;
}
/**
 * Selects the online caches at the given {@link CacheLocation} which support
 * the given delivery service.
 *
 * @param location
 *            the location whose caches will be considered
 * @param ds
 *            the delivery service for the request
 * @return the supporting caches at the location, or null if none are available
 */
private List<Cache> selectCache(final CacheLocation location,
        final DeliveryService ds) {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Trying location: " + location.getId());
    }
    final List<Cache> caches = getSupportingCaches(location.getCaches(), ds);
    if (caches.isEmpty()) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("No online, supporting caches were found at location: "
                    + location.getId());
        }
        return null;
    }
    // Fix: removed stale javadoc (@param request documented a parameter that does
    // not exist) and dead commented-out code that trailed the return statement.
    return caches;
}
/**
 * Resolves the DNS zone for a query by delegating to the zone manager.
 *
 * @param qname the queried name
 * @param qtype the DNS record type of the query
 * @param clientAddress the address of the requesting client
 * @param isDnssecRequest whether the client requested DNSSEC
 * @param builder builder for the DNS access log record
 * @return the zone as resolved by the zone manager
 */
public Zone getZone(final Name qname, final int qtype, final InetAddress clientAddress, final boolean isDnssecRequest, final DNSAccessRecord.Builder builder) {
    return zoneManager.getZone(qname, qtype, clientAddress, isDnssecRequest, builder);
}
/**
 * Sets the request header names this router tracks.
 *
 * @param requestHeaders the header names to track
 */
public void setRequestHeaders(final Set<String> requestHeaders) {
    this.requestHeaders = requestHeaders;
}
/**
 * Returns the request header names this router tracks.
 *
 * @return the tracked header names
 */
public Set<String> getRequestHeaders() {
    return requestHeaders;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.factories;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.DefaultCatalogTable;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.sink.SinkFunctionProvider;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.LookupTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.SourceFunctionProvider;
import org.apache.flink.table.connector.source.TableFunctionProvider;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.factories.DynamicTableFactory;
import org.apache.flink.table.factories.DynamicTableSinkFactory;
import org.apache.flink.table.factories.DynamicTableSourceFactory;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.testutils.junit.SharedObjects;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
/**
* Provides a flexible testing harness for table factories.
*
* <p>This testing harness allows writing custom sources and sinks which can be directly
* instantiated from the test. This avoids having to implement a factory, and enables using the
* {@link SharedObjects} rule to get direct access to the underlying source/sink from the test.
*
* <p>The underlying source/sink must extend from {@link SourceBase} or {@link SinkBase}.
*
* <p>Example:
*
* <pre>{@code
* public class CustomSourceTest {
* {@literal @}Rule public SharedObjects sharedObjects = SharedObjects.create();
*
* {@literal @}Test
* public void test() {
* SharedReference<List<Long>> appliedLimits = sharedObjects.add(new ArrayList<>());
* TableDescriptor sourceDescriptor =
* TableFactoryHarness.newBuilder()
* .schema(Schema.derived())
* .source(new CustomSource(appliedLimits))
* .build();
* tEnv.createTable("T", sourceDescriptor);
*
* tEnv.explainSql("SELECT * FROM T LIMIT 42");
*
* assertEquals(1, appliedLimits.get().size());
* assertEquals((Long) 42L, appliedLimits.get().get(0));
* }
*
* private static class CustomSource extends TableFactoryHarness.ScanSourceBase
* implements SupportsLimitPushDown {
* private final SharedReference<List<Long>> appliedLimits;
*
* CustomSource(SharedReference<List<Long>> appliedLimits) {
* this.appliedLimits = appliedLimits;
* }
*
* public void applyLimit(long limit) {
* appliedLimits.get().add(limit);
* }
* }
* }
* }</pre>
*/
public class TableFactoryHarness {

    /** Factory identifier for {@link Factory}. */
    public static final String IDENTIFIER = "harness";

    // ---------------------------------------------------------------------------------------------

    /**
     * Creates a builder for a new {@link TableDescriptor} specialized for this harness.
     *
     * <p>Use this method to create a {@link TableDescriptor} and passing in the source / sink
     * implementation you want to use. The descriptor can for example be used with {@link
     * TableEnvironment#createTable(String, TableDescriptor)} to register a table.
     */
    public static HarnessTableDescriptor.Builder newBuilder() {
        return new HarnessTableDescriptor.Builder();
    }

    // ---------------------------------------------------------------------------------------------

    /**
     * Factory which creates a source / sink defined in the specialized {@link HarnessCatalogTable}.
     */
    public static class Factory implements DynamicTableSourceFactory, DynamicTableSinkFactory {

        @Override
        public String factoryIdentifier() {
            return IDENTIFIER;
        }

        @Override
        public Set<ConfigOption<?>> requiredOptions() {
            // The harness is configured programmatically, not via connector options.
            return Collections.emptySet();
        }

        @Override
        public Set<ConfigOption<?>> optionalOptions() {
            return Collections.emptySet();
        }

        @Override
        public DynamicTableSource createDynamicTableSource(Context context) {
            final FactoryUtil.TableFactoryHelper factoryHelper =
                    FactoryUtil.createTableFactoryHelper(this, context);
            factoryHelper.validate();
            // The origin is the HarnessCatalogTable produced by HarnessTableDescriptor below.
            final HarnessCatalogTable catalogTable =
                    (HarnessCatalogTable) context.getCatalogTable().getOrigin();
            if (catalogTable.source == null) {
                throw new ValidationException(
                        String.format(
                                "Table '%s' has no source configured.",
                                context.getObjectIdentifier()));
            }
            // Expose the factory context to tests via SourceBase#getFactoryContext().
            catalogTable.source.factoryContext = context;
            return catalogTable.source;
        }

        @Override
        public DynamicTableSink createDynamicTableSink(Context context) {
            final FactoryUtil.TableFactoryHelper factoryHelper =
                    FactoryUtil.createTableFactoryHelper(this, context);
            factoryHelper.validate();
            // The origin is the HarnessCatalogTable produced by HarnessTableDescriptor below.
            final HarnessCatalogTable catalogTable =
                    (HarnessCatalogTable) context.getCatalogTable().getOrigin();
            if (catalogTable.sink == null) {
                throw new ValidationException(
                        String.format(
                                "Table '%s' has no sink configured.",
                                context.getObjectIdentifier()));
            }
            // Expose the factory context to tests via SinkBase#getFactoryContext().
            catalogTable.sink.factoryContext = context;
            return catalogTable.sink;
        }
    }

    /**
     * Specialized version of {@link TableDescriptor} which allows passing a custom source / sink to
     * the {@link CatalogTable} created from it.
     */
    private static class HarnessTableDescriptor extends TableDescriptor {

        // Source / sink instances to embed into the generated catalog table; either may be null.
        private final @Nullable SourceBase source;
        private final @Nullable SinkBase sink;

        private HarnessTableDescriptor(
                Schema schema, @Nullable SourceBase source, @Nullable SinkBase sink) {
            super(
                    schema,
                    Collections.singletonMap(FactoryUtil.CONNECTOR.key(), IDENTIFIER),
                    Collections.emptyList(),
                    null);
            this.source = source;
            this.sink = sink;
        }

        @Override
        public CatalogTable toCatalogTable() {
            return new HarnessCatalogTable(super.toCatalogTable(), source, sink);
        }

        @Override
        public TableDescriptor.Builder toBuilder() {
            return new Builder(this).source(source).sink(sink);
        }

        /** Builder for {@link HarnessTableDescriptor}. */
        public static class Builder extends TableDescriptor.Builder {

            private @Nullable Schema schema;
            private @Nullable SourceBase source;
            private @Nullable SinkBase sink;

            private Builder() {
                super();
            }

            private Builder(TableDescriptor descriptor) {
                super(descriptor);
            }

            /** Define the schema of the {@link TableDescriptor}. */
            public Builder schema(Schema schema) {
                // NOTE(review): the schema is tracked locally and consumed by build() directly;
                // any schema state in the parent builder is bypassed — confirm this is intended.
                this.schema = schema;
                return this;
            }

            /** Use a bounded {@link ScanTableSource} which produces no data. */
            public Builder boundedScanSource() {
                return source(new ScanSourceBase(true) {});
            }

            /** Use an unbounded {@link ScanTableSource} which produces no data. */
            public Builder unboundedScanSource() {
                return source(new ScanSourceBase(false) {});
            }

            /**
             * Use an unbounded {@link ScanTableSource} with the given {@code changelogMode} which
             * produces no data.
             */
            public Builder unboundedScanSource(ChangelogMode changelogMode) {
                return source(
                        new ScanSourceBase(false) {
                            @Override
                            public ChangelogMode getChangelogMode() {
                                return changelogMode;
                            }
                        });
            }

            /** Use a {@link LookupTableSource} which produces no data. */
            public Builder lookupSource() {
                return source(new LookupSourceBase() {});
            }

            /** Use a custom {@link DynamicTableSource}. */
            public Builder source(SourceBase source) {
                this.source = source;
                return this;
            }

            /** Use a {@link DynamicTableSink} which discards all data. */
            public Builder sink() {
                return sink(new SinkBase() {});
            }

            /** Use a custom {@link DynamicTableSink}. */
            public Builder sink(SinkBase sink) {
                this.sink = sink;
                return this;
            }

            /** Builds a {@link TableDescriptor}. */
            public TableDescriptor build() {
                return new HarnessTableDescriptor(schema, source, sink);
            }
        }
    }

    /** Specialized {@link CatalogTable} which contains a custom source / sink. */
    private static class HarnessCatalogTable extends DefaultCatalogTable {

        // Read directly by Factory above when instantiating the source / sink.
        private final @Nullable SourceBase source;
        private final @Nullable SinkBase sink;

        public HarnessCatalogTable(
                CatalogTable parentTable, @Nullable SourceBase source, @Nullable SinkBase sink) {
            super(
                    parentTable.getUnresolvedSchema(),
                    parentTable.getComment(),
                    parentTable.getPartitionKeys(),
                    parentTable.getOptions());
            this.source = source;
            this.sink = sink;
        }

        @Override
        public CatalogBaseTable copy() {
            return copy(getOptions());
        }

        @Override
        public CatalogTable copy(Map<String, String> options) {
            // Copy the plain table data but carry the same source / sink instances along.
            final CatalogTable parentTable =
                    CatalogTable.of(
                            getUnresolvedSchema(), getComment(), getPartitionKeys(), options);
            return new HarnessCatalogTable(parentTable, source, sink);
        }
    }

    // ---------------------------------------------------------------------------------------------

    /**
     * Base class for custom sources which implement {@link ScanTableSource}.
     *
     * <p>Most interface methods are default-implemented for convenience, but can be overridden when
     * necessary. By default, a {@link ScanRuntimeProvider} is used which doesn't produce anything.
     *
     * <p>Sources derived from this base class will also be provided the {@link
     * DynamicTableFactory.Context} of the factory which gives access to e.g. the {@link
     * CatalogTable}.
     */
    public abstract static class ScanSourceBase extends SourceBase implements ScanTableSource {

        // Whether the no-op source function below reports itself as bounded.
        private final boolean bounded;

        public ScanSourceBase() {
            this(true);
        }

        public ScanSourceBase(boolean bounded) {
            this.bounded = bounded;
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }

        @Override
        public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
            // A source function that emits nothing; run() returns immediately.
            return SourceFunctionProvider.of(
                    new SourceFunction<RowData>() {
                        @Override
                        public void run(SourceContext<RowData> ctx) {}

                        @Override
                        public void cancel() {}
                    },
                    bounded);
        }
    }

    /**
     * Base class for custom sources which implement {@link LookupTableSource}.
     *
     * <p>Most interface methods are default-implemented for convenience, but can be overridden when
     * necessary. By default, a {@link LookupRuntimeProvider} is used which doesn't produce
     * anything.
     *
     * <p>Sources derived from this base class will also be provided the {@link
     * DynamicTableFactory.Context} of the factory which gives access to e.g. the {@link
     * CatalogTable}.
     */
    public abstract static class LookupSourceBase extends SourceBase implements LookupTableSource {

        @Override
        public LookupRuntimeProvider getLookupRuntimeProvider(LookupContext context) {
            // A table function with no eval implementation; lookups yield nothing.
            return TableFunctionProvider.of(new TableFunction<RowData>() {});
        }
    }

    /**
     * Base class for custom sinks.
     *
     * <p>Most interface methods are default-implemented for convenience, but can be overridden when
     * necessary. By default, a {@link SinkRuntimeProvider} is used which does nothing.
     *
     * <p>Sinks derived from this base class will also be provided the {@link
     * DynamicTableFactory.Context} of the factory which gives access to e.g. the {@link
     * CatalogTable}.
     */
    public abstract static class SinkBase implements DynamicTableSink {

        // Assigned by Factory#createDynamicTableSink when the sink is instantiated.
        private DynamicTableFactory.Context factoryContext;

        @Override
        public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
            return ChangelogMode.all();
        }

        @Override
        public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
            // A sink function that silently discards every record.
            return SinkFunctionProvider.of(
                    new SinkFunction<RowData>() {
                        @Override
                        public void invoke(RowData value, Context context1) {}
                    });
        }

        @Override
        public DynamicTableSink copy() {
            return this;
        }

        @Override
        public String asSummaryString() {
            return "Unspecified Testing Sink";
        }

        public DynamicTableFactory.Context getFactoryContext() {
            return factoryContext;
        }
    }

    /** Base class for {@link ScanSourceBase} and {@link LookupSourceBase}. */
    private abstract static class SourceBase implements DynamicTableSource {

        // Assigned by Factory#createDynamicTableSource when the source is instantiated.
        private DynamicTableFactory.Context factoryContext;

        @Override
        public DynamicTableSource copy() {
            return this;
        }

        @Override
        public String asSummaryString() {
            return "Unspecified Testing Source";
        }

        public DynamicTableFactory.Context getFactoryContext() {
            return factoryContext;
        }
    }
}
| |
/**
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.store;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.github.ambry.clustermap.ClusterAgentsFactory;
import com.github.ambry.clustermap.ClusterMap;
import com.github.ambry.commons.BlobIdFactory;
import com.github.ambry.config.ClusterMapConfig;
import com.github.ambry.config.StoreConfig;
import com.github.ambry.config.VerifiableProperties;
import com.github.ambry.messageformat.MessageFormatException;
import com.github.ambry.tools.util.ToolUtils;
import com.github.ambry.utils.SystemTime;
import com.github.ambry.utils.Throttler;
import com.github.ambry.utils.Time;
import com.github.ambry.utils.Utils;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tool to assist in dumping data from data files in Ambry
* Supported operations are
* 1. Compare Index entries to Log entries
* 2. Compare all entries in all indexes in a replica to Log entries
* 3. Compare Log entries to index entries
*/
public class DumpDataTool {
private final ClusterMap clusterMap;
// The index file that needs to be dumped for comparison purposes
private final String fileToRead;
// File path referring to the hardware layout
private final String hardwareLayoutFilePath;
// File path referring to the partition layout
private final String partitionLayoutFilePath;
// The type of operation to perform
private final String typeOfOperation;
// Path referring to replica root directory
private final String replicaRootDirectory;
// The throttling value in index entries per sec
private final double indexEntriesPerSec;
private final StoreToolsMetrics metrics;
private final Throttler throttler;
private final Time time;
private final long currentTimeInMs;
private static final Logger logger = LoggerFactory.getLogger(DumpDataTool.class);
/**
 * Constructs the tool from the supplied properties, instantiating the cluster map
 * from the configured hardware and partition layout files.
 *
 * @param verifiableProperties the properties configuring the tool
 * @param metrics the {@link StoreToolsMetrics} instance used to record timings and errors
 * @throws Exception if the layout files are missing or the cluster map cannot be created
 */
public DumpDataTool(VerifiableProperties verifiableProperties, StoreToolsMetrics metrics) throws Exception {
    fileToRead = verifiableProperties.getString("file.to.read", "");
    hardwareLayoutFilePath = verifiableProperties.getString("hardware.layout.file.path");
    partitionLayoutFilePath = verifiableProperties.getString("partition.layout.file.path");
    typeOfOperation = verifiableProperties.getString("type.of.operation");
    replicaRootDirectory = verifiableProperties.getString("replica.root.directory", "");
    indexEntriesPerSec = verifiableProperties.getDouble("index.entries.per.sec", 1000);
    // Throttle index-entry processing to the configured rate, checked every 1000 ms.
    throttler = new Throttler(indexEntriesPerSec, 1000, true, SystemTime.getInstance());
    if (!new File(hardwareLayoutFilePath).exists() || !new File(partitionLayoutFilePath).exists()) {
        throw new IllegalArgumentException("Hardware or Partition Layout file does not exist");
    }
    ClusterMapConfig clusterMapConfig = new ClusterMapConfig(verifiableProperties);
    this.clusterMap =
        ((ClusterAgentsFactory) Utils.getObj(clusterMapConfig.clusterMapClusterAgentsFactory, clusterMapConfig,
            hardwareLayoutFilePath, partitionLayoutFilePath)).getClusterMap();
    time = SystemTime.getInstance();
    // Snapshot the current time once so every log record is read against the same "now".
    currentTimeInMs = time.milliseconds();
    this.metrics = metrics;
}
/**
 * Entry point: reads tool properties from the command line, starts a JMX metrics
 * reporter, runs the configured operation, and stops the reporter on exit.
 *
 * @param args command-line arguments parsed into {@link VerifiableProperties}
 * @throws Exception if construction or the operation fails
 */
public static void main(String args[]) throws Exception {
    VerifiableProperties verifiableProperties = ToolUtils.getVerifiableProperties(args);
    MetricRegistry registry = new MetricRegistry();
    StoreToolsMetrics metrics = new StoreToolsMetrics(registry);
    JmxReporter reporter = null;
    try {
        reporter = JmxReporter.forRegistry(registry).build();
        reporter.start();
        DumpDataTool dumpDataTool = new DumpDataTool(verifiableProperties, metrics);
        dumpDataTool.doOperation();
    } finally {
        // Always stop the reporter, even when the operation throws.
        if (reporter != null) {
            reporter.stop();
        }
    }
}
/**
 * Executes the operation selected by the {@code type.of.operation} property that
 * was supplied when this {@link DumpDataTool} was initialized.
 *
 * @throws Exception if the underlying comparison fails
 */
public void doOperation() throws Exception {
    logger.info("Type of Operation " + typeOfOperation);
    if (typeOfOperation.equals("CompareIndexToLog")) {
        compareIndexEntriesToLogContent(new File(fileToRead), false);
    } else if (typeOfOperation.equals("CompareReplicaIndexesToLog")) {
        compareReplicaIndexEntriestoLogContent(replicaRootDirectory);
    } else {
        logger.error("Unknown typeOfOperation " + typeOfOperation);
    }
}
/**
 * Compares every entry in every index file of a replica with those in the log,
 * checking that each blob referenced by the index deserializes from the log.
 *
 * @param replicaRootDirectory the root directory of the replica
 * @throws Exception if the directory is missing, holds no index files, or a comparison fails
 */
private void compareReplicaIndexEntriestoLogContent(String replicaRootDirectory) throws Exception {
    if (!new File(replicaRootDirectory).exists()) {
        throw new IllegalArgumentException("Replica root directory does not exist " + replicaRootDirectory);
    }
    final Timer.Context context = metrics.compareReplicaIndexFilesToLogTimeMs.time();
    try {
        logger.info("Comparing Index entries to Log ");
        File[] indexFiles = new File(replicaRootDirectory).listFiles(PersistentIndex.INDEX_SEGMENT_FILE_FILTER);
        if (indexFiles == null || indexFiles.length == 0) {
            throw new IllegalStateException("No index files found in replica root directory " + replicaRootDirectory);
        }
        Arrays.sort(indexFiles, PersistentIndex.INDEX_SEGMENT_FILE_COMPARATOR);
        for (int idx = 0; idx < indexFiles.length; idx++) {
            final boolean isLastSegment = idx == indexFiles.length - 1;
            // Verify the end offset for the last segment, and whenever the next index
            // segment belongs to a different log segment than this one.
            boolean verifyEndOffset = isLastSegment;
            if (!isLastSegment) {
                final String thisLogSegment = IndexSegment.getIndexSegmentStartOffset(indexFiles[idx].getName()).getName();
                final String nextLogSegment = IndexSegment.getIndexSegmentStartOffset(indexFiles[idx + 1].getName()).getName();
                verifyEndOffset = !thisLogSegment.equals(nextLogSegment);
            }
            compareIndexEntriesToLogContent(indexFiles[idx], verifyEndOffset);
        }
    } finally {
        context.stop();
    }
}
/**
 * Log ranges not covered by the index in the log
 * @param coveredRanges {@link Map} of startOffsets to endOffsets of ranges covered by records in the log
 * @param indexEndOffset the end offset in the log that this index segment covers
 */
private void logRangesNotCovered(Map<Long, Long> coveredRanges, long indexEndOffset) {
    Iterator<Map.Entry<Long, Long>> iterator = coveredRanges.entrySet().iterator();
    // Fix: an empty map previously made the unconditional iterator.next() below throw
    // NoSuchElementException; bail out early when there is nothing to check.
    if (!iterator.hasNext()) {
        logger.trace("No covered ranges to check against index end offset {}", indexEndOffset);
        return;
    }
    Map.Entry<Long, Long> prevEntry = iterator.next();
    logger.trace("Record startOffset {} , endOffset {} ", prevEntry.getKey(), prevEntry.getValue());
    while (iterator.hasNext()) {
        Map.Entry<Long, Long> curEntry = iterator.next();
        logger.trace("Record startOffset {} , endOffset {} ", curEntry.getKey(), curEntry.getValue());
        // Adjacent ranges must abut exactly; a gap means log data the index never covered.
        if (prevEntry.getValue().compareTo(curEntry.getKey()) != 0) {
            metrics.logRangeNotFoundInIndexError.inc();
            logger.error("Cannot find entries in Index ranging from " + prevEntry.getValue() + " to " + curEntry.getKey()
                + " with a hole of size " + (curEntry.getKey() - prevEntry.getValue()) + " in the Log");
        }
        prevEntry = curEntry;
    }
    // The final range must end exactly where the index segment says the log ends.
    if (prevEntry.getValue().compareTo(indexEndOffset) != 0) {
        logger.error("End offset mismatch. FileEndPointer from the index segment " + indexEndOffset
            + ", end offset as per records " + prevEntry.getValue());
    }
}
/**
 * Compares every entry in an index file with those in the log. Checks to see if each blob in index is successfully
 * deserializable from the log
 * @param indexFile the file that represents the index segment.
 * @param checkLogEndOffsetMatch if {@code true}, checks that the end offset of the log matches the end offset of the
 *                               index.
 * @throws Exception
 */
private void compareIndexEntriesToLogContent(File indexFile, boolean checkLogEndOffsetMatch) throws Exception {
    if (!indexFile.exists()) {
        throw new IllegalArgumentException("File does not exist " + indexFile);
    }
    final Timer.Context context = metrics.compareIndexFileToLogTimeMs.time();
    try {
        logger.info("Dumping index {}", indexFile.getAbsolutePath());
        StoreKeyFactory storeKeyFactory = new BlobIdFactory(clusterMap);
        StoreConfig config = new StoreConfig(new VerifiableProperties(new Properties()));
        MetricRegistry metricRegistry = new MetricRegistry();
        StoreMetrics storeMetrics =
            new StoreMetrics(indexFile.getParent(), metricRegistry, new AggregatedStoreMetrics(metricRegistry));
        IndexSegment segment = new IndexSegment(indexFile, false, storeKeyFactory, config, storeMetrics,
            new Journal(indexFile.getParent(), 0, 0), time);
        Offset startOffset = segment.getStartOffset();
        TreeMap<Long, Long> coveredRanges = new TreeMap<>();
        String logFileName = LogSegmentNameHelper.nameToFilename(segment.getLogSegmentName());
        File logFile = new File(indexFile.getParent(), logFileName);
        if (!logFile.exists()) {
            throw new IllegalStateException("Log file does not exist " + logFile);
        }
        // Fix: the log file handle was previously never closed (resource leak); use
        // try-with-resources so it is released on every exit path.
        try (RandomAccessFile randomAccessFile = new RandomAccessFile(logFile, "r")) {
            long logFileSize = randomAccessFile.getChannel().size();
            List<MessageInfo> entries = new ArrayList<>();
            segment.getEntriesSince(null, new FindEntriesCondition(Long.MAX_VALUE), entries, new AtomicLong(0));
            for (MessageInfo entry : entries) {
                StoreKey key = entry.getStoreKey();
                IndexValue value = segment.find(key);
                boolean isDeleted = value.isFlagSet(IndexValue.Flags.Delete_Index);
                if (value.getOffset().getOffset() < logFileSize) {
                    boolean success = readFromLogAndVerify(randomAccessFile, key.getID(), value, coveredRanges);
                    if (success) {
                        if (isDeleted) {
                            // For a delete record, also try to account for the original PUT it refers to.
                            long originalOffset = value.getOriginalMessageOffset();
                            if (originalOffset != -1) {
                                if (!coveredRanges.containsKey(originalOffset)) {
                                    if (startOffset.getOffset() > originalOffset) {
                                        // The PUT lives before this index segment's window; nothing to verify here.
                                        logger.trace(
                                            "Put Record at {} with delete msg offset {} ignored because it is prior to startOffset {}",
                                            originalOffset, value.getOffset(), startOffset);
                                    } else {
                                        try {
                                            DumpDataHelper.LogBlobRecordInfo logBlobRecordInfo =
                                                DumpDataHelper.readSingleRecordFromLog(randomAccessFile, originalOffset, clusterMap,
                                                    currentTimeInMs, metrics);
                                            coveredRanges.put(originalOffset, originalOffset + logBlobRecordInfo.totalRecordSize);
                                            logger.trace(
                                                "PUT Record {} with start offset {} and end offset {} for a delete msg {} at offset {} ",
                                                logBlobRecordInfo.blobId, originalOffset,
                                                (originalOffset + logBlobRecordInfo.totalRecordSize), key.getID(), value.getOffset());
                                            if (!logBlobRecordInfo.blobId.getID().equals(key.getID())) {
                                                logger.error("BlobId value mismatch between delete record {} and put record {}", key.getID(),
                                                    logBlobRecordInfo.blobId.getID());
                                            }
                                        } catch (IllegalArgumentException e) {
                                            metrics.logDeserializationError.inc();
                                            logger.error("Illegal arg exception thrown at " + randomAccessFile.getChannel().position() + ", "
                                                + "while reading blob starting at offset " + originalOffset + " with exception: ", e);
                                        } catch (MessageFormatException e) {
                                            metrics.logDeserializationError.inc();
                                            logger.error("MessageFormat exception thrown at " + randomAccessFile.getChannel().position()
                                                + " while reading blob starting at offset " + originalOffset + " with exception: ", e);
                                        } catch (EOFException e) {
                                            metrics.endOfFileOnDumpLogError.inc();
                                            logger.error("EOFException thrown at " + randomAccessFile.getChannel().position() + " ", e);
                                        } catch (Exception e) {
                                            metrics.unknownErrorOnDumpIndex.inc();
                                            logger.error("Unknown exception thrown " + e.getMessage() + " ", e);
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        metrics.indexToLogBlobRecordComparisonFailure.inc();
                        logger.error("Failed for key {} with value {} ", key, value);
                    }
                } else {
                    logger.trace("Blob's {} offset {} is outside of log size {}, with a diff of {}", key,
                        value.getOffset().getOffset(), logFileSize, (value.getOffset().getOffset() - logFileSize));
                }
            }
            throttler.maybeThrottle(entries.size());
            long indexEndOffset = segment.getEndOffset().getOffset();
            if (checkLogEndOffsetMatch && indexEndOffset != randomAccessFile.length()) {
                metrics.indexLogEndOffsetMisMatchError.inc();
                logger.error("Log end offset {} and index end offset {} do not match", randomAccessFile.length(),
                    indexEndOffset);
            }
            logRangesNotCovered(coveredRanges, indexEndOffset);
        }
    } finally {
        context.stop();
    }
}
/**
 * Dumps a single record from the log at a given offset and verifies for corresponding values in index
 * @param randomAccessFile the {@link RandomAccessFile} referring to log file that needs to be parsed
 * @param blobId the blobId which that is expected to be matched for the record present at
 *               <code>offset</code>
 * @param indexValue the {@link IndexValue} that needs to be compared against
 * @param coveredRanges a {@link Map} of startOffset to endOffset of ranges covered by records in the log;
 *                      may be null, in which case no range is recorded
 * @return {@code true} if the record was read and compared successfully, {@code false} on any error
 * @throws Exception
 */
private boolean readFromLogAndVerify(RandomAccessFile randomAccessFile, String blobId, IndexValue indexValue,
    Map<Long, Long> coveredRanges) throws Exception {
    final Timer.Context context = metrics.readFromLogAndVerifyTimeMs.time();
    long offset = indexValue.getOffset().getOffset();
    try {
        DumpDataHelper.LogBlobRecordInfo logBlobRecordInfo =
            DumpDataHelper.readSingleRecordFromLog(randomAccessFile, offset, clusterMap, currentTimeInMs, metrics);
        if (coveredRanges != null) {
            // Remember the byte range this record occupies for the later coverage check.
            coveredRanges.put(offset, offset + logBlobRecordInfo.totalRecordSize);
        }
        compareIndexValueToLogEntry(blobId, indexValue, logBlobRecordInfo);
        if (!logBlobRecordInfo.isDeleted) {
            logger.trace("{}", logBlobRecordInfo.messageHeader + "\n " + logBlobRecordInfo.blobId.getID() + "\n"
                + logBlobRecordInfo.blobProperty + "\n" + logBlobRecordInfo.userMetadata + "\n"
                + logBlobRecordInfo.blobDataOutput);
        } else {
            logger.trace("{}", logBlobRecordInfo.messageHeader + "\n " + logBlobRecordInfo.blobId.getID() + "\n"
                + logBlobRecordInfo.deleteMsg);
        }
        return true;
    } catch (IllegalArgumentException e) {
        metrics.logDeserializationError.inc();
        logger.error("Illegal arg exception thrown at " + randomAccessFile.getChannel().position() + ", "
            + "while reading blob starting at offset " + offset + " with exception: ", e);
    } catch (MessageFormatException e) {
        metrics.logDeserializationError.inc();
        logger.error("MessageFormat exception thrown at " + randomAccessFile.getChannel().position()
            + " while reading blob starting at offset " + offset + " with exception: ", e);
    } catch (EOFException e) {
        metrics.endOfFileOnDumpLogError.inc();
        logger.error("EOFException thrown at " + randomAccessFile.getChannel().position() + " ", e);
    } catch (Exception e) {
        metrics.unknownErrorOnDumpLog.inc();
        logger.error("Unknown exception thrown " + e.getMessage() + " ", e);
    } finally {
        context.stop();
    }
    return false;
}
/**
* Compares values from index to that in the Log
* @param blobId the blobId for which comparison is made
* @param indexValue the {@link IndexValue} to be used in comparison
* @param logBlobRecordInfo the {@link DumpDataHelper.LogBlobRecordInfo} to be used in comparison
*/
private void compareIndexValueToLogEntry(String blobId, IndexValue indexValue,
DumpDataHelper.LogBlobRecordInfo logBlobRecordInfo) {
boolean isDeleted = indexValue.isFlagSet(IndexValue.Flags.Delete_Index);
boolean isExpired = DumpDataHelper.isExpired(indexValue.getExpiresAtMs(), currentTimeInMs);
if (isDeleted != logBlobRecordInfo.isDeleted) {
metrics.indexToLogDeleteFlagMisMatchError.inc();
logger.error(
"Deleted value mismatch for " + logBlobRecordInfo.blobId + " Index value " + isDeleted + ", Log value "
+ logBlobRecordInfo.isDeleted);
} else if (!logBlobRecordInfo.isDeleted && isExpired != logBlobRecordInfo.isExpired) {
metrics.indexToLogExpiryMisMatchError.inc();
logger.error(
"Expiration value mismatch for " + logBlobRecordInfo.blobId + " Index value " + isExpired + ", Log value "
+ logBlobRecordInfo.isExpired + ", index expiresAt in ms " + indexValue.getExpiresAtMs()
+ ", log expiresAt in ms " + logBlobRecordInfo.expiresAtMs);
} else if (!blobId.equals(logBlobRecordInfo.blobId.getID())) {
metrics.indexToLogBlobIdMisMatchError.inc();
logger.error("BlobId value mismatch for " + logBlobRecordInfo.blobId + " Index value " + blobId + ", Log value "
+ logBlobRecordInfo.blobId);
}
}
}
| |
package twg2.parser.language;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Function;
import twg2.parser.codeParser.Keyword;
import twg2.parser.codeParser.AstExtractor;
import twg2.parser.codeParser.AstUtil;
import twg2.parser.codeParser.BlockType;
import twg2.parser.codeParser.BlockUtil;
import twg2.parser.codeParser.KeywordUtil;
import twg2.parser.codeParser.Operator;
import twg2.parser.codeParser.OperatorUtil;
import twg2.parser.codeParser.csharp.CsAstUtil;
import twg2.parser.codeParser.csharp.CsBlock;
import twg2.parser.codeParser.csharp.CsBlock.CsBlockUtil;
import twg2.parser.codeParser.csharp.CsBlockParser;
import twg2.parser.codeParser.csharp.CsFileTokenizer;
import twg2.parser.codeParser.csharp.CsKeyword;
import twg2.parser.codeParser.csharp.CsOperator;
import twg2.parser.codeParser.java.JavaAstUtil;
import twg2.parser.codeParser.java.JavaBlock;
import twg2.parser.codeParser.java.JavaBlock.JavaBlockUtil;
import twg2.parser.codeParser.java.JavaBlockParser;
import twg2.parser.codeParser.java.JavaFileTokenizer;
import twg2.parser.codeParser.java.JavaKeyword;
import twg2.parser.codeParser.java.JavaOperator;
import twg2.parser.tokenizers.CodeTokenizer;
import twg2.parser.workflow.CodeFileSrc;
import twg2.parser.workflow.ParseInput;
/**
 * Registry of the source code languages supported by this parser. Each language bundles a
 * tokenizer/parser with its AST, keyword, operator, and block utilities behind the
 * {@link CodeLanguage} interface. This enum declares no constants; it serves as a static namespace.
 * @author TeamworkGuy2
 * @since 2015-9-19
 */
public enum CodeLanguageOptions {
    ;

    /**
     * Immutable holder tying together the per-language utilities and parser for one language.
     * @author TeamworkGuy2
     * @since 2016-1-16
     * @param <T_BLOCK> the block type enum for this language
     * @param <T_KEYWORD> the keyword enum containing this language's keywords
     * @param <T_LANG> this language's {@link CodeLanguage} type
     * @param <T_OP> the operator enum for this language
     * @param <T_AST_UTIL> the {@link AstUtil} type for this language
     * @param <T_OP_UTIL> the {@link OperatorUtil} type for this language
     * @param <T_AST_EXTRACTOR> {@link AstExtractor} type for this language
     */
    public static class CodeLanguageImpl<
        T_BLOCK extends BlockType,
        T_KEYWORD extends Keyword,
        T_LANG extends CodeLanguage,
        T_OP extends Operator,
        T_AST_UTIL extends AstUtil<T_BLOCK, T_KEYWORD>,
        T_OP_UTIL extends OperatorUtil<T_OP>,
        T_AST_EXTRACTOR extends AstExtractor<T_BLOCK>
    > implements CodeLanguage {
        final String displayName;
        final BlockUtil<T_BLOCK, T_KEYWORD> blockUtil;
        final T_AST_UTIL astUtil;
        final KeywordUtil<T_KEYWORD> keywordUtil;
        final T_OP_UTIL operatorUtil;
        final Function<ParseInput, CodeFileSrc> parser;
        final T_AST_EXTRACTOR extractor;
        final List<String> fileExtensions;

        public BlockUtil<T_BLOCK, T_KEYWORD> getBlockUtil() { return blockUtil; }
        @Override public T_AST_UTIL getAstUtil() { return astUtil; }
        @Override public KeywordUtil<T_KEYWORD> getKeywordUtil() { return keywordUtil; }
        @Override public T_OP_UTIL getOperatorUtil() { return operatorUtil; }
        @Override public Function<ParseInput, CodeFileSrc> getParser() { return parser; }
        // NOTE(review): returns the internal mutable list; callers are expected not to modify it
        @Override public T_AST_EXTRACTOR getExtractor() { return extractor; }
        @Override public List<String> getFileExtensions() { return fileExtensions; }

        /** Create a new code language instance.
         * @param displayName the common name of the language
         * @param blockUtil the block utility for this language
         * @param astUtil the utility used to parse and convert specific features of this language to the common {@code baseAst} representation used by this project
         * @param keywordUtil the keyword utility for this language
         * @param operatorUtil the operator utility for this language
         * @param parser the parser builder for this language, call it with {@link ParseInput} parameters and get back a parsed {@link CodeFileSrc}.
         * NOTE: this function should be thread-safe or should have no side effects
         * @param extractor the AST extractor for this language
         * @param fileExtensions a list of file extensions associated with this language (copied defensively)
         */
        // package-private
        @SuppressWarnings("unchecked")
        CodeLanguageImpl(
            String displayName,
            BlockUtil<T_BLOCK, T_KEYWORD> blockUtil,
            AstUtil<? extends T_BLOCK, ? extends T_KEYWORD> astUtil,
            KeywordUtil<? extends T_KEYWORD> keywordUtil,
            T_OP_UTIL operatorUtil,
            Function<ParseInput, CodeFileSrc> parser,
            T_AST_EXTRACTOR extractor,
            List<String> fileExtensions
        ) {
            this.displayName = displayName;
            this.blockUtil = blockUtil;
            // casts narrow the wildcard bounds accepted by the constructor to the class' type params
            this.astUtil = (T_AST_UTIL)astUtil;
            this.keywordUtil = (KeywordUtil<T_KEYWORD>)keywordUtil;
            this.operatorUtil = operatorUtil;
            this.parser = parser;
            this.extractor = extractor;
            this.fileExtensions = new ArrayList<>(fileExtensions);
        }

        @Override
        public String displayName() {
            return displayName;
        }
    }


    /** C# language binding. */
    public static class CSharp extends CodeLanguageImpl<CsBlock, CsKeyword, CSharp, CsOperator, CsAstUtil, CsOperator.Inst, AstExtractor<CsBlock>> {
        CSharp(
            String displayName,
            CsBlockUtil blockUtil,
            CsAstUtil astUtil,
            KeywordUtil<CsKeyword> keywordUtil,
            CsOperator.Inst operatorUtil,
            Function<ParseInput, CodeFileSrc> parser,
            AstExtractor<CsBlock> extractor,
            List<String> fileExtensions
        ) {
            super(displayName, blockUtil, astUtil, keywordUtil, operatorUtil, parser, extractor, fileExtensions);
        }
    }


    /** Java language binding. */
    public static class Java extends CodeLanguageImpl<JavaBlock, JavaKeyword, Java, JavaOperator, JavaAstUtil, JavaOperator.Inst, AstExtractor<JavaBlock>> {
        Java(
            String displayName,
            JavaBlockUtil blockUtil,
            JavaAstUtil astUtil,
            KeywordUtil<JavaKeyword> keywordUtil,
            JavaOperator.Inst operatorUtil,
            Function<ParseInput, CodeFileSrc> parser,
            AstExtractor<JavaBlock> extractor,
            List<String> fileExtensions
        ) {
            super(displayName, blockUtil, astUtil, keywordUtil, operatorUtil, parser, extractor, fileExtensions);
        }
    }


    // declared before the language constants below so registerCodeLanguage() can add to it during class init
    private static final CopyOnWriteArrayList<CodeLanguage> values = new CopyOnWriteArrayList<>();

    public static final CSharp C_SHARP = registerCodeLanguage(
        new CSharp("C#", new CsBlockUtil(), new CsAstUtil(), CsKeyword.check, CsOperator.check,
            CodeTokenizer.createTokenizerWithTimer(() -> CsFileTokenizer.createCsTokenizer()), new CsBlockParser(), Arrays.asList("cs"))
    );

    public static final Java JAVA = registerCodeLanguage(
        new Java("Java", new JavaBlockUtil(), new JavaAstUtil(), JavaKeyword.check, JavaOperator.check,
            CodeTokenizer.createTokenizerWithTimer(() -> JavaFileTokenizer.createJavaTokenizer()), new JavaBlockParser(), Arrays.asList("java"))
    );

    // Javascript/Typescript are recognized by file extension only; no parser/utilities are implemented yet
    public static final CodeLanguageImpl<BlockType, Keyword, CodeLanguage, Operator, AstUtil<BlockType, Keyword>, OperatorUtil<Operator>, AstExtractor<BlockType>> JAVASCRIPT = registerCodeLanguage(
        new CodeLanguageImpl<>("Javascript", null, null, null, null, null, null, Arrays.asList("js", "ts"))
    );


    /**
     * @return a new list containing all registered languages (safe for the caller to modify)
     */
    public static List<CodeLanguage> getLanguagesCopy() {
        return new ArrayList<>(values);
    }


    /** Look up a language by file extension.
     * @param fileExtension the extension, with or without a leading dot (e.g. {@code "java"} or {@code ".java"})
     * @return the matching {@link CodeLanguage}
     * @throws IllegalArgumentException if no registered language matches the extension
     */
    public static CodeLanguage fromFileExtension(String fileExtension) throws IllegalArgumentException {
        CodeLanguage lang = tryFromFileExtension(fileExtension);
        if(lang == null) {
            throw new IllegalArgumentException("unsupported file extension '" + fileExtension + "' for parsing");
        }
        return lang;
    }


    /** Look up a language by file extension.
     * @param fileExtension the extension, with or without a leading dot
     * @return the matching {@link CodeLanguage}, or {@code null} if none matches
     */
    public static CodeLanguage tryFromFileExtension(String fileExtension) {
        // strip an optional leading dot; the emptiness check guards against a
        // StringIndexOutOfBoundsException on charAt(0) for an empty input
        if(!fileExtension.isEmpty() && fileExtension.charAt(0) == '.') {
            fileExtension = fileExtension.substring(1);
        }

        for(CodeLanguage lang : CodeLanguageOptions.values) {
            if(lang.getFileExtensions().contains(fileExtension)) {
                return lang;
            }
        }
        return null;
    }


    /** Register a new language (which will be added to the list returned by {@link CodeLanguageOptions#getLanguagesCopy()}).<br>
     * NOTE: thread safe
     * @param inst the code language to register
     * @return the input {@code inst} unmodified
     */
    private static final <T extends CodeLanguage> T registerCodeLanguage(T inst) {
        values.add(inst);
        return inst;
    }

}
| |
/*
* Copyright 2006 Jeremias Maerki in part, and ZXing Authors in part
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This file has been modified from its original form in Barcode4J.
*/
package com.google.zxing.pdf417.encoder;
import com.google.zxing.WriterException;
import java.nio.charset.Charset;
/**
* Top-level class for the logic part of the PDF417 implementation.
*/
public final class PDF417 {
/**
* The start pattern (17 bits)
*/
private static final int START_PATTERN = 0x1fea8;
/**
* The stop pattern (18 bits)
*/
private static final int STOP_PATTERN = 0x3fa29;
/**
* The codeword table from the Annex A of ISO/IEC 15438:2001(E).
*/
private static final int[][] CODEWORD_TABLE = {
{0x1d5c0, 0x1eaf0, 0x1f57c, 0x1d4e0, 0x1ea78, 0x1f53e,
0x1a8c0, 0x1d470, 0x1a860, 0x15040, 0x1a830, 0x15020,
0x1adc0, 0x1d6f0, 0x1eb7c, 0x1ace0, 0x1d678, 0x1eb3e,
0x158c0, 0x1ac70, 0x15860, 0x15dc0, 0x1aef0, 0x1d77c,
0x15ce0, 0x1ae78, 0x1d73e, 0x15c70, 0x1ae3c, 0x15ef0,
0x1af7c, 0x15e78, 0x1af3e, 0x15f7c, 0x1f5fa, 0x1d2e0,
0x1e978, 0x1f4be, 0x1a4c0, 0x1d270, 0x1e93c, 0x1a460,
0x1d238, 0x14840, 0x1a430, 0x1d21c, 0x14820, 0x1a418,
0x14810, 0x1a6e0, 0x1d378, 0x1e9be, 0x14cc0, 0x1a670,
0x1d33c, 0x14c60, 0x1a638, 0x1d31e, 0x14c30, 0x1a61c,
0x14ee0, 0x1a778, 0x1d3be, 0x14e70, 0x1a73c, 0x14e38,
0x1a71e, 0x14f78, 0x1a7be, 0x14f3c, 0x14f1e, 0x1a2c0,
0x1d170, 0x1e8bc, 0x1a260, 0x1d138, 0x1e89e, 0x14440,
0x1a230, 0x1d11c, 0x14420, 0x1a218, 0x14410, 0x14408,
0x146c0, 0x1a370, 0x1d1bc, 0x14660, 0x1a338, 0x1d19e,
0x14630, 0x1a31c, 0x14618, 0x1460c, 0x14770, 0x1a3bc,
0x14738, 0x1a39e, 0x1471c, 0x147bc, 0x1a160, 0x1d0b8,
0x1e85e, 0x14240, 0x1a130, 0x1d09c, 0x14220, 0x1a118,
0x1d08e, 0x14210, 0x1a10c, 0x14208, 0x1a106, 0x14360,
0x1a1b8, 0x1d0de, 0x14330, 0x1a19c, 0x14318, 0x1a18e,
0x1430c, 0x14306, 0x1a1de, 0x1438e, 0x14140, 0x1a0b0,
0x1d05c, 0x14120, 0x1a098, 0x1d04e, 0x14110, 0x1a08c,
0x14108, 0x1a086, 0x14104, 0x141b0, 0x14198, 0x1418c,
0x140a0, 0x1d02e, 0x1a04c, 0x1a046, 0x14082, 0x1cae0,
0x1e578, 0x1f2be, 0x194c0, 0x1ca70, 0x1e53c, 0x19460,
0x1ca38, 0x1e51e, 0x12840, 0x19430, 0x12820, 0x196e0,
0x1cb78, 0x1e5be, 0x12cc0, 0x19670, 0x1cb3c, 0x12c60,
0x19638, 0x12c30, 0x12c18, 0x12ee0, 0x19778, 0x1cbbe,
0x12e70, 0x1973c, 0x12e38, 0x12e1c, 0x12f78, 0x197be,
0x12f3c, 0x12fbe, 0x1dac0, 0x1ed70, 0x1f6bc, 0x1da60,
0x1ed38, 0x1f69e, 0x1b440, 0x1da30, 0x1ed1c, 0x1b420,
0x1da18, 0x1ed0e, 0x1b410, 0x1da0c, 0x192c0, 0x1c970,
0x1e4bc, 0x1b6c0, 0x19260, 0x1c938, 0x1e49e, 0x1b660,
0x1db38, 0x1ed9e, 0x16c40, 0x12420, 0x19218, 0x1c90e,
0x16c20, 0x1b618, 0x16c10, 0x126c0, 0x19370, 0x1c9bc,
0x16ec0, 0x12660, 0x19338, 0x1c99e, 0x16e60, 0x1b738,
0x1db9e, 0x16e30, 0x12618, 0x16e18, 0x12770, 0x193bc,
0x16f70, 0x12738, 0x1939e, 0x16f38, 0x1b79e, 0x16f1c,
0x127bc, 0x16fbc, 0x1279e, 0x16f9e, 0x1d960, 0x1ecb8,
0x1f65e, 0x1b240, 0x1d930, 0x1ec9c, 0x1b220, 0x1d918,
0x1ec8e, 0x1b210, 0x1d90c, 0x1b208, 0x1b204, 0x19160,
0x1c8b8, 0x1e45e, 0x1b360, 0x19130, 0x1c89c, 0x16640,
0x12220, 0x1d99c, 0x1c88e, 0x16620, 0x12210, 0x1910c,
0x16610, 0x1b30c, 0x19106, 0x12204, 0x12360, 0x191b8,
0x1c8de, 0x16760, 0x12330, 0x1919c, 0x16730, 0x1b39c,
0x1918e, 0x16718, 0x1230c, 0x12306, 0x123b8, 0x191de,
0x167b8, 0x1239c, 0x1679c, 0x1238e, 0x1678e, 0x167de,
0x1b140, 0x1d8b0, 0x1ec5c, 0x1b120, 0x1d898, 0x1ec4e,
0x1b110, 0x1d88c, 0x1b108, 0x1d886, 0x1b104, 0x1b102,
0x12140, 0x190b0, 0x1c85c, 0x16340, 0x12120, 0x19098,
0x1c84e, 0x16320, 0x1b198, 0x1d8ce, 0x16310, 0x12108,
0x19086, 0x16308, 0x1b186, 0x16304, 0x121b0, 0x190dc,
0x163b0, 0x12198, 0x190ce, 0x16398, 0x1b1ce, 0x1638c,
0x12186, 0x16386, 0x163dc, 0x163ce, 0x1b0a0, 0x1d858,
0x1ec2e, 0x1b090, 0x1d84c, 0x1b088, 0x1d846, 0x1b084,
0x1b082, 0x120a0, 0x19058, 0x1c82e, 0x161a0, 0x12090,
0x1904c, 0x16190, 0x1b0cc, 0x19046, 0x16188, 0x12084,
0x16184, 0x12082, 0x120d8, 0x161d8, 0x161cc, 0x161c6,
0x1d82c, 0x1d826, 0x1b042, 0x1902c, 0x12048, 0x160c8,
0x160c4, 0x160c2, 0x18ac0, 0x1c570, 0x1e2bc, 0x18a60,
0x1c538, 0x11440, 0x18a30, 0x1c51c, 0x11420, 0x18a18,
0x11410, 0x11408, 0x116c0, 0x18b70, 0x1c5bc, 0x11660,
0x18b38, 0x1c59e, 0x11630, 0x18b1c, 0x11618, 0x1160c,
0x11770, 0x18bbc, 0x11738, 0x18b9e, 0x1171c, 0x117bc,
0x1179e, 0x1cd60, 0x1e6b8, 0x1f35e, 0x19a40, 0x1cd30,
0x1e69c, 0x19a20, 0x1cd18, 0x1e68e, 0x19a10, 0x1cd0c,
0x19a08, 0x1cd06, 0x18960, 0x1c4b8, 0x1e25e, 0x19b60,
0x18930, 0x1c49c, 0x13640, 0x11220, 0x1cd9c, 0x1c48e,
0x13620, 0x19b18, 0x1890c, 0x13610, 0x11208, 0x13608,
0x11360, 0x189b8, 0x1c4de, 0x13760, 0x11330, 0x1cdde,
0x13730, 0x19b9c, 0x1898e, 0x13718, 0x1130c, 0x1370c,
0x113b8, 0x189de, 0x137b8, 0x1139c, 0x1379c, 0x1138e,
0x113de, 0x137de, 0x1dd40, 0x1eeb0, 0x1f75c, 0x1dd20,
0x1ee98, 0x1f74e, 0x1dd10, 0x1ee8c, 0x1dd08, 0x1ee86,
0x1dd04, 0x19940, 0x1ccb0, 0x1e65c, 0x1bb40, 0x19920,
0x1eedc, 0x1e64e, 0x1bb20, 0x1dd98, 0x1eece, 0x1bb10,
0x19908, 0x1cc86, 0x1bb08, 0x1dd86, 0x19902, 0x11140,
0x188b0, 0x1c45c, 0x13340, 0x11120, 0x18898, 0x1c44e,
0x17740, 0x13320, 0x19998, 0x1ccce, 0x17720, 0x1bb98,
0x1ddce, 0x18886, 0x17710, 0x13308, 0x19986, 0x17708,
0x11102, 0x111b0, 0x188dc, 0x133b0, 0x11198, 0x188ce,
0x177b0, 0x13398, 0x199ce, 0x17798, 0x1bbce, 0x11186,
0x13386, 0x111dc, 0x133dc, 0x111ce, 0x177dc, 0x133ce,
0x1dca0, 0x1ee58, 0x1f72e, 0x1dc90, 0x1ee4c, 0x1dc88,
0x1ee46, 0x1dc84, 0x1dc82, 0x198a0, 0x1cc58, 0x1e62e,
0x1b9a0, 0x19890, 0x1ee6e, 0x1b990, 0x1dccc, 0x1cc46,
0x1b988, 0x19884, 0x1b984, 0x19882, 0x1b982, 0x110a0,
0x18858, 0x1c42e, 0x131a0, 0x11090, 0x1884c, 0x173a0,
0x13190, 0x198cc, 0x18846, 0x17390, 0x1b9cc, 0x11084,
0x17388, 0x13184, 0x11082, 0x13182, 0x110d8, 0x1886e,
0x131d8, 0x110cc, 0x173d8, 0x131cc, 0x110c6, 0x173cc,
0x131c6, 0x110ee, 0x173ee, 0x1dc50, 0x1ee2c, 0x1dc48,
0x1ee26, 0x1dc44, 0x1dc42, 0x19850, 0x1cc2c, 0x1b8d0,
0x19848, 0x1cc26, 0x1b8c8, 0x1dc66, 0x1b8c4, 0x19842,
0x1b8c2, 0x11050, 0x1882c, 0x130d0, 0x11048, 0x18826,
0x171d0, 0x130c8, 0x19866, 0x171c8, 0x1b8e6, 0x11042,
0x171c4, 0x130c2, 0x171c2, 0x130ec, 0x171ec, 0x171e6,
0x1ee16, 0x1dc22, 0x1cc16, 0x19824, 0x19822, 0x11028,
0x13068, 0x170e8, 0x11022, 0x13062, 0x18560, 0x10a40,
0x18530, 0x10a20, 0x18518, 0x1c28e, 0x10a10, 0x1850c,
0x10a08, 0x18506, 0x10b60, 0x185b8, 0x1c2de, 0x10b30,
0x1859c, 0x10b18, 0x1858e, 0x10b0c, 0x10b06, 0x10bb8,
0x185de, 0x10b9c, 0x10b8e, 0x10bde, 0x18d40, 0x1c6b0,
0x1e35c, 0x18d20, 0x1c698, 0x18d10, 0x1c68c, 0x18d08,
0x1c686, 0x18d04, 0x10940, 0x184b0, 0x1c25c, 0x11b40,
0x10920, 0x1c6dc, 0x1c24e, 0x11b20, 0x18d98, 0x1c6ce,
0x11b10, 0x10908, 0x18486, 0x11b08, 0x18d86, 0x10902,
0x109b0, 0x184dc, 0x11bb0, 0x10998, 0x184ce, 0x11b98,
0x18dce, 0x11b8c, 0x10986, 0x109dc, 0x11bdc, 0x109ce,
0x11bce, 0x1cea0, 0x1e758, 0x1f3ae, 0x1ce90, 0x1e74c,
0x1ce88, 0x1e746, 0x1ce84, 0x1ce82, 0x18ca0, 0x1c658,
0x19da0, 0x18c90, 0x1c64c, 0x19d90, 0x1cecc, 0x1c646,
0x19d88, 0x18c84, 0x19d84, 0x18c82, 0x19d82, 0x108a0,
0x18458, 0x119a0, 0x10890, 0x1c66e, 0x13ba0, 0x11990,
0x18ccc, 0x18446, 0x13b90, 0x19dcc, 0x10884, 0x13b88,
0x11984, 0x10882, 0x11982, 0x108d8, 0x1846e, 0x119d8,
0x108cc, 0x13bd8, 0x119cc, 0x108c6, 0x13bcc, 0x119c6,
0x108ee, 0x119ee, 0x13bee, 0x1ef50, 0x1f7ac, 0x1ef48,
0x1f7a6, 0x1ef44, 0x1ef42, 0x1ce50, 0x1e72c, 0x1ded0,
0x1ef6c, 0x1e726, 0x1dec8, 0x1ef66, 0x1dec4, 0x1ce42,
0x1dec2, 0x18c50, 0x1c62c, 0x19cd0, 0x18c48, 0x1c626,
0x1bdd0, 0x19cc8, 0x1ce66, 0x1bdc8, 0x1dee6, 0x18c42,
0x1bdc4, 0x19cc2, 0x1bdc2, 0x10850, 0x1842c, 0x118d0,
0x10848, 0x18426, 0x139d0, 0x118c8, 0x18c66, 0x17bd0,
0x139c8, 0x19ce6, 0x10842, 0x17bc8, 0x1bde6, 0x118c2,
0x17bc4, 0x1086c, 0x118ec, 0x10866, 0x139ec, 0x118e6,
0x17bec, 0x139e6, 0x17be6, 0x1ef28, 0x1f796, 0x1ef24,
0x1ef22, 0x1ce28, 0x1e716, 0x1de68, 0x1ef36, 0x1de64,
0x1ce22, 0x1de62, 0x18c28, 0x1c616, 0x19c68, 0x18c24,
0x1bce8, 0x19c64, 0x18c22, 0x1bce4, 0x19c62, 0x1bce2,
0x10828, 0x18416, 0x11868, 0x18c36, 0x138e8, 0x11864,
0x10822, 0x179e8, 0x138e4, 0x11862, 0x179e4, 0x138e2,
0x179e2, 0x11876, 0x179f6, 0x1ef12, 0x1de34, 0x1de32,
0x19c34, 0x1bc74, 0x1bc72, 0x11834, 0x13874, 0x178f4,
0x178f2, 0x10540, 0x10520, 0x18298, 0x10510, 0x10508,
0x10504, 0x105b0, 0x10598, 0x1058c, 0x10586, 0x105dc,
0x105ce, 0x186a0, 0x18690, 0x1c34c, 0x18688, 0x1c346,
0x18684, 0x18682, 0x104a0, 0x18258, 0x10da0, 0x186d8,
0x1824c, 0x10d90, 0x186cc, 0x10d88, 0x186c6, 0x10d84,
0x10482, 0x10d82, 0x104d8, 0x1826e, 0x10dd8, 0x186ee,
0x10dcc, 0x104c6, 0x10dc6, 0x104ee, 0x10dee, 0x1c750,
0x1c748, 0x1c744, 0x1c742, 0x18650, 0x18ed0, 0x1c76c,
0x1c326, 0x18ec8, 0x1c766, 0x18ec4, 0x18642, 0x18ec2,
0x10450, 0x10cd0, 0x10448, 0x18226, 0x11dd0, 0x10cc8,
0x10444, 0x11dc8, 0x10cc4, 0x10442, 0x11dc4, 0x10cc2,
0x1046c, 0x10cec, 0x10466, 0x11dec, 0x10ce6, 0x11de6,
0x1e7a8, 0x1e7a4, 0x1e7a2, 0x1c728, 0x1cf68, 0x1e7b6,
0x1cf64, 0x1c722, 0x1cf62, 0x18628, 0x1c316, 0x18e68,
0x1c736, 0x19ee8, 0x18e64, 0x18622, 0x19ee4, 0x18e62,
0x19ee2, 0x10428, 0x18216, 0x10c68, 0x18636, 0x11ce8,
0x10c64, 0x10422, 0x13de8, 0x11ce4, 0x10c62, 0x13de4,
0x11ce2, 0x10436, 0x10c76, 0x11cf6, 0x13df6, 0x1f7d4,
0x1f7d2, 0x1e794, 0x1efb4, 0x1e792, 0x1efb2, 0x1c714,
0x1cf34, 0x1c712, 0x1df74, 0x1cf32, 0x1df72, 0x18614,
0x18e34, 0x18612, 0x19e74, 0x18e32, 0x1bef4},
{0x1f560, 0x1fab8, 0x1ea40, 0x1f530, 0x1fa9c, 0x1ea20,
0x1f518, 0x1fa8e, 0x1ea10, 0x1f50c, 0x1ea08, 0x1f506,
0x1ea04, 0x1eb60, 0x1f5b8, 0x1fade, 0x1d640, 0x1eb30,
0x1f59c, 0x1d620, 0x1eb18, 0x1f58e, 0x1d610, 0x1eb0c,
0x1d608, 0x1eb06, 0x1d604, 0x1d760, 0x1ebb8, 0x1f5de,
0x1ae40, 0x1d730, 0x1eb9c, 0x1ae20, 0x1d718, 0x1eb8e,
0x1ae10, 0x1d70c, 0x1ae08, 0x1d706, 0x1ae04, 0x1af60,
0x1d7b8, 0x1ebde, 0x15e40, 0x1af30, 0x1d79c, 0x15e20,
0x1af18, 0x1d78e, 0x15e10, 0x1af0c, 0x15e08, 0x1af06,
0x15f60, 0x1afb8, 0x1d7de, 0x15f30, 0x1af9c, 0x15f18,
0x1af8e, 0x15f0c, 0x15fb8, 0x1afde, 0x15f9c, 0x15f8e,
0x1e940, 0x1f4b0, 0x1fa5c, 0x1e920, 0x1f498, 0x1fa4e,
0x1e910, 0x1f48c, 0x1e908, 0x1f486, 0x1e904, 0x1e902,
0x1d340, 0x1e9b0, 0x1f4dc, 0x1d320, 0x1e998, 0x1f4ce,
0x1d310, 0x1e98c, 0x1d308, 0x1e986, 0x1d304, 0x1d302,
0x1a740, 0x1d3b0, 0x1e9dc, 0x1a720, 0x1d398, 0x1e9ce,
0x1a710, 0x1d38c, 0x1a708, 0x1d386, 0x1a704, 0x1a702,
0x14f40, 0x1a7b0, 0x1d3dc, 0x14f20, 0x1a798, 0x1d3ce,
0x14f10, 0x1a78c, 0x14f08, 0x1a786, 0x14f04, 0x14fb0,
0x1a7dc, 0x14f98, 0x1a7ce, 0x14f8c, 0x14f86, 0x14fdc,
0x14fce, 0x1e8a0, 0x1f458, 0x1fa2e, 0x1e890, 0x1f44c,
0x1e888, 0x1f446, 0x1e884, 0x1e882, 0x1d1a0, 0x1e8d8,
0x1f46e, 0x1d190, 0x1e8cc, 0x1d188, 0x1e8c6, 0x1d184,
0x1d182, 0x1a3a0, 0x1d1d8, 0x1e8ee, 0x1a390, 0x1d1cc,
0x1a388, 0x1d1c6, 0x1a384, 0x1a382, 0x147a0, 0x1a3d8,
0x1d1ee, 0x14790, 0x1a3cc, 0x14788, 0x1a3c6, 0x14784,
0x14782, 0x147d8, 0x1a3ee, 0x147cc, 0x147c6, 0x147ee,
0x1e850, 0x1f42c, 0x1e848, 0x1f426, 0x1e844, 0x1e842,
0x1d0d0, 0x1e86c, 0x1d0c8, 0x1e866, 0x1d0c4, 0x1d0c2,
0x1a1d0, 0x1d0ec, 0x1a1c8, 0x1d0e6, 0x1a1c4, 0x1a1c2,
0x143d0, 0x1a1ec, 0x143c8, 0x1a1e6, 0x143c4, 0x143c2,
0x143ec, 0x143e6, 0x1e828, 0x1f416, 0x1e824, 0x1e822,
0x1d068, 0x1e836, 0x1d064, 0x1d062, 0x1a0e8, 0x1d076,
0x1a0e4, 0x1a0e2, 0x141e8, 0x1a0f6, 0x141e4, 0x141e2,
0x1e814, 0x1e812, 0x1d034, 0x1d032, 0x1a074, 0x1a072,
0x1e540, 0x1f2b0, 0x1f95c, 0x1e520, 0x1f298, 0x1f94e,
0x1e510, 0x1f28c, 0x1e508, 0x1f286, 0x1e504, 0x1e502,
0x1cb40, 0x1e5b0, 0x1f2dc, 0x1cb20, 0x1e598, 0x1f2ce,
0x1cb10, 0x1e58c, 0x1cb08, 0x1e586, 0x1cb04, 0x1cb02,
0x19740, 0x1cbb0, 0x1e5dc, 0x19720, 0x1cb98, 0x1e5ce,
0x19710, 0x1cb8c, 0x19708, 0x1cb86, 0x19704, 0x19702,
0x12f40, 0x197b0, 0x1cbdc, 0x12f20, 0x19798, 0x1cbce,
0x12f10, 0x1978c, 0x12f08, 0x19786, 0x12f04, 0x12fb0,
0x197dc, 0x12f98, 0x197ce, 0x12f8c, 0x12f86, 0x12fdc,
0x12fce, 0x1f6a0, 0x1fb58, 0x16bf0, 0x1f690, 0x1fb4c,
0x169f8, 0x1f688, 0x1fb46, 0x168fc, 0x1f684, 0x1f682,
0x1e4a0, 0x1f258, 0x1f92e, 0x1eda0, 0x1e490, 0x1fb6e,
0x1ed90, 0x1f6cc, 0x1f246, 0x1ed88, 0x1e484, 0x1ed84,
0x1e482, 0x1ed82, 0x1c9a0, 0x1e4d8, 0x1f26e, 0x1dba0,
0x1c990, 0x1e4cc, 0x1db90, 0x1edcc, 0x1e4c6, 0x1db88,
0x1c984, 0x1db84, 0x1c982, 0x1db82, 0x193a0, 0x1c9d8,
0x1e4ee, 0x1b7a0, 0x19390, 0x1c9cc, 0x1b790, 0x1dbcc,
0x1c9c6, 0x1b788, 0x19384, 0x1b784, 0x19382, 0x1b782,
0x127a0, 0x193d8, 0x1c9ee, 0x16fa0, 0x12790, 0x193cc,
0x16f90, 0x1b7cc, 0x193c6, 0x16f88, 0x12784, 0x16f84,
0x12782, 0x127d8, 0x193ee, 0x16fd8, 0x127cc, 0x16fcc,
0x127c6, 0x16fc6, 0x127ee, 0x1f650, 0x1fb2c, 0x165f8,
0x1f648, 0x1fb26, 0x164fc, 0x1f644, 0x1647e, 0x1f642,
0x1e450, 0x1f22c, 0x1ecd0, 0x1e448, 0x1f226, 0x1ecc8,
0x1f666, 0x1ecc4, 0x1e442, 0x1ecc2, 0x1c8d0, 0x1e46c,
0x1d9d0, 0x1c8c8, 0x1e466, 0x1d9c8, 0x1ece6, 0x1d9c4,
0x1c8c2, 0x1d9c2, 0x191d0, 0x1c8ec, 0x1b3d0, 0x191c8,
0x1c8e6, 0x1b3c8, 0x1d9e6, 0x1b3c4, 0x191c2, 0x1b3c2,
0x123d0, 0x191ec, 0x167d0, 0x123c8, 0x191e6, 0x167c8,
0x1b3e6, 0x167c4, 0x123c2, 0x167c2, 0x123ec, 0x167ec,
0x123e6, 0x167e6, 0x1f628, 0x1fb16, 0x162fc, 0x1f624,
0x1627e, 0x1f622, 0x1e428, 0x1f216, 0x1ec68, 0x1f636,
0x1ec64, 0x1e422, 0x1ec62, 0x1c868, 0x1e436, 0x1d8e8,
0x1c864, 0x1d8e4, 0x1c862, 0x1d8e2, 0x190e8, 0x1c876,
0x1b1e8, 0x1d8f6, 0x1b1e4, 0x190e2, 0x1b1e2, 0x121e8,
0x190f6, 0x163e8, 0x121e4, 0x163e4, 0x121e2, 0x163e2,
0x121f6, 0x163f6, 0x1f614, 0x1617e, 0x1f612, 0x1e414,
0x1ec34, 0x1e412, 0x1ec32, 0x1c834, 0x1d874, 0x1c832,
0x1d872, 0x19074, 0x1b0f4, 0x19072, 0x1b0f2, 0x120f4,
0x161f4, 0x120f2, 0x161f2, 0x1f60a, 0x1e40a, 0x1ec1a,
0x1c81a, 0x1d83a, 0x1903a, 0x1b07a, 0x1e2a0, 0x1f158,
0x1f8ae, 0x1e290, 0x1f14c, 0x1e288, 0x1f146, 0x1e284,
0x1e282, 0x1c5a0, 0x1e2d8, 0x1f16e, 0x1c590, 0x1e2cc,
0x1c588, 0x1e2c6, 0x1c584, 0x1c582, 0x18ba0, 0x1c5d8,
0x1e2ee, 0x18b90, 0x1c5cc, 0x18b88, 0x1c5c6, 0x18b84,
0x18b82, 0x117a0, 0x18bd8, 0x1c5ee, 0x11790, 0x18bcc,
0x11788, 0x18bc6, 0x11784, 0x11782, 0x117d8, 0x18bee,
0x117cc, 0x117c6, 0x117ee, 0x1f350, 0x1f9ac, 0x135f8,
0x1f348, 0x1f9a6, 0x134fc, 0x1f344, 0x1347e, 0x1f342,
0x1e250, 0x1f12c, 0x1e6d0, 0x1e248, 0x1f126, 0x1e6c8,
0x1f366, 0x1e6c4, 0x1e242, 0x1e6c2, 0x1c4d0, 0x1e26c,
0x1cdd0, 0x1c4c8, 0x1e266, 0x1cdc8, 0x1e6e6, 0x1cdc4,
0x1c4c2, 0x1cdc2, 0x189d0, 0x1c4ec, 0x19bd0, 0x189c8,
0x1c4e6, 0x19bc8, 0x1cde6, 0x19bc4, 0x189c2, 0x19bc2,
0x113d0, 0x189ec, 0x137d0, 0x113c8, 0x189e6, 0x137c8,
0x19be6, 0x137c4, 0x113c2, 0x137c2, 0x113ec, 0x137ec,
0x113e6, 0x137e6, 0x1fba8, 0x175f0, 0x1bafc, 0x1fba4,
0x174f8, 0x1ba7e, 0x1fba2, 0x1747c, 0x1743e, 0x1f328,
0x1f996, 0x132fc, 0x1f768, 0x1fbb6, 0x176fc, 0x1327e,
0x1f764, 0x1f322, 0x1767e, 0x1f762, 0x1e228, 0x1f116,
0x1e668, 0x1e224, 0x1eee8, 0x1f776, 0x1e222, 0x1eee4,
0x1e662, 0x1eee2, 0x1c468, 0x1e236, 0x1cce8, 0x1c464,
0x1dde8, 0x1cce4, 0x1c462, 0x1dde4, 0x1cce2, 0x1dde2,
0x188e8, 0x1c476, 0x199e8, 0x188e4, 0x1bbe8, 0x199e4,
0x188e2, 0x1bbe4, 0x199e2, 0x1bbe2, 0x111e8, 0x188f6,
0x133e8, 0x111e4, 0x177e8, 0x133e4, 0x111e2, 0x177e4,
0x133e2, 0x177e2, 0x111f6, 0x133f6, 0x1fb94, 0x172f8,
0x1b97e, 0x1fb92, 0x1727c, 0x1723e, 0x1f314, 0x1317e,
0x1f734, 0x1f312, 0x1737e, 0x1f732, 0x1e214, 0x1e634,
0x1e212, 0x1ee74, 0x1e632, 0x1ee72, 0x1c434, 0x1cc74,
0x1c432, 0x1dcf4, 0x1cc72, 0x1dcf2, 0x18874, 0x198f4,
0x18872, 0x1b9f4, 0x198f2, 0x1b9f2, 0x110f4, 0x131f4,
0x110f2, 0x173f4, 0x131f2, 0x173f2, 0x1fb8a, 0x1717c,
0x1713e, 0x1f30a, 0x1f71a, 0x1e20a, 0x1e61a, 0x1ee3a,
0x1c41a, 0x1cc3a, 0x1dc7a, 0x1883a, 0x1987a, 0x1b8fa,
0x1107a, 0x130fa, 0x171fa, 0x170be, 0x1e150, 0x1f0ac,
0x1e148, 0x1f0a6, 0x1e144, 0x1e142, 0x1c2d0, 0x1e16c,
0x1c2c8, 0x1e166, 0x1c2c4, 0x1c2c2, 0x185d0, 0x1c2ec,
0x185c8, 0x1c2e6, 0x185c4, 0x185c2, 0x10bd0, 0x185ec,
0x10bc8, 0x185e6, 0x10bc4, 0x10bc2, 0x10bec, 0x10be6,
0x1f1a8, 0x1f8d6, 0x11afc, 0x1f1a4, 0x11a7e, 0x1f1a2,
0x1e128, 0x1f096, 0x1e368, 0x1e124, 0x1e364, 0x1e122,
0x1e362, 0x1c268, 0x1e136, 0x1c6e8, 0x1c264, 0x1c6e4,
0x1c262, 0x1c6e2, 0x184e8, 0x1c276, 0x18de8, 0x184e4,
0x18de4, 0x184e2, 0x18de2, 0x109e8, 0x184f6, 0x11be8,
0x109e4, 0x11be4, 0x109e2, 0x11be2, 0x109f6, 0x11bf6,
0x1f9d4, 0x13af8, 0x19d7e, 0x1f9d2, 0x13a7c, 0x13a3e,
0x1f194, 0x1197e, 0x1f3b4, 0x1f192, 0x13b7e, 0x1f3b2,
0x1e114, 0x1e334, 0x1e112, 0x1e774, 0x1e332, 0x1e772,
0x1c234, 0x1c674, 0x1c232, 0x1cef4, 0x1c672, 0x1cef2,
0x18474, 0x18cf4, 0x18472, 0x19df4, 0x18cf2, 0x19df2,
0x108f4, 0x119f4, 0x108f2, 0x13bf4, 0x119f2, 0x13bf2,
0x17af0, 0x1bd7c, 0x17a78, 0x1bd3e, 0x17a3c, 0x17a1e,
0x1f9ca, 0x1397c, 0x1fbda, 0x17b7c, 0x1393e, 0x17b3e,
0x1f18a, 0x1f39a, 0x1f7ba, 0x1e10a, 0x1e31a, 0x1e73a,
0x1ef7a, 0x1c21a, 0x1c63a, 0x1ce7a, 0x1defa, 0x1843a,
0x18c7a, 0x19cfa, 0x1bdfa, 0x1087a, 0x118fa, 0x139fa,
0x17978, 0x1bcbe, 0x1793c, 0x1791e, 0x138be, 0x179be,
0x178bc, 0x1789e, 0x1785e, 0x1e0a8, 0x1e0a4, 0x1e0a2,
0x1c168, 0x1e0b6, 0x1c164, 0x1c162, 0x182e8, 0x1c176,
0x182e4, 0x182e2, 0x105e8, 0x182f6, 0x105e4, 0x105e2,
0x105f6, 0x1f0d4, 0x10d7e, 0x1f0d2, 0x1e094, 0x1e1b4,
0x1e092, 0x1e1b2, 0x1c134, 0x1c374, 0x1c132, 0x1c372,
0x18274, 0x186f4, 0x18272, 0x186f2, 0x104f4, 0x10df4,
0x104f2, 0x10df2, 0x1f8ea, 0x11d7c, 0x11d3e, 0x1f0ca,
0x1f1da, 0x1e08a, 0x1e19a, 0x1e3ba, 0x1c11a, 0x1c33a,
0x1c77a, 0x1823a, 0x1867a, 0x18efa, 0x1047a, 0x10cfa,
0x11dfa, 0x13d78, 0x19ebe, 0x13d3c, 0x13d1e, 0x11cbe,
0x13dbe, 0x17d70, 0x1bebc, 0x17d38, 0x1be9e, 0x17d1c,
0x17d0e, 0x13cbc, 0x17dbc, 0x13c9e, 0x17d9e, 0x17cb8,
0x1be5e, 0x17c9c, 0x17c8e, 0x13c5e, 0x17cde, 0x17c5c,
0x17c4e, 0x17c2e, 0x1c0b4, 0x1c0b2, 0x18174, 0x18172,
0x102f4, 0x102f2, 0x1e0da, 0x1c09a, 0x1c1ba, 0x1813a,
0x1837a, 0x1027a, 0x106fa, 0x10ebe, 0x11ebc, 0x11e9e,
0x13eb8, 0x19f5e, 0x13e9c, 0x13e8e, 0x11e5e, 0x13ede,
0x17eb0, 0x1bf5c, 0x17e98, 0x1bf4e, 0x17e8c, 0x17e86,
0x13e5c, 0x17edc, 0x13e4e, 0x17ece, 0x17e58, 0x1bf2e,
0x17e4c, 0x17e46, 0x13e2e, 0x17e6e, 0x17e2c, 0x17e26,
0x10f5e, 0x11f5c, 0x11f4e, 0x13f58, 0x19fae, 0x13f4c,
0x13f46, 0x11f2e, 0x13f6e, 0x13f2c, 0x13f26},
{0x1abe0, 0x1d5f8, 0x153c0, 0x1a9f0, 0x1d4fc, 0x151e0,
0x1a8f8, 0x1d47e, 0x150f0, 0x1a87c, 0x15078, 0x1fad0,
0x15be0, 0x1adf8, 0x1fac8, 0x159f0, 0x1acfc, 0x1fac4,
0x158f8, 0x1ac7e, 0x1fac2, 0x1587c, 0x1f5d0, 0x1faec,
0x15df8, 0x1f5c8, 0x1fae6, 0x15cfc, 0x1f5c4, 0x15c7e,
0x1f5c2, 0x1ebd0, 0x1f5ec, 0x1ebc8, 0x1f5e6, 0x1ebc4,
0x1ebc2, 0x1d7d0, 0x1ebec, 0x1d7c8, 0x1ebe6, 0x1d7c4,
0x1d7c2, 0x1afd0, 0x1d7ec, 0x1afc8, 0x1d7e6, 0x1afc4,
0x14bc0, 0x1a5f0, 0x1d2fc, 0x149e0, 0x1a4f8, 0x1d27e,
0x148f0, 0x1a47c, 0x14878, 0x1a43e, 0x1483c, 0x1fa68,
0x14df0, 0x1a6fc, 0x1fa64, 0x14cf8, 0x1a67e, 0x1fa62,
0x14c7c, 0x14c3e, 0x1f4e8, 0x1fa76, 0x14efc, 0x1f4e4,
0x14e7e, 0x1f4e2, 0x1e9e8, 0x1f4f6, 0x1e9e4, 0x1e9e2,
0x1d3e8, 0x1e9f6, 0x1d3e4, 0x1d3e2, 0x1a7e8, 0x1d3f6,
0x1a7e4, 0x1a7e2, 0x145e0, 0x1a2f8, 0x1d17e, 0x144f0,
0x1a27c, 0x14478, 0x1a23e, 0x1443c, 0x1441e, 0x1fa34,
0x146f8, 0x1a37e, 0x1fa32, 0x1467c, 0x1463e, 0x1f474,
0x1477e, 0x1f472, 0x1e8f4, 0x1e8f2, 0x1d1f4, 0x1d1f2,
0x1a3f4, 0x1a3f2, 0x142f0, 0x1a17c, 0x14278, 0x1a13e,
0x1423c, 0x1421e, 0x1fa1a, 0x1437c, 0x1433e, 0x1f43a,
0x1e87a, 0x1d0fa, 0x14178, 0x1a0be, 0x1413c, 0x1411e,
0x141be, 0x140bc, 0x1409e, 0x12bc0, 0x195f0, 0x1cafc,
0x129e0, 0x194f8, 0x1ca7e, 0x128f0, 0x1947c, 0x12878,
0x1943e, 0x1283c, 0x1f968, 0x12df0, 0x196fc, 0x1f964,
0x12cf8, 0x1967e, 0x1f962, 0x12c7c, 0x12c3e, 0x1f2e8,
0x1f976, 0x12efc, 0x1f2e4, 0x12e7e, 0x1f2e2, 0x1e5e8,
0x1f2f6, 0x1e5e4, 0x1e5e2, 0x1cbe8, 0x1e5f6, 0x1cbe4,
0x1cbe2, 0x197e8, 0x1cbf6, 0x197e4, 0x197e2, 0x1b5e0,
0x1daf8, 0x1ed7e, 0x169c0, 0x1b4f0, 0x1da7c, 0x168e0,
0x1b478, 0x1da3e, 0x16870, 0x1b43c, 0x16838, 0x1b41e,
0x1681c, 0x125e0, 0x192f8, 0x1c97e, 0x16de0, 0x124f0,
0x1927c, 0x16cf0, 0x1b67c, 0x1923e, 0x16c78, 0x1243c,
0x16c3c, 0x1241e, 0x16c1e, 0x1f934, 0x126f8, 0x1937e,
0x1fb74, 0x1f932, 0x16ef8, 0x1267c, 0x1fb72, 0x16e7c,
0x1263e, 0x16e3e, 0x1f274, 0x1277e, 0x1f6f4, 0x1f272,
0x16f7e, 0x1f6f2, 0x1e4f4, 0x1edf4, 0x1e4f2, 0x1edf2,
0x1c9f4, 0x1dbf4, 0x1c9f2, 0x1dbf2, 0x193f4, 0x193f2,
0x165c0, 0x1b2f0, 0x1d97c, 0x164e0, 0x1b278, 0x1d93e,
0x16470, 0x1b23c, 0x16438, 0x1b21e, 0x1641c, 0x1640e,
0x122f0, 0x1917c, 0x166f0, 0x12278, 0x1913e, 0x16678,
0x1b33e, 0x1663c, 0x1221e, 0x1661e, 0x1f91a, 0x1237c,
0x1fb3a, 0x1677c, 0x1233e, 0x1673e, 0x1f23a, 0x1f67a,
0x1e47a, 0x1ecfa, 0x1c8fa, 0x1d9fa, 0x191fa, 0x162e0,
0x1b178, 0x1d8be, 0x16270, 0x1b13c, 0x16238, 0x1b11e,
0x1621c, 0x1620e, 0x12178, 0x190be, 0x16378, 0x1213c,
0x1633c, 0x1211e, 0x1631e, 0x121be, 0x163be, 0x16170,
0x1b0bc, 0x16138, 0x1b09e, 0x1611c, 0x1610e, 0x120bc,
0x161bc, 0x1209e, 0x1619e, 0x160b8, 0x1b05e, 0x1609c,
0x1608e, 0x1205e, 0x160de, 0x1605c, 0x1604e, 0x115e0,
0x18af8, 0x1c57e, 0x114f0, 0x18a7c, 0x11478, 0x18a3e,
0x1143c, 0x1141e, 0x1f8b4, 0x116f8, 0x18b7e, 0x1f8b2,
0x1167c, 0x1163e, 0x1f174, 0x1177e, 0x1f172, 0x1e2f4,
0x1e2f2, 0x1c5f4, 0x1c5f2, 0x18bf4, 0x18bf2, 0x135c0,
0x19af0, 0x1cd7c, 0x134e0, 0x19a78, 0x1cd3e, 0x13470,
0x19a3c, 0x13438, 0x19a1e, 0x1341c, 0x1340e, 0x112f0,
0x1897c, 0x136f0, 0x11278, 0x1893e, 0x13678, 0x19b3e,
0x1363c, 0x1121e, 0x1361e, 0x1f89a, 0x1137c, 0x1f9ba,
0x1377c, 0x1133e, 0x1373e, 0x1f13a, 0x1f37a, 0x1e27a,
0x1e6fa, 0x1c4fa, 0x1cdfa, 0x189fa, 0x1bae0, 0x1dd78,
0x1eebe, 0x174c0, 0x1ba70, 0x1dd3c, 0x17460, 0x1ba38,
0x1dd1e, 0x17430, 0x1ba1c, 0x17418, 0x1ba0e, 0x1740c,
0x132e0, 0x19978, 0x1ccbe, 0x176e0, 0x13270, 0x1993c,
0x17670, 0x1bb3c, 0x1991e, 0x17638, 0x1321c, 0x1761c,
0x1320e, 0x1760e, 0x11178, 0x188be, 0x13378, 0x1113c,
0x17778, 0x1333c, 0x1111e, 0x1773c, 0x1331e, 0x1771e,
0x111be, 0x133be, 0x177be, 0x172c0, 0x1b970, 0x1dcbc,
0x17260, 0x1b938, 0x1dc9e, 0x17230, 0x1b91c, 0x17218,
0x1b90e, 0x1720c, 0x17206, 0x13170, 0x198bc, 0x17370,
0x13138, 0x1989e, 0x17338, 0x1b99e, 0x1731c, 0x1310e,
0x1730e, 0x110bc, 0x131bc, 0x1109e, 0x173bc, 0x1319e,
0x1739e, 0x17160, 0x1b8b8, 0x1dc5e, 0x17130, 0x1b89c,
0x17118, 0x1b88e, 0x1710c, 0x17106, 0x130b8, 0x1985e,
0x171b8, 0x1309c, 0x1719c, 0x1308e, 0x1718e, 0x1105e,
0x130de, 0x171de, 0x170b0, 0x1b85c, 0x17098, 0x1b84e,
0x1708c, 0x17086, 0x1305c, 0x170dc, 0x1304e, 0x170ce,
0x17058, 0x1b82e, 0x1704c, 0x17046, 0x1302e, 0x1706e,
0x1702c, 0x17026, 0x10af0, 0x1857c, 0x10a78, 0x1853e,
0x10a3c, 0x10a1e, 0x10b7c, 0x10b3e, 0x1f0ba, 0x1e17a,
0x1c2fa, 0x185fa, 0x11ae0, 0x18d78, 0x1c6be, 0x11a70,
0x18d3c, 0x11a38, 0x18d1e, 0x11a1c, 0x11a0e, 0x10978,
0x184be, 0x11b78, 0x1093c, 0x11b3c, 0x1091e, 0x11b1e,
0x109be, 0x11bbe, 0x13ac0, 0x19d70, 0x1cebc, 0x13a60,
0x19d38, 0x1ce9e, 0x13a30, 0x19d1c, 0x13a18, 0x19d0e,
0x13a0c, 0x13a06, 0x11970, 0x18cbc, 0x13b70, 0x11938,
0x18c9e, 0x13b38, 0x1191c, 0x13b1c, 0x1190e, 0x13b0e,
0x108bc, 0x119bc, 0x1089e, 0x13bbc, 0x1199e, 0x13b9e,
0x1bd60, 0x1deb8, 0x1ef5e, 0x17a40, 0x1bd30, 0x1de9c,
0x17a20, 0x1bd18, 0x1de8e, 0x17a10, 0x1bd0c, 0x17a08,
0x1bd06, 0x17a04, 0x13960, 0x19cb8, 0x1ce5e, 0x17b60,
0x13930, 0x19c9c, 0x17b30, 0x1bd9c, 0x19c8e, 0x17b18,
0x1390c, 0x17b0c, 0x13906, 0x17b06, 0x118b8, 0x18c5e,
0x139b8, 0x1189c, 0x17bb8, 0x1399c, 0x1188e, 0x17b9c,
0x1398e, 0x17b8e, 0x1085e, 0x118de, 0x139de, 0x17bde,
0x17940, 0x1bcb0, 0x1de5c, 0x17920, 0x1bc98, 0x1de4e,
0x17910, 0x1bc8c, 0x17908, 0x1bc86, 0x17904, 0x17902,
0x138b0, 0x19c5c, 0x179b0, 0x13898, 0x19c4e, 0x17998,
0x1bcce, 0x1798c, 0x13886, 0x17986, 0x1185c, 0x138dc,
0x1184e, 0x179dc, 0x138ce, 0x179ce, 0x178a0, 0x1bc58,
0x1de2e, 0x17890, 0x1bc4c, 0x17888, 0x1bc46, 0x17884,
0x17882, 0x13858, 0x19c2e, 0x178d8, 0x1384c, 0x178cc,
0x13846, 0x178c6, 0x1182e, 0x1386e, 0x178ee, 0x17850,
0x1bc2c, 0x17848, 0x1bc26, 0x17844, 0x17842, 0x1382c,
0x1786c, 0x13826, 0x17866, 0x17828, 0x1bc16, 0x17824,
0x17822, 0x13816, 0x17836, 0x10578, 0x182be, 0x1053c,
0x1051e, 0x105be, 0x10d70, 0x186bc, 0x10d38, 0x1869e,
0x10d1c, 0x10d0e, 0x104bc, 0x10dbc, 0x1049e, 0x10d9e,
0x11d60, 0x18eb8, 0x1c75e, 0x11d30, 0x18e9c, 0x11d18,
0x18e8e, 0x11d0c, 0x11d06, 0x10cb8, 0x1865e, 0x11db8,
0x10c9c, 0x11d9c, 0x10c8e, 0x11d8e, 0x1045e, 0x10cde,
0x11dde, 0x13d40, 0x19eb0, 0x1cf5c, 0x13d20, 0x19e98,
0x1cf4e, 0x13d10, 0x19e8c, 0x13d08, 0x19e86, 0x13d04,
0x13d02, 0x11cb0, 0x18e5c, 0x13db0, 0x11c98, 0x18e4e,
0x13d98, 0x19ece, 0x13d8c, 0x11c86, 0x13d86, 0x10c5c,
0x11cdc, 0x10c4e, 0x13ddc, 0x11cce, 0x13dce, 0x1bea0,
0x1df58, 0x1efae, 0x1be90, 0x1df4c, 0x1be88, 0x1df46,
0x1be84, 0x1be82, 0x13ca0, 0x19e58, 0x1cf2e, 0x17da0,
0x13c90, 0x19e4c, 0x17d90, 0x1becc, 0x19e46, 0x17d88,
0x13c84, 0x17d84, 0x13c82, 0x17d82, 0x11c58, 0x18e2e,
0x13cd8, 0x11c4c, 0x17dd8, 0x13ccc, 0x11c46, 0x17dcc,
0x13cc6, 0x17dc6, 0x10c2e, 0x11c6e, 0x13cee, 0x17dee,
0x1be50, 0x1df2c, 0x1be48, 0x1df26, 0x1be44, 0x1be42,
0x13c50, 0x19e2c, 0x17cd0, 0x13c48, 0x19e26, 0x17cc8,
0x1be66, 0x17cc4, 0x13c42, 0x17cc2, 0x11c2c, 0x13c6c,
0x11c26, 0x17cec, 0x13c66, 0x17ce6, 0x1be28, 0x1df16,
0x1be24, 0x1be22, 0x13c28, 0x19e16, 0x17c68, 0x13c24,
0x17c64, 0x13c22, 0x17c62, 0x11c16, 0x13c36, 0x17c76,
0x1be14, 0x1be12, 0x13c14, 0x17c34, 0x13c12, 0x17c32,
0x102bc, 0x1029e, 0x106b8, 0x1835e, 0x1069c, 0x1068e,
0x1025e, 0x106de, 0x10eb0, 0x1875c, 0x10e98, 0x1874e,
0x10e8c, 0x10e86, 0x1065c, 0x10edc, 0x1064e, 0x10ece,
0x11ea0, 0x18f58, 0x1c7ae, 0x11e90, 0x18f4c, 0x11e88,
0x18f46, 0x11e84, 0x11e82, 0x10e58, 0x1872e, 0x11ed8,
0x18f6e, 0x11ecc, 0x10e46, 0x11ec6, 0x1062e, 0x10e6e,
0x11eee, 0x19f50, 0x1cfac, 0x19f48, 0x1cfa6, 0x19f44,
0x19f42, 0x11e50, 0x18f2c, 0x13ed0, 0x19f6c, 0x18f26,
0x13ec8, 0x11e44, 0x13ec4, 0x11e42, 0x13ec2, 0x10e2c,
0x11e6c, 0x10e26, 0x13eec, 0x11e66, 0x13ee6, 0x1dfa8,
0x1efd6, 0x1dfa4, 0x1dfa2, 0x19f28, 0x1cf96, 0x1bf68,
0x19f24, 0x1bf64, 0x19f22, 0x1bf62, 0x11e28, 0x18f16,
0x13e68, 0x11e24, 0x17ee8, 0x13e64, 0x11e22, 0x17ee4,
0x13e62, 0x17ee2, 0x10e16, 0x11e36, 0x13e76, 0x17ef6,
0x1df94, 0x1df92, 0x19f14, 0x1bf34, 0x19f12, 0x1bf32,
0x11e14, 0x13e34, 0x11e12, 0x17e74, 0x13e32, 0x17e72,
0x1df8a, 0x19f0a, 0x1bf1a, 0x11e0a, 0x13e1a, 0x17e3a,
0x1035c, 0x1034e, 0x10758, 0x183ae, 0x1074c, 0x10746,
0x1032e, 0x1076e, 0x10f50, 0x187ac, 0x10f48, 0x187a6,
0x10f44, 0x10f42, 0x1072c, 0x10f6c, 0x10726, 0x10f66,
0x18fa8, 0x1c7d6, 0x18fa4, 0x18fa2, 0x10f28, 0x18796,
0x11f68, 0x18fb6, 0x11f64, 0x10f22, 0x11f62, 0x10716,
0x10f36, 0x11f76, 0x1cfd4, 0x1cfd2, 0x18f94, 0x19fb4,
0x18f92, 0x19fb2, 0x10f14, 0x11f34, 0x10f12, 0x13f74,
0x11f32, 0x13f72, 0x1cfca, 0x18f8a, 0x19f9a, 0x10f0a,
0x11f1a, 0x13f3a, 0x103ac, 0x103a6, 0x107a8, 0x183d6,
0x107a4, 0x107a2, 0x10396, 0x107b6, 0x187d4, 0x187d2,
0x10794, 0x10fb4, 0x10792, 0x10fb2, 0x1c7ea}};
// Preferred width:height aspect ratio for the rendered symbol, used by determineDimensions().
private static final float PREFERRED_RATIO = 3.0f;
private static final float DEFAULT_MODULE_WIDTH = 0.357f; //1px in mm
private static final float HEIGHT = 2.0f; //mm
// Result of the most recent generateBarcodeLogic() call; null until a barcode is generated.
private BarcodeMatrix barcodeMatrix;
// When true, rows end with a single stop module instead of a right row indicator + full stop pattern.
private boolean compact;
// Compaction mode handed to the high-level encoder; defaults to AUTO.
private Compaction compaction;
// Charset handed to the high-level encoder; null means "use the encoder's default".
private Charset encoding;
// Allowed symbol dimension range consulted by determineDimensions().
private int minCols;
private int maxCols;
private int maxRows;
private int minRows;
/**
 * Creates a PDF417 generator in non-compact (full) mode.
 */
public PDF417() {
    this(false);
}
/**
 * Creates a PDF417 generator with default limits (2-30 columns, 2-30 rows),
 * automatic compaction and the encoder's default character set.
 *
 * @param compact if true, a compact (truncated) PDF417 symbol is produced
 */
public PDF417(boolean compact) {
    this.compact = compact;
    this.compaction = Compaction.AUTO;
    this.encoding = null; // null selects the high-level encoder's default charset
    this.minCols = 2;
    this.maxCols = 30;
    this.minRows = 2;
    this.maxRows = 30;
}
/**
 * Returns the matrix produced by the last {@code generateBarcodeLogic} call.
 *
 * @return the last generated barcode matrix, or {@code null} if none was generated yet
 */
public BarcodeMatrix getBarcodeMatrix() {
    return barcodeMatrix;
}
/**
 * Calculates the necessary number of rows as described in annex Q of ISO/IEC 15438:2001(E).
 *
 * @param m the number of source codewords prior to the addition of the Symbol Length
 *          Descriptor and any pad codewords
 * @param k the number of error correction codewords
 * @param c the number of columns in the symbol in the data region (excluding start, stop and
 *          row indicator codewords)
 * @return the number of rows in the symbol (r)
 */
private static int calculateNumberOfRows(int m, int k, int c) {
    // Codewords that must be placed: data + Symbol Length Descriptor (+1) + error correction.
    int total = m + 1 + k;
    int rows = total / c + 1;
    // If an entire extra row would hold nothing but padding, drop it.
    if (c * rows >= total + c) {
        rows--;
    }
    return rows;
}
/**
 * Calculates the number of pad codewords as described in 4.9.2 of ISO/IEC 15438:2001(E).
 *
 * @param m the number of source codewords prior to the addition of the Symbol Length
 *          Descriptor and any pad codewords
 * @param k the number of error correction codewords
 * @param c the number of columns in the symbol in the data region (excluding start, stop and
 *          row indicator codewords)
 * @param r the number of rows in the symbol
 * @return the number of pad codewords, zero if the symbol is already full
 */
private static int getNumberOfPadCodewords(int m, int k, int c, int r) {
    int dataCapacity = c * r - k;    // slots left for data once error correction is reserved
    int used = m + 1;                // source codewords plus the Symbol Length Descriptor
    return Math.max(0, dataCapacity - used);
}
/**
 * Run-length encodes one codeword pattern into bars: consecutive equal bits
 * (scanned from the most significant of {@code len} bits downwards) become a
 * single bar of that width.
 *
 * @param pattern bit pattern of the codeword
 * @param len     number of significant bits in {@code pattern}
 * @param logic   row that receives the bars
 */
private static void encodeChar(int pattern, int len, BarcodeRow logic) {
    int mask = 1 << (len - 1);
    // Current run color starts as the topmost bit, so the first iteration just extends the run.
    boolean color = (pattern & mask) != 0;
    int run = 0;
    while (mask != 0) {
        boolean bit = (pattern & mask) != 0;
        if (bit != color) {
            // Color changed: flush the finished run and start a new one.
            logic.addBar(color, run);
            color = bit;
            run = 0;
        }
        run++;
        mask >>= 1;
    }
    logic.addBar(color, run); // flush the trailing run
}
/**
 * Renders the codeword stream into the barcode matrix row by row, adding the
 * start pattern, left/right row indicator codewords and the stop pattern.
 *
 * @param fullCodewords        data plus error correction codewords, one codeword per char
 * @param c                    number of data columns per row
 * @param r                    number of rows
 * @param errorCorrectionLevel error correction level, encoded into the row indicators
 * @param logic                matrix that receives the bars
 */
private void encodeLowLevel(CharSequence fullCodewords,
                            int c,
                            int r,
                            int errorCorrectionLevel,
                            BarcodeMatrix logic) {
    int idx = 0;
    for (int y = 0; y < r; y++) {
        int cluster = y % 3; // rows cycle through the three codeword clusters
        logic.startRow();
        encodeChar(START_PATTERN, 17, logic.getCurrentRow());

        // Left/right row indicator values; which quantity goes on which side depends on the cluster.
        int base = 30 * (y / 3);
        int left;
        int right;
        switch (cluster) {
            case 0:
                left = base + ((r - 1) / 3);
                right = base + (c - 1);
                break;
            case 1:
                left = base + (errorCorrectionLevel * 3) + ((r - 1) % 3);
                right = base + ((r - 1) / 3);
                break;
            default:
                left = base + (c - 1);
                right = base + (errorCorrectionLevel * 3) + ((r - 1) % 3);
                break;
        }

        encodeChar(CODEWORD_TABLE[cluster][left], 17, logic.getCurrentRow());
        for (int x = 0; x < c; x++) {
            encodeChar(CODEWORD_TABLE[cluster][fullCodewords.charAt(idx++)], 17, logic.getCurrentRow());
        }
        if (compact) {
            // Compact PDF417: a single stop module replaces right indicator + full stop pattern.
            encodeChar(STOP_PATTERN, 1, logic.getCurrentRow());
        } else {
            encodeChar(CODEWORD_TABLE[cluster][right], 17, logic.getCurrentRow());
            encodeChar(STOP_PATTERN, 18, logic.getCurrentRow());
        }
    }
}
/**
 * Encodes the message into {@code barcodeMatrix}, running the full pipeline:
 * high-level encoding, dimensioning, padding, error correction and low-level
 * bar rendering.
 *
 * @param msg                  message to encode
 * @param errorCorrectionLevel PDF417 error correction level to use
 * @throws WriterException if the contents cannot be encoded in this format
 */
public void generateBarcodeLogic(String msg, int errorCorrectionLevel) throws WriterException {
    // Step 1: high-level encoding of the message into codewords.
    int errorCorrectionCodeWords = PDF417ErrorCorrection.getErrorCorrectionCodewordCount(errorCorrectionLevel);
    String highLevel = PDF417HighLevelEncoder.encodeHighLevel(msg, compaction, encoding);
    int sourceCodeWords = highLevel.length();

    // Step 2: choose symbol dimensions and derive the pad count.
    int[] dimension = determineDimensions(sourceCodeWords, errorCorrectionCodeWords);
    int cols = dimension[0];
    int rows = dimension[1];
    int pad = getNumberOfPadCodewords(sourceCodeWords, errorCorrectionCodeWords, cols, rows);

    // Step 3: assemble the data codewords; a PDF417 symbol holds at most 929 codewords.
    if (sourceCodeWords + errorCorrectionCodeWords + 1 > 929) { // +1 for symbol length CW
        throw new WriterException(
            "Encoded message contains too many code words, message too big (" + msg.length() + " bytes)");
    }
    int n = sourceCodeWords + pad + 1;
    StringBuilder sb = new StringBuilder(n);
    sb.append((char) n); // Symbol Length Descriptor leads the stream
    sb.append(highLevel);
    for (int i = 0; i < pad; i++) {
        sb.append((char) 900); // PAD codeword
    }
    String dataCodewords = sb.toString();

    // Step 4: compute error correction codewords.
    String ec = PDF417ErrorCorrection.generateErrorCorrection(dataCodewords, errorCorrectionLevel);

    // Step 5: low-level rendering into the bar matrix.
    barcodeMatrix = new BarcodeMatrix(rows, cols);
    encodeLowLevel(dataCodewords + ec, cols, rows, errorCorrectionLevel, barcodeMatrix);
}
/**
 * Determine optimal nr of columns and rows for the specified number of
 * codewords, preferring the aspect ratio closest to {@code PREFERRED_RATIO}.
 *
 * @param sourceCodeWords          number of code words
 * @param errorCorrectionCodeWords number of error correction code words
 * @return dimension pair: {@code [cols, rows]}
 * @throws WriterException if the message cannot fit within the configured limits
 */
private int[] determineDimensions(int sourceCodeWords, int errorCorrectionCodeWords) throws WriterException {
    int[] best = null;
    float bestRatio = 0.0f;
    for (int cols = minCols; cols <= maxCols; cols++) {
        int rows = calculateNumberOfRows(sourceCodeWords, errorCorrectionCodeWords, cols);
        if (rows < minRows) {
            break;    // adding columns only shrinks the row count further
        }
        if (rows > maxRows) {
            continue; // too tall at this width; try a wider symbol
        }
        float ratio = ((float) (17 * cols + 69) * DEFAULT_MODULE_WIDTH) / (rows * HEIGHT);
        // Keep the previous candidate if it is strictly closer to the preferred ratio.
        if (best != null && Math.abs(ratio - PREFERRED_RATIO) > Math.abs(bestRatio - PREFERRED_RATIO)) {
            continue;
        }
        bestRatio = ratio;
        best = new int[] {cols, rows};
    }
    // Fallback when the configured minimums exceed what the data actually needs.
    if (best == null) {
        int rows = calculateNumberOfRows(sourceCodeWords, errorCorrectionCodeWords, minCols);
        if (rows < minRows) {
            best = new int[] {minCols, minRows};
        }
    }
    if (best == null) {
        throw new WriterException("Unable to fit message in columns");
    }
    return best;
}
/**
 * Sets max/min row/col values.
 * <p>
 * Note the unusual parameter order (max before min). No validation is
 * performed here; callers must supply min &lt;= max.
 *
 * @param maxCols maximum allowed columns
 * @param minCols minimum allowed columns
 * @param maxRows maximum allowed rows
 * @param minRows minimum allowed rows
 */
public void setDimensions(int maxCols, int minCols, int maxRows, int minRows) {
    this.maxCols = maxCols;
    this.minCols = minCols;
    this.maxRows = maxRows;
    this.minRows = minRows;
}
/**
 * Sets the compaction mode used by the high-level encoder.
 *
 * @param compaction compaction mode to use
 */
public void setCompaction(Compaction compaction) {
    this.compaction = compaction;
}
/**
 * Enables or disables compact (truncated) PDF417 output.
 *
 * @param compact if true, enables compaction
 */
public void setCompact(boolean compact) {
    this.compact = compact;
}
/**
 * Sets the character encoding passed to the high-level encoder.
 *
 * @param encoding character encoding to use; null selects the encoder default
 */
public void setEncoding(Charset encoding) {
    this.encoding = encoding;
}
}
| |
/*******************************************************************************
* Copyright Duke Comprehensive Cancer Center and SemanticBits
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/c3pr/LICENSE.txt for details.
******************************************************************************/
/**
*
*/
package edu.duke.cabig.c3pr.webservice.integration;
import static edu.duke.cabig.c3pr.webservice.integration.ISO21090Helper.BL;
import static edu.duke.cabig.c3pr.webservice.integration.ISO21090Helper.CD;
import static edu.duke.cabig.c3pr.webservice.integration.ISO21090Helper.II;
import static edu.duke.cabig.c3pr.webservice.integration.ISO21090Helper.IVLTSDateTime;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.namespace.QName;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang.time.DateUtils;
import edu.duke.cabig.c3pr.webservice.common.AdvanceSearchCriterionParameter;
import edu.duke.cabig.c3pr.webservice.common.BiologicEntityIdentifier;
import edu.duke.cabig.c3pr.webservice.common.Consent;
import edu.duke.cabig.c3pr.webservice.common.DSETAdvanceSearchCriterionParameter;
import edu.duke.cabig.c3pr.webservice.common.DSETPerformedStudySubjectMilestone;
import edu.duke.cabig.c3pr.webservice.common.DSETPerson;
import edu.duke.cabig.c3pr.webservice.common.DSETStudySubjectConsentVersion;
import edu.duke.cabig.c3pr.webservice.common.Document;
import edu.duke.cabig.c3pr.webservice.common.DocumentIdentifier;
import edu.duke.cabig.c3pr.webservice.common.DocumentVersion;
import edu.duke.cabig.c3pr.webservice.common.Organization;
import edu.duke.cabig.c3pr.webservice.common.OrganizationIdentifier;
import edu.duke.cabig.c3pr.webservice.common.PerformedStudySubjectMilestone;
import edu.duke.cabig.c3pr.webservice.common.Person;
import edu.duke.cabig.c3pr.webservice.common.StudyProtocolDocumentVersion;
import edu.duke.cabig.c3pr.webservice.common.StudyProtocolVersion;
import edu.duke.cabig.c3pr.webservice.common.StudySiteProtocolVersionRelationship;
import edu.duke.cabig.c3pr.webservice.common.StudySubjectConsentVersion;
import edu.duke.cabig.c3pr.webservice.common.StudySubjectProtocolVersionRelationship;
import edu.duke.cabig.c3pr.webservice.common.SubjectIdentifier;
import edu.duke.cabig.c3pr.webservice.iso21090.AddressPartType;
import edu.duke.cabig.c3pr.webservice.iso21090.DSETCD;
import edu.duke.cabig.c3pr.webservice.iso21090.EntityNamePartType;
import edu.duke.cabig.c3pr.webservice.iso21090.NullFlavor;
import edu.duke.cabig.c3pr.webservice.iso21090.ST;
import edu.duke.cabig.c3pr.webservice.subjectregistry.DSETStudySubject;
import edu.duke.cabig.c3pr.webservice.subjectregistry.ImportStudySubjectRegistryRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.InitiateStudySubjectRegistryRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.QueryConsentsByStudySubjectRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.QueryStudySubjectRegistryByConsentRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.QueryStudySubjectRegistryByStatusRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.QueryStudySubjectRegistryRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.QueryStudySubjectRegistryStatusHistoryRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.RetrieveStudySubjectDemographyHistoryRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject;
import edu.duke.cabig.c3pr.webservice.subjectregistry.SubjectRegistry;
import edu.duke.cabig.c3pr.webservice.subjectregistry.SubjectRegistryService;
import edu.duke.cabig.c3pr.webservice.subjectregistry.UpdateStudySubjectConsentRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.UpdateStudySubjectRegistryDemographyRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.UpdateStudySubjectRegistryRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.UpdateStudySubjectRegistryStatusHistoryRequest;
import edu.duke.cabig.c3pr.webservice.subjectregistry.UpdateStudySubjectRegistryStatusRequest;
/**
* This test will run C3PR in embedded Tomcat and test Subject Registry web
* service against it. <br>
*
* @author Kruttik Aggarwal
* @version 1.0
*/
public class SubjectRegistryWebServiceTest extends C3PREmbeddedTomcatTestBase {
// --- Alternate ("modified") contact and demographic values used to exercise update operations ---
protected static final String TEST_FAX_MODIFIED = "222-111-2222";
protected static final String TEST_FAX_ISO_MODIFIED = "x-text-fax:" + TEST_FAX_MODIFIED;
protected static final String TEST_PHONE_MODIFIED = "000-444-5555";
protected static final String TEST_PHONE_ISO_MODIFIED = "tel:" + TEST_PHONE_MODIFIED;
protected static final String TEST_EMAIL_ADDR_MODIFIED = "test_modified@mail.com";
protected static final String TEST_EMAIL_ADDR_ISO_MODIFIED = "mailto:"
        + TEST_EMAIL_ADDR_MODIFIED;
protected static final String RACE_ASIAN_MODIFIED = "Black or African American";
protected static final String RACE_WHITE_MODIFIED = "American Indian or Alaska Native";
protected static final String TEST_COUNTRY_MODIFIED = "India";
protected static final String TEST_ZIP_CODE_MODIFIED = "20190";
protected static final String TEST_STATE_CODE_MODIFIED = "PA";
protected static final String TEST_CITY_NAME_MODIFIED = "Reston";
protected static final String TEST_STREET_ADDRESS_MODIFIED = "13921 Park Center Rd STE 420";
// Secondary address used alongside the primary test address.
protected static final String TEST_COUNTRY_2 = "US";
protected static final String TEST_ZIP_CODE_2 = "20170";
protected static final String TEST_STATE_CODE_2 = "VA";
protected static final String TEST_CITY_NAME_2 = "Herndon";
protected static final String TEST_STREET_ADDRESS_2 = "111 City Avenue";
protected static final String TEST_LAST_NAME_MODIFIED = "Davis";
protected static final String TEST_MID_NAME_MODIFIED = "M";
protected static final String TEST_FIRST_NAME_MODIFIED = "Geena";
protected static final String TEST_NAME_PREFIX_MODIFIED = "Sr.";
protected static final String TEST_NAME_SUFFIX_MODIFIED = "M.D.";
// Timestamp pattern for the ISO date constants below (e.g. TEST_BIRTH_DATE_ISO).
protected static final String TS_DATETIME_PATTERN = "yyyyMMddHHmmss";
// --- Baseline (pre-modification) contact and demographic values ---
protected static final String TEST_FAX = "000-111-2222";
protected static final String TEST_FAX_ISO = "x-text-fax:" + TEST_FAX;
protected static final String TEST_PHONE = "333-444-5555";
protected static final String TEST_PHONE_ISO = "tel:" + TEST_PHONE;
protected static final String TEST_EMAIL_ADDR = "test@mail.com";
protected static final String TEST_EMAIL_ADDR_ISO = "mailto:"
        + TEST_EMAIL_ADDR;
protected static final String RACE_ASIAN = "Asian";
protected static final String RACE_WHITE = "White";
protected static final String TEST_COUNTRY = "USA";
protected static final String TEST_ZIP_CODE = "22203-5555";
protected static final String TEST_STATE_CODE = "VA";
protected static final String TEST_CITY_NAME = "Arlington";
protected static final String TEST_STREET_ADDRESS = "1029 N Stuart St Unit 999";
protected static final String TEST_LAST_NAME = "Clooney";
protected static final String TEST_NAME_PREFIX = "Jr.";
protected static final String TEST_NAME_SUFFIX = "Ph.D.";
protected static final String TEST_MID_NAME = "Z";
protected static final String TEST_FIRST_NAME = "Rudolph";
protected static final String MARITAL_STATUS_SINGLE = "Single";
protected static final String MARITAL_STATUS_SINGLE_MODIFIED = "Married";
protected static final String ETHNIC_CODE_NOT_REPORTED = "Not Reported";
protected static final String ETHNIC_CODE_MODIFIED = "Hispanic or Latino";
protected static final String TEST_BIRTH_DATE_ISO = "19800101000000";
protected static final String TEST_BIRTH_DATE_ISO_MODIFIED = "19990205000000";
protected static final String GENDER_MALE = "Male";
protected static final String GENDER_MALE_MODIFIED = "Female";
// --- Identifier type codes and test identifiers ---
protected static final String ORG_ID_TYPE_MRN = "MRN";
protected static final String ORG_ID_TYPE_STUDY = "COORDINATING_CENTER_IDENTIFIER";
protected static final String ORG_ID_TYPE_STUDYSUBJECT = "COORDINATING_CENTER_ASSIGNED_STUDY_SUBJECT_IDENTIFIER";
protected static final String ORG_ID_TYPE_STUDYSUBJECT_MODIFIED = "STUDY_SUBJECT_IDENTIFIER";
protected static final String TEST_BIO_ID = "test_subject_id";
protected static final String TEST_BIO_ID_MODIFIED = "test_subject_id_modified";
protected static final String TEST_BIO_ID_IMPORT = "test_subject_id_import";
protected static final Boolean TEST_BIO_ID_PRIMARYINDICATOR=false;
protected static final String TEST_STUDYSUBJECT_ID = "002";
protected static final String TEST_STUDYSUBJECT_SYSTEM_ID = "005";
protected static final String TEST_STUDYSUBJECT_ID_MODIFIED = "003";
protected static final String TEST_STUDYSUBJECT_ID_IMPORT = "004";
protected static final Boolean TEST_STUDYSUBJECT_ID_PRIMARYINDICATOR=false;
protected static final Boolean TEST_STUDYSUBJECT_SYSTEM_ID_PRIMARYINDICATOR=true;
protected static final String TEST_STUDY_ID = "test_study_id";
protected static final String TEST_SYSTEM_STUDY_ID = "test_system_study_id";
protected static final Boolean TEST_STUDY_ID_PRIMARYINDICATOR=false;
protected static final String ORG_ID_TYPE_CTEP = "CTEP";
protected static final String TEST_ORG_ID = "MN026";
// --- First test consent (delivery/signed/declined dates, questions and answers) ---
protected static final String TEST_CONSENT_DELIVERY_DATE1 = "20090101000000";
protected static final String TEST_CONSENT_SIGNED_DATE1 = "20100101000000";
protected static final String TEST_CONSENT_DECLINED_DATE1 = "20100202000000";
protected static final String TEST_CONSENT_PRESENTER1 = "John Doe";
protected static final String TEST_CONSENTING_METHOD1 = "Written";
protected static final String TEST_CONSENT_NAME1 = "General1";
protected static final String TEST_CONSENT_DESC1 = "Desc1";
protected static final String TEST_CONSENTING_DOCID1 = "DOC_ID1";
protected static final String TEST_CONSENT_VERSION1 = "1.0";
protected static final Boolean TEST_CONSENT_ANS11=true;
protected static final Boolean TEST_CONSENT_ANS12=false;
protected static final String TEST_CONSENT_QUES11="Q11";
protected static final String TEST_CONSENT_QUES12="Q12";
// --- Second test consent ---
protected static final String TEST_CONSENT_DELIVERY_DATE2 = "20060101000000";
protected static final String TEST_CONSENT_SIGNED_DATE2 = "20070101000000";
protected static final String TEST_CONSENT_DECLINED_DATE2 = "20070202000000";
protected static final String TEST_CONSENT_PRESENTER2 = "Deep Singh";
protected static final String TEST_CONSENTING_METHOD2 = "Verbal";
protected static final String TEST_CONSENT_NAME2 = "General2";
protected static final String TEST_CONSENT_DESC2 = "Desc2";
protected static final String TEST_CONSENTING_DOCID2 = "DOC_ID2";
protected static final String TEST_CONSENT_VERSION2 = "2.0";
protected static final Boolean TEST_CONSENT_ANS21=true;
protected static final Boolean TEST_CONSENT_ANS22=false;
protected static final String TEST_CONSENT_QUES21="Q21";
protected static final String TEST_CONSENT_QUES22="Q22";
// --- Registry status fixtures ---
protected static final String TEST_REGISTRYSTATUS_CODE1="Screen Failed";
protected static final String TEST_REGISTRYSTATUS_COMMENT1="Some Comment";
protected static final String TEST_REGISTRYSTATUS_DATE1 = "20080101000000";
protected static final String TEST_REGISTRYSTATUS_REASON11 = "FAILED INCLUSION";
protected static final String TEST_REGISTRYSTATUS_REASON12 = "Lab_Out_Of_Range1";
protected static final String TEST_REGISTRYSTATUS_CODE2="Withdrawn";
protected static final String TEST_REGISTRYSTATUS_DATE2 = "20070101000000";
protected static final String TEST_REGISTRYSTATUS_REASON21 = "UNWILLING";
protected static final String TEST_REGISTRYSTATUS_REASON22 = "Distance";
protected static final String TEST_PAYMENT_METHOD = "private insurance";
// NOTE(review): identical to TEST_PAYMENT_METHOD, so "modifying" the payment method
// changes nothing — confirm whether a distinct value was intended.
protected static final String TEST_PAYMENT_METHOD_MODIFIED = "private insurance";
protected static final String TEST_DATA_ENTRY_STATUS = "Complete";
protected static final String TEST_DATA_ENTRY_STATUS_MODIFIED = "Incomplete";
// --- Study and advanced-search fixtures ---
protected static final String TEST_SHORTTITLE = "short_title_text";
protected static final String TEST_LONGTITLE = "long_title";
protected static final String TEST_DESC = "description";
protected static final String TEST_PREDICATE = "=";
protected static final String TEST_OBJ_NAME = "edu.duke.cabig.c3pr.domain.Identifier";
protected static final String TEST_OBJ_CTX_NAME = "StudySubject";
protected static final String TEST_ATTRIBUTE_NAME = "value";
// --- External-system subject identifiers ---
protected static final String SUBJECT_SYSTEM_ID = "baskjd89y34343a-123u9a";
protected static final String SUBJECT_SYSTEM_ID_MODIFIED = "baskjd89y34343a-123u9a_modified";
protected static final String SYSTEM_NAME = "MAYO";
protected static final String SYSTEM_ID_TYPE = "SUBJECT_IDENTIFIER";
// NOTE(review): non-static, unlike the sibling DEFAULT_SUBJECT_SYSTEM_ID_TYPE — confirm
// whether this was meant to be static final as well.
protected final String DEFAULT_SUBJECT_SYSTEM_ID_NAME = "C3PR";
protected static final String DEFAULT_SUBJECT_SYSTEM_ID_TYPE = "SUBJECT_IDENTIFIER";
// Qualified name of the Subject Registry web service.
private static final QName SERVICE_NAME = new QName(
        "http://enterpriseservices.nci.nih.gov/SubjectRegistryService",
        "SubjectRegistryService");
private static final String WS_ENDPOINT_SERVLET_PATH = "/services/services/SubjectRegistry";
// Both URLs are initialized in setUp().
private URL endpointURL;
private URL wsdlLocation;
// Helper for constructing ISO 21090 datatype instances (II, CD, BL, ...).
protected static final ISO21090Helper iso = new ISO21090Helper();
/**
 * Set this JVM property to true if this test should not bring up an
 * instance of embedded Tomcat and use one already running locally at
 * <b>https://localhost:8443/c3pr</b>.
 */
protected boolean noEmbeddedTomcat = Boolean.valueOf(System.getProperty(
        "noEmbeddedTomcat", "false"));
@Override
protected void setUp() throws Exception {
    if (noEmbeddedTomcat) {
        // Target an externally started local C3PR instance instead of booting Tomcat.
        endpointURL = new URL(
                "https://localhost:8443/c3pr/services/services/SubjectRegistry");
        initDataSourceFile();
    } else {
        super.setUp();
        endpointURL = new URL("https://"
                + InetAddress.getLocalHost().getHostName() + ":" + sslPort
                + C3PR_CONTEXT + WS_ENDPOINT_SERVLET_PATH);
    }
    // The WSDL lives at the endpoint URL with a "?wsdl" query string.
    wsdlLocation = new URL(endpointURL + "?wsdl");
    logger.info("endpointURL: " + endpointURL);
    logger.info("wsdlLocation: " + wsdlLocation);
}
@Override
protected void tearDown() throws Exception {
    if (noEmbeddedTomcat) {
        // Nothing to shut down: the server was started outside this test.
        return;
    }
    super.tearDown();
}
/**
 * Runs the full subject-registry scenario end to end. The individual steps
 * share server-side state, so they execute in this fixed order; any failure
 * is logged with its full stack trace and fails the test.
 *
 * @throws InterruptedException
 * @throws IOException
 */
public void testSubjectRegistryUtility() throws InterruptedException, IOException, Exception {
    try {
        executeInitiateStudySubjectTest();
        executeUpdateStudySubjectConsentTest();
        executeUpdateStudySubjectRegistryStatusTest();
        executeUpdateStudySubjectRegistryStatusHistoryTest();
        executeUpdateStudySubjectTest();
        executeQuerySubjectRegistryTest();
        executeUpdateStudySubjectDemographyTest();
        executeRetrieveStudySubjectDemographyHistoryTest();
        executeQueryStudySubjectRegistryStatusHistoryTest();
        executeQuerySubjectRegistryByRegistryStatusTest();
        executeQuerySubjectRegistryByConsentTest();
        executeQueryConsentsByStudySubjectTest();
        executeImportSubjectRegistryTest();
    } catch (Exception e) {
        String trace = ExceptionUtils.getFullStackTrace(e);
        logger.severe(trace);
        fail(trace);
    }
}
/**
 * Registers a new study subject and verifies the service echoes back the
 * expected subject representation.
 */
protected void executeInitiateStudySubjectTest() throws SQLException, Exception {
    SubjectRegistry registry = getService();

    // Build a registration request for a brand-new study subject.
    final InitiateStudySubjectRegistryRequest request = new InitiateStudySubjectRegistryRequest();
    request.setSiteIdentifier(createOrgId());
    request.setStudyIdentifier(createSystemDocumentId());
    request.setSubjectIdentifier(createBioEntitySystemId(true));
    request.setStudySubject(createStudySubject());

    // Dump the outgoing payload to stdout for debugging.
    JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
    Marshaller marshaller = context.createMarshaller();
    marshaller.marshal(request, System.out);
    System.out.flush();
    System.out.println();

    StudySubject actual = registry.initiateStudySubject(request).getStudySubject();
    assertNotNull(actual);

    StudySubject expected = createExpectedStudySubject();
    // If the default C3PR-generated system identifier was part of the request there is
    // nothing to strip; otherwise remove it from the response before the deep compare.
    if (!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())) {
        stripC3PRDefaultSystemIdentifier(actual.getEntity().getBiologicEntityIdentifier());
    }
    assertTrue(BeanUtils.deepCompare(expected, actual));
}
/**
 * Attaches the test consents to the previously registered subject and
 * verifies the updated subject matches expectations.
 */
protected void executeUpdateStudySubjectConsentTest() throws SQLException, Exception {
    SubjectRegistry registry = getService();

    final UpdateStudySubjectConsentRequest request = new UpdateStudySubjectConsentRequest();
    request.setStudySubjectIdentifier(createSubjectSystemId());
    DSETStudySubjectConsentVersion consents = new DSETStudySubjectConsentVersion();
    consents.getItem().addAll(getSubjectConsents());
    request.setStudySubjectConsentVersions(consents);

    // Dump the outgoing payload to stdout for debugging.
    JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
    Marshaller marshaller = context.createMarshaller();
    marshaller.marshal(request, System.out);
    System.out.flush();
    System.out.println();

    StudySubject actual = registry.updateStudySubjectConsent(request).getStudySubject();
    assertNotNull(actual);

    StudySubject expected = createExpectedStudySubject();
    expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
    // Strip the default C3PR system identifier from the response unless it was sent in the request.
    if (!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())) {
        stripC3PRDefaultSystemIdentifier(actual.getEntity().getBiologicEntityIdentifier());
    }
    assertTrue(BeanUtils.deepCompare(expected, actual));
}
/**
 * Sets a registry status on the subject and verifies the returned subject
 * carries both the earlier consents and the new status.
 */
protected void executeUpdateStudySubjectRegistryStatusTest() throws SQLException, Exception {
    SubjectRegistry registry = getService();

    final UpdateStudySubjectRegistryStatusRequest request = new UpdateStudySubjectRegistryStatusRequest();
    request.setStudySubjectIdentifier(createSubjectSystemId());
    request.setStudySubjectStatus(createStatus1());

    // Dump the outgoing payload to stdout for debugging.
    JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
    Marshaller marshaller = context.createMarshaller();
    marshaller.marshal(request, System.out);
    System.out.flush();
    System.out.println();

    StudySubject actual = registry.updateStudySubjectRegistryStatus(request).getStudySubject();
    assertNotNull(actual);

    StudySubject expected = createExpectedStudySubject();
    expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
    expected.getStudySubjectStatus().add(createStatus1());
    // Strip the default C3PR system identifier from the response unless it was sent in the request.
    if (!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())) {
        stripC3PRDefaultSystemIdentifier(actual.getEntity().getBiologicEntityIdentifier());
    }
    assertTrue(BeanUtils.deepCompare(expected, actual));
}
/**
 * Replaces the subject's registry status history with a single status and
 * verifies the returned subject reflects exactly that history.
 */
protected void executeUpdateStudySubjectRegistryStatusHistoryTest() throws SQLException, Exception {
    SubjectRegistry registry = getService();

    final UpdateStudySubjectRegistryStatusHistoryRequest request = new UpdateStudySubjectRegistryStatusHistoryRequest();
    request.setStudySubjectIdentifier(createSubjectSystemId());
    DSETPerformedStudySubjectMilestone history = new DSETPerformedStudySubjectMilestone();
    history.getItem().add(createStatus2());
    request.setStudySubjectRegistryStatusHistory(history);

    // Dump the outgoing payload to stdout for debugging.
    JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
    Marshaller marshaller = context.createMarshaller();
    marshaller.marshal(request, System.out);
    System.out.flush();
    System.out.println();

    StudySubject actual = registry.updateStudySubjectRegistryStatusHistory(request).getStudySubject();
    assertNotNull(actual);

    StudySubject expected = createExpectedStudySubject();
    expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
    // Only the replacement status is expected: the history update supersedes the prior status.
    expected.getStudySubjectStatus().add(createStatus2());
    // Strip the default C3PR system identifier from the response unless it was sent in the request.
    if (!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())) {
        stripC3PRDefaultSystemIdentifier(actual.getEntity().getBiologicEntityIdentifier());
    }
    assertTrue(BeanUtils.deepCompare(expected, actual));
}
/**
 * Verifies {@code updateStudySubject}: submits the "modified" subject payload
 * and deep-compares the service response with the modified JAXB expectation.
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeUpdateStudySubjectTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final UpdateStudySubjectRegistryRequest request = new UpdateStudySubjectRegistryRequest();
request.setStudySubjectIdentifier(createSubjectSystemId());
StudySubject studySubject = createStudySubjectModified();
request.setStudySubject(studySubject);
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
StudySubject createdStudySubject = service.updateStudySubject(request).getStudySubject();
assertNotNull(createdStudySubject);
// Expected subject reflects the modified identifiers plus consents and status #2.
StudySubject expected = createStudySubjectJAXBObjectModified();
expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
expected.getStudySubjectStatus().add(createStatus2());
// If the default c3pr generated system identifier is sent in the request, there is no need to strip it from the created subject
//system identifiers in response, otherwise we need to remove the default system identifier from response before doing deep compare.
if(!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())){
stripC3PRDefaultSystemIdentifier( createdStudySubject.getEntity().getBiologicEntityIdentifier());
}
assertTrue(BeanUtils.deepCompare(expected, createdStudySubject));
}
/**
 * Verifies {@code querySubjectRegistry}: searches by the advanced-search
 * criterion and expects exactly one hit matching the modified subject.
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeQuerySubjectRegistryTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final QueryStudySubjectRegistryRequest request = new QueryStudySubjectRegistryRequest();
DSETAdvanceSearchCriterionParameter dsetAdvanceSearchCriterionParameter = new DSETAdvanceSearchCriterionParameter();
dsetAdvanceSearchCriterionParameter.getItem().add(createAdvaceSearchParam());
request.setSearchParameter(dsetAdvanceSearchCriterionParameter);
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
DSETStudySubject studySubjects = service.querySubjectRegistry(request).getStudySubjects();
assertNotNull(studySubjects);
assertEquals(1, studySubjects.getItem().size());
StudySubject expected = createStudySubjectJAXBObjectModified();
expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
expected.getStudySubjectStatus().add(createStatus2());
// If the default c3pr generated system identifier is sent in the request, there is no need to strip it from the created subject
//system identifiers in response, otherwise we need to remove the default system identifier from response before doing deep compare.
if(!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())){
stripC3PRDefaultSystemIdentifier( studySubjects.getItem().get(0).getEntity().getBiologicEntityIdentifier());
}
assertTrue(BeanUtils.deepCompare(expected, studySubjects.getItem().get(0)));
}
/**
 * Verifies {@code updateStudySubjectDemography}: updates the subject's person
 * record to the "modified" demography and deep-compares the response.
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeUpdateStudySubjectDemographyTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final UpdateStudySubjectRegistryDemographyRequest request = new UpdateStudySubjectRegistryDemographyRequest();
request.setStudySubjectIdentifier(createSubjectSystemId());
request.setPerson(createPersonModified());
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
StudySubject createdStudySubject = service.updateStudySubjectDemography(request).getStudySubject();
assertNotNull(createdStudySubject);
StudySubject expected = createStudySubjectJAXBObjectModified();
expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
expected.getStudySubjectStatus().add(createStatus2());
// The expectation's entity must carry the same modified demography that was sent.
expected.setEntity(createPersonModified());
// If the default c3pr generated system identifier is sent in the request, there is no need to strip it from the created subject
//system identifiers in response, otherwise we need to remove the default system identifier from response before doing deep compare.
if(!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())){
stripC3PRDefaultSystemIdentifier( createdStudySubject.getEntity().getBiologicEntityIdentifier());
}
assertTrue(BeanUtils.deepCompare(expected, createdStudySubject));
}
/**
 * Verifies {@code retrieveStudySubjectDemographyHistory}: looks up the
 * patient's demography history by the primary system identifier and expects
 * a single entry equal to the modified person.
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeRetrieveStudySubjectDemographyHistoryTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final RetrieveStudySubjectDemographyHistoryRequest request = new RetrieveStudySubjectDemographyHistoryRequest();
request.setPatientIdentifier(createBioEntitySystemId(true));
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
DSETPerson subjectDemographies = service.retrieveStudySubjectDemographyHistory(request).getPatients();
assertNotNull(subjectDemographies);
assertEquals(1, subjectDemographies.getItem().size());
Person expected = createPersonModified();
// If the default c3pr generated system identifier is sent in the request, there is no need to strip it from the created subject
//system identifiers in response, otherwise we need to remove the default system identifier from response before doing deep compare.
if(!ifC3PRDefaultSystemIdentifierSent(expected.getBiologicEntityIdentifier())){
stripC3PRDefaultSystemIdentifier( subjectDemographies.getItem().get(0).getBiologicEntityIdentifier());
}
assertTrue(BeanUtils.deepCompare(expected, subjectDemographies.getItem().get(0)));
}
/**
 * Verifies {@code queryStudySubjectRegistryStatusHistory}: expects the
 * subject's status history to contain exactly one entry equal to status #2.
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeQueryStudySubjectRegistryStatusHistoryTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final QueryStudySubjectRegistryStatusHistoryRequest request = new QueryStudySubjectRegistryStatusHistoryRequest();
request.setStudySubjectIdentifier(createSubjectSystemId());
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
DSETPerformedStudySubjectMilestone statusHistory = service.queryStudySubjectRegistryStatusHistory(request).getStudySubjectRegistryStatusHistory();
assertNotNull(statusHistory);
assertEquals(1, statusHistory.getItem().size());
PerformedStudySubjectMilestone expected = createStatus2();
assertTrue(BeanUtils.deepCompare(expected, statusHistory.getItem().get(0)));
}
/**
 * Verifies {@code querySubjectRegistryByRegistryStatus}: a query by status #2
 * yields exactly the modified subject, while a query by status #1 yields
 * no results (history was replaced earlier in the run).
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeQuerySubjectRegistryByRegistryStatusTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final QueryStudySubjectRegistryByStatusRequest request = new QueryStudySubjectRegistryByStatusRequest();
request.setStudyIdentifier(createDocumentId());
request.setRegistryStatus(createStatus2());
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
DSETStudySubject studySubjects = service.querySubjectRegistryByRegistryStatus(request).getStudySubjects();
assertNotNull(studySubjects);
assertEquals(1, studySubjects.getItem().size());
StudySubject expected = createStudySubjectJAXBObjectModified();
expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
expected.getStudySubjectStatus().add(createStatus2());
expected.setEntity(createPersonModified());
// If the default c3pr generated system identifier is sent in the request, there is no need to strip it from the created subject
//system identifiers in response, otherwise we need to remove the default system identifier from response before doing deep compare.
if(!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())){
stripC3PRDefaultSystemIdentifier( studySubjects.getItem().get(0).getEntity().getBiologicEntityIdentifier());
}
assertTrue(BeanUtils.deepCompare(expected, studySubjects.getItem().get(0)));
// Negative case: status #1 is no longer in the subject's history, so no match.
request.setRegistryStatus(createStatus1());
studySubjects = service.querySubjectRegistryByRegistryStatus(request).getStudySubjects();
assertNotNull(studySubjects);
assertEquals(0, studySubjects.getItem().size());
}
/**
 * Verifies {@code querySubjectRegistryByConsent} with three consent filters:
 * consent #1 (title+text, no version) matches, consent #2 (title+text+version)
 * matches, and consent #2 with a wrong version matches nothing.
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeQuerySubjectRegistryByConsentTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final QueryStudySubjectRegistryByConsentRequest request = new QueryStudySubjectRegistryByConsentRequest();
request.setStudyIdentifier(createDocumentId());
// Filter #1: consent identified by title and text only (no version number).
Consent consent = new Consent();
consent.setOfficialTitle(iso.ST(TEST_CONSENT_NAME1));
consent.setText(iso.ED(TEST_CONSENT_DESC1));
// consent.setVersionNumberText(iso.ST(TEST_CONSENT_VERSION1));
request.setConsent(consent);
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
DSETStudySubject studySubjects = service.querySubjectRegistryByConsent(request).getStudySubjects();
assertNotNull(studySubjects);
assertEquals(1, studySubjects.getItem().size());
StudySubject expected = createStudySubjectJAXBObjectModified();
expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
expected.getStudySubjectStatus().add(createStatus2());
expected.setEntity(createPersonModified());
// If the default c3pr generated system identifier is sent in the request, there is no need to strip it from the created subject
//system identifiers in response, otherwise we need to remove the default system identifier from response before doing deep compare.
if(!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())){
stripC3PRDefaultSystemIdentifier( studySubjects.getItem().get(0).getEntity().getBiologicEntityIdentifier());
}
assertTrue(BeanUtils.deepCompare(expected, studySubjects.getItem().get(0)));
// Filter #2: consent identified by title, text and version number.
consent = new Consent();
consent.setOfficialTitle(iso.ST(TEST_CONSENT_NAME2));
consent.setText(iso.ED(TEST_CONSENT_DESC2));
consent.setVersionNumberText(iso.ST(TEST_CONSENT_VERSION2));
request.setConsent(consent);
studySubjects = service.querySubjectRegistryByConsent(request).getStudySubjects();
assertNotNull(studySubjects);
assertEquals(1, studySubjects.getItem().size());
expected = createStudySubjectJAXBObjectModified();
expected.getStudySubjectProtocolVersion().getStudySubjectConsentVersion().addAll(getSubjectConsents());
expected.getStudySubjectStatus().add(createStatus2());
expected.setEntity(createPersonModified());
// If the default c3pr generated system identifier is sent in the request, there is no need to strip it from the created subject
//system identifiers in response, otherwise we need to remove the default system identifier from response before doing deep compare.
if(!ifC3PRDefaultSystemIdentifierSent(expected.getEntity().getBiologicEntityIdentifier())){
stripC3PRDefaultSystemIdentifier( studySubjects.getItem().get(0).getEntity().getBiologicEntityIdentifier());
}
assertTrue(BeanUtils.deepCompare(expected, studySubjects.getItem().get(0)));
// Filter #3: wrong version number must produce an empty result set.
consent = new Consent();
consent.setOfficialTitle(iso.ST(TEST_CONSENT_NAME2));
consent.setVersionNumberText(iso.ST("Wrong Version"));
request.setConsent(consent);
studySubjects = service.querySubjectRegistryByConsent(request).getStudySubjects();
assertNotNull(studySubjects);
assertEquals(0, studySubjects.getItem().size());
}
/**
 * Verifies {@code queryConsentsByStudySubject}: with no consent filter both
 * consents are returned; filtering by consent #1 or #2 returns the matching
 * one; a wrong version number returns nothing.
 * <p>
 * NOTE(review): relies on state created by preceding tests in the run.
 */
protected void executeQueryConsentsByStudySubjectTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final QueryConsentsByStudySubjectRequest request = new QueryConsentsByStudySubjectRequest();
request.setStudySubjectIdentifier(createSubjectSystemId());
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
// No filter: both consents come back.
DSETStudySubjectConsentVersion subjectConsents = service.queryConsentsByStudySubject(request).getStudySubjectConsents();
assertNotNull(subjectConsents);
assertEquals(2, subjectConsents.getItem().size());
assertTrue(BeanUtils.deepCompare(getSubjectConsents(), subjectConsents.getItem()));
// Filter by consent #1 (title + text, no version number).
Consent consent = new Consent();
consent.setOfficialTitle(iso.ST(TEST_CONSENT_NAME1));
consent.setText(iso.ED(TEST_CONSENT_DESC1));
// consent.setVersionNumberText(iso.ST(TEST_CONSENT_VERSION1));
request.setConsent(consent);
subjectConsents = service.queryConsentsByStudySubject(request).getStudySubjectConsents();
assertNotNull(subjectConsents);
assertEquals(1, subjectConsents.getItem().size());
assertTrue(BeanUtils.deepCompare(getSubjectConsents().get(0), subjectConsents.getItem().get(0)));
// Filter by consent #2 (title + text + version number).
consent = new Consent();
consent.setOfficialTitle(iso.ST(TEST_CONSENT_NAME2));
consent.setText(iso.ED(TEST_CONSENT_DESC2));
consent.setVersionNumberText(iso.ST(TEST_CONSENT_VERSION2));
request.setConsent(consent);
subjectConsents = service.queryConsentsByStudySubject(request).getStudySubjectConsents();
assertNotNull(subjectConsents);
assertEquals(1, subjectConsents.getItem().size());
assertTrue(BeanUtils.deepCompare(getSubjectConsents().get(1), subjectConsents.getItem().get(0)));
// Wrong version number: nothing matches.
consent = new Consent();
consent.setOfficialTitle(iso.ST(TEST_CONSENT_NAME2));
consent.setVersionNumberText(iso.ST("Wrong Version"));
request.setConsent(consent);
subjectConsents = service.queryConsentsByStudySubject(request).getStudySubjectConsents();
assertNotNull(subjectConsents);
assertEquals(0, subjectConsents.getItem().size());
}
/**
 * Verifies {@code importSubjectRegistry}: imports one fully populated subject
 * and expects the echoed subject to deep-equal the import payload.
 */
protected void executeImportSubjectRegistryTest() throws SQLException, Exception {
SubjectRegistry service = getService();
// successful creation
final ImportStudySubjectRegistryRequest request = new ImportStudySubjectRegistryRequest();
request.setStudySubjects(new DSETStudySubject());
request.getStudySubjects().getItem().add(createStudySubjectForImport());
// Diagnostic dump of the request XML.
JAXBContext context = JAXBContext.newInstance("edu.duke.cabig.c3pr.webservice.subjectregistry");
Marshaller marshaller = context.createMarshaller();
marshaller.marshal( request , System.out );
System.out.flush();
System.out.println();
DSETStudySubject createdStudySubjects = service.importSubjectRegistry(request).getStudySubjects();
assertNotNull(createdStudySubjects);
assertEquals(1, createdStudySubjects.getItem().size());
assertTrue(BeanUtils.deepCompare(createStudySubjectForImport(), createdStudySubjects.getItem().get(0)));
}
/**
 * Builds a {@link SubjectRegistry} client port against the configured WSDL
 * location, with the test security handler installed.
 *
 * @return a ready-to-use service port
 */
protected SubjectRegistry getService() {
    final SubjectRegistryService endpoint = new SubjectRegistryService(wsdlLocation, SERVICE_NAME);
    SOAPUtils.installSecurityHandler(endpoint);
    return endpoint.getSubjectRegistry();
}
/**
 * Builds the baseline study-subject request payload: payment method, data
 * entry status, and the local + system subject identifiers.
 *
 * @return a new, fully populated {@code StudySubject}
 */
public edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject createStudySubject() {
    final edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject subject =
            new edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject();
    subject.getSubjectIdentifier().add(createSubjectId());
    subject.getSubjectIdentifier().add(createSubjectSystemId());
    subject.setPaymentMethodCode(iso.CD(TEST_PAYMENT_METHOD));
    subject.setStatusCode(iso.CD(TEST_DATA_ENTRY_STATUS));
    return subject;
}
/**
 * Builds the study subject used by the import test: import identifier,
 * protocol version, import person, and both registry statuses.
 *
 * @return a new, fully populated {@code StudySubject} for import
 */
public edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject createStudySubjectForImport() {
    final edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject subject =
            new edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject();
    subject.getSubjectIdentifier().add(createSubjectIdForImport());
    subject.setEntity(createPersonForImport());
    subject.setStudySubjectProtocolVersion(createStudySubjectProtocolVersion());
    subject.setPaymentMethodCode(iso.CD(TEST_PAYMENT_METHOD));
    subject.setStatusCode(iso.CD(TEST_DATA_ENTRY_STATUS));
    subject.getStudySubjectStatus().add(createStatus1());
    subject.getStudySubjectStatus().add(createStatus2());
    return subject;
}
/**
 * Builds the "modified" study-subject request payload used by the update
 * tests: modified payment method and status codes plus the modified local
 * identifier and the unchanged system identifier.
 *
 * @return a new, fully populated modified {@code StudySubject}
 */
public edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject createStudySubjectModified() {
    final edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject subject =
            new edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject();
    subject.getSubjectIdentifier().add(createSubjectIdModified());
    subject.getSubjectIdentifier().add(createSubjectSystemId());
    subject.setPaymentMethodCode(iso.CD(TEST_PAYMENT_METHOD_MODIFIED));
    subject.setStatusCode(iso.CD(TEST_DATA_ENTRY_STATUS_MODIFIED));
    return subject;
}
/**
 * Parses a date/time string in the test's {@code TS_DATETIME_PATTERN} format.
 *
 * @param isoDate the textual date to parse
 * @return the parsed {@link Date}
 * @throws RuntimeException wrapping the {@link ParseException} if the input
 *         does not match the pattern
 */
protected static Date parseISODate(String isoDate) {
    final String[] acceptedPatterns = { TS_DATETIME_PATTERN };
    try {
        return DateUtils.parseDate(isoDate, acceptedPatterns);
    } catch (ParseException cause) {
        throw new RuntimeException(cause);
    }
}
/**
 * Builds the test organization identifier (primary, CTEP type).
 *
 * @return a new {@link OrganizationIdentifier}
 */
public static OrganizationIdentifier createOrgId(){
    final OrganizationIdentifier organizationId = new OrganizationIdentifier();
    organizationId.setTypeCode(iso.CD(ORG_ID_TYPE_CTEP));
    organizationId.setIdentifier(iso.II(TEST_ORG_ID));
    organizationId.setPrimaryIndicator(iso.BL(true));
    return organizationId;
}
/**
 * Builds the test subject's MRN identifier, assigned by the test
 * organization, with a null-flavored effective date range.
 *
 * @return a new {@link BiologicEntityIdentifier}
 */
public static BiologicEntityIdentifier createBioEntityId() {
    final BiologicEntityIdentifier mrn = new BiologicEntityIdentifier();
    mrn.setIdentifier(iso.II(TEST_BIO_ID));
    mrn.setTypeCode(iso.CD(ORG_ID_TYPE_MRN));
    mrn.setAssigningOrganization(createOrganization());
    mrn.setEffectiveDateRange(iso.IVLTSDateTime(NullFlavor.NI));
    mrn.setPrimaryIndicator(iso.BL(TEST_BIO_ID_PRIMARYINDICATOR));
    return mrn;
}
/**
 * Builds the C3PR-generated system identifier for the test subject.
 *
 * @param isPrimary whether the identifier is flagged as primary
 * @return a new system {@link BiologicEntityIdentifier}
 */
protected static BiologicEntityIdentifier createBioEntitySystemId(boolean isPrimary) {
    final BiologicEntityIdentifier systemId = new BiologicEntityIdentifier();
    systemId.setIdentifier(II(SUBJECT_SYSTEM_ID));
    systemId.setTypeCode(CD(SYSTEM_ID_TYPE));
    systemId.getTypeCode().setCodeSystemName(SYSTEM_NAME);
    systemId.setEffectiveDateRange(IVLTSDateTime(NullFlavor.NI));
    systemId.setPrimaryIndicator(BL(isPrimary));
    return systemId;
}
/**
 * Builds the MRN identifier used by the import test subject.
 *
 * @return a new {@link BiologicEntityIdentifier} for import
 */
public static BiologicEntityIdentifier createBioEntityIdForImport() {
    final BiologicEntityIdentifier mrn = new BiologicEntityIdentifier();
    mrn.setIdentifier(iso.II(TEST_BIO_ID_IMPORT));
    mrn.setTypeCode(iso.CD(ORG_ID_TYPE_MRN));
    mrn.setAssigningOrganization(createOrganization());
    mrn.setEffectiveDateRange(iso.IVLTSDateTime(NullFlavor.NI));
    mrn.setPrimaryIndicator(iso.BL(TEST_BIO_ID_PRIMARYINDICATOR));
    return mrn;
}
/**
 * Builds the "modified" MRN identifier used by the demography update tests.
 *
 * @return a new modified {@link BiologicEntityIdentifier}
 */
public static BiologicEntityIdentifier createBioEntityIdModified() {
    final BiologicEntityIdentifier mrn = new BiologicEntityIdentifier();
    mrn.setIdentifier(iso.II(TEST_BIO_ID_MODIFIED));
    mrn.setTypeCode(iso.CD(ORG_ID_TYPE_MRN));
    mrn.setAssigningOrganization(createOrganization());
    mrn.setEffectiveDateRange(iso.IVLTSDateTime(NullFlavor.NI));
    mrn.setPrimaryIndicator(iso.BL(TEST_BIO_ID_PRIMARYINDICATOR));
    return mrn;
}
/**
 * Builds the "modified" C3PR-generated system identifier for the test subject.
 *
 * @param isPrimary whether the identifier is flagged as primary
 * @return a new modified system {@link BiologicEntityIdentifier}
 */
public static BiologicEntityIdentifier createBioEntitySystemIdModified(boolean isPrimary) {
    final BiologicEntityIdentifier systemId = new BiologicEntityIdentifier();
    systemId.setIdentifier(II(SUBJECT_SYSTEM_ID_MODIFIED));
    systemId.setTypeCode(CD(SYSTEM_ID_TYPE));
    systemId.getTypeCode().setCodeSystemName(SYSTEM_NAME);
    systemId.setEffectiveDateRange(IVLTSDateTime(NullFlavor.NI));
    systemId.setPrimaryIndicator(BL(isPrimary));
    return systemId;
}
/**
 * Builds the test study's document identifier, assigned by the test
 * organization.
 *
 * @return a new {@link DocumentIdentifier} for the study
 */
public static DocumentIdentifier createDocumentId() {
    final DocumentIdentifier studyId = new DocumentIdentifier();
    studyId.setIdentifier(iso.II(TEST_STUDY_ID));
    studyId.setTypeCode(iso.CD(ORG_ID_TYPE_STUDY));
    studyId.setAssigningOrganization(createOrganization());
    studyId.setPrimaryIndicator(iso.BL(TEST_STUDY_ID_PRIMARYINDICATOR));
    return studyId;
}
/**
 * Builds the system-generated study document identifier
 * (type {@code C3PR_SYSTEM}, code system {@code MAYO}).
 *
 * @return a new primary system {@link DocumentIdentifier}
 */
protected DocumentIdentifier createSystemDocumentId() {
    final DocumentIdentifier systemStudyId = new DocumentIdentifier();
    systemStudyId.setIdentifier(iso.II(TEST_SYSTEM_STUDY_ID));
    systemStudyId.setTypeCode(iso.CD("C3PR_SYSTEM"));
    systemStudyId.getTypeCode().setCodeSystemName("MAYO");
    systemStudyId.setPrimaryIndicator(iso.BL(true));
    return systemStudyId;
}
/**
 * Builds the local study-subject identifier, assigned by the test
 * organization.
 *
 * @return a new {@link SubjectIdentifier}
 */
public static SubjectIdentifier createSubjectId() {
    final SubjectIdentifier subjectId = new SubjectIdentifier();
    subjectId.setIdentifier(iso.II(TEST_STUDYSUBJECT_ID));
    subjectId.setTypeCode(iso.CD(ORG_ID_TYPE_STUDYSUBJECT));
    subjectId.setAssigningOrganization(createOrganization());
    subjectId.setPrimaryIndicator(iso.BL(TEST_STUDYSUBJECT_ID_PRIMARYINDICATOR));
    return subjectId;
}
/**
 * Builds the C3PR system-generated study-subject identifier used as the
 * lookup key in most update/query tests.
 *
 * @return a new system {@link SubjectIdentifier}
 */
public static SubjectIdentifier createSubjectSystemId() {
    final SubjectIdentifier systemId = new SubjectIdentifier();
    systemId.setIdentifier(iso.II(TEST_STUDYSUBJECT_SYSTEM_ID));
    systemId.setTypeCode(iso.CD("STUDY_SUBJECT_IDENTIFIER_TYPE"));
    systemId.getTypeCode().setCodeSystemName("C3PR_SYSTEM");
    systemId.setPrimaryIndicator(iso.BL(TEST_STUDYSUBJECT_SYSTEM_ID_PRIMARYINDICATOR));
    return systemId;
}
/**
 * Builds the study-subject identifier used by the import test.
 *
 * @return a new {@link SubjectIdentifier} for import
 */
public static SubjectIdentifier createSubjectIdForImport() {
    final SubjectIdentifier subjectId = new SubjectIdentifier();
    subjectId.setIdentifier(iso.II(TEST_STUDYSUBJECT_ID_IMPORT));
    subjectId.setTypeCode(iso.CD(ORG_ID_TYPE_STUDYSUBJECT));
    subjectId.setAssigningOrganization(createOrganization());
    subjectId.setPrimaryIndicator(iso.BL(TEST_STUDYSUBJECT_ID_PRIMARYINDICATOR));
    return subjectId;
}
/**
 * Builds the "modified" study-subject identifier (modified value and type)
 * used by the update tests.
 *
 * @return a new modified {@link SubjectIdentifier}
 */
public static SubjectIdentifier createSubjectIdModified() {
    final SubjectIdentifier subjectId = new SubjectIdentifier();
    subjectId.setIdentifier(iso.II(TEST_STUDYSUBJECT_ID_MODIFIED));
    subjectId.setTypeCode(iso.CD(ORG_ID_TYPE_STUDYSUBJECT_MODIFIED));
    subjectId.setAssigningOrganization(createOrganization());
    subjectId.setPrimaryIndicator(iso.BL(TEST_STUDYSUBJECT_ID_PRIMARYINDICATOR));
    return subjectId;
}
/**
 * Builds the test organization carrying the single CTEP identifier.
 *
 * @return a new {@link Organization}
 */
public static Organization createOrganization(){
    final Organization testOrganization = new Organization();
    testOrganization.getOrganizationIdentifier().add(createOrgId());
    return testOrganization;
}
/**
 * Builds the two expected subject-consent versions (each with two
 * question/answer milestones) used for deep comparison in the registry tests.
 * Consent #1 deliberately has no version number; consent #2 does.
 *
 * @return list containing consent #1 followed by consent #2
 */
public List<StudySubjectConsentVersion> getSubjectConsents(){
    final List<StudySubjectConsentVersion> consents = new ArrayList<StudySubjectConsentVersion>();

    // --- Consent #1 (title + text only; version number intentionally unset) ---
    final StudySubjectConsentVersion firstConsent = new StudySubjectConsentVersion();
    firstConsent.setIdentifier(iso.II(TEST_CONSENTING_DOCID1));
    firstConsent.setConsentDeliveryDate(iso.TSDateTime(TEST_CONSENT_DELIVERY_DATE1));
    firstConsent.setInformedConsentDate(iso.TSDateTime(TEST_CONSENT_SIGNED_DATE1));
    firstConsent.setConsentDeclinedDate(iso.TSDateTime(TEST_CONSENT_DECLINED_DATE1));
    firstConsent.setConsentingMethod(iso.CD(TEST_CONSENTING_METHOD1));
    firstConsent.setConsentPresenter(iso.ST(TEST_CONSENT_PRESENTER1));
    final DocumentVersion firstConsentDocument = new DocumentVersion();
    firstConsentDocument.setOfficialTitle(iso.ST(TEST_CONSENT_NAME1));
    firstConsentDocument.setText(iso.ED(TEST_CONSENT_DESC1));
    firstConsent.setConsent(firstConsentDocument);
    final PerformedStudySubjectMilestone answer11 = new PerformedStudySubjectMilestone();
    answer11.setMissedIndicator(iso.BL(TEST_CONSENT_ANS11));
    answer11.setConsentQuestion(new DocumentVersion());
    answer11.getConsentQuestion().setOfficialTitle(iso.ST(TEST_CONSENT_QUES11));
    firstConsent.getSubjectConsentAnswer().add(answer11);
    final PerformedStudySubjectMilestone answer12 = new PerformedStudySubjectMilestone();
    answer12.setMissedIndicator(iso.BL(TEST_CONSENT_ANS12));
    answer12.setConsentQuestion(new DocumentVersion());
    answer12.getConsentQuestion().setOfficialTitle(iso.ST(TEST_CONSENT_QUES12));
    firstConsent.getSubjectConsentAnswer().add(answer12);
    consents.add(firstConsent);

    // --- Consent #2 (title + text + version number) ---
    final StudySubjectConsentVersion secondConsent = new StudySubjectConsentVersion();
    secondConsent.setIdentifier(iso.II(TEST_CONSENTING_DOCID2));
    secondConsent.setConsentDeliveryDate(iso.TSDateTime(TEST_CONSENT_DELIVERY_DATE2));
    secondConsent.setInformedConsentDate(iso.TSDateTime(TEST_CONSENT_SIGNED_DATE2));
    secondConsent.setConsentDeclinedDate(iso.TSDateTime(TEST_CONSENT_DECLINED_DATE2));
    secondConsent.setConsentingMethod(iso.CD(TEST_CONSENTING_METHOD2));
    secondConsent.setConsentPresenter(iso.ST(TEST_CONSENT_PRESENTER2));
    final DocumentVersion secondConsentDocument = new DocumentVersion();
    secondConsentDocument.setOfficialTitle(iso.ST(TEST_CONSENT_NAME2));
    secondConsentDocument.setText(iso.ED(TEST_CONSENT_DESC2));
    secondConsentDocument.setVersionNumberText(iso.ST(TEST_CONSENT_VERSION2));
    secondConsent.setConsent(secondConsentDocument);
    final PerformedStudySubjectMilestone answer21 = new PerformedStudySubjectMilestone();
    answer21.setMissedIndicator(iso.BL(TEST_CONSENT_ANS21));
    answer21.setConsentQuestion(new DocumentVersion());
    answer21.getConsentQuestion().setOfficialTitle(iso.ST(TEST_CONSENT_QUES21));
    secondConsent.getSubjectConsentAnswer().add(answer21);
    final PerformedStudySubjectMilestone answer22 = new PerformedStudySubjectMilestone();
    answer22.setMissedIndicator(iso.BL(TEST_CONSENT_ANS22));
    answer22.setConsentQuestion(new DocumentVersion());
    answer22.getConsentQuestion().setOfficialTitle(iso.ST(TEST_CONSENT_QUES22));
    secondConsent.getSubjectConsentAnswer().add(answer22);
    consents.add(secondConsent);

    return consents;
}
/**
 * Builds the advanced-search criterion used by the registry query test:
 * matches on the modified study-subject identifier value.
 * (Name keeps the historical "Advace" spelling to preserve the API.)
 *
 * @return a new {@link AdvanceSearchCriterionParameter}
 */
public static AdvanceSearchCriterionParameter createAdvaceSearchParam() {
    final AdvanceSearchCriterionParameter criterion = new AdvanceSearchCriterionParameter();
    criterion.setObjectName(iso.ST(TEST_OBJ_NAME));
    criterion.setObjectContextName(iso.ST(TEST_OBJ_CTX_NAME));
    criterion.setAttributeName(iso.ST(TEST_ATTRIBUTE_NAME));
    criterion.setPredicate(iso.CD(TEST_PREDICATE));
    criterion.setValues(iso.DSETST(Arrays.asList(iso.ST(TEST_STUDYSUBJECT_ID_MODIFIED))));
    return criterion;
}
/**
 * Builds registry status #1: code, comment, date and two reason codes.
 *
 * @return a new {@link PerformedStudySubjectMilestone}
 */
public static PerformedStudySubjectMilestone createStatus1(){
    final PerformedStudySubjectMilestone firstStatus = new PerformedStudySubjectMilestone();
    firstStatus.setStatusCode(iso.CD(TEST_REGISTRYSTATUS_CODE1));
    firstStatus.setStatusDate(iso.TSDateTime(TEST_REGISTRYSTATUS_DATE1));
    firstStatus.setComment(iso.ST(TEST_REGISTRYSTATUS_COMMENT1));
    final DSETCD reasons = new DSETCD();
    reasons.getItem().add(iso.CD(TEST_REGISTRYSTATUS_REASON11));
    reasons.getItem().add(iso.CD(TEST_REGISTRYSTATUS_REASON12));
    firstStatus.setReasonCode(reasons);
    return firstStatus;
}
/**
 * Builds registry status #2: code, date and two reason codes (no comment).
 *
 * @return a new {@link PerformedStudySubjectMilestone}
 */
public static PerformedStudySubjectMilestone createStatus2(){
    final PerformedStudySubjectMilestone secondStatus = new PerformedStudySubjectMilestone();
    secondStatus.setStatusCode(iso.CD(TEST_REGISTRYSTATUS_CODE2));
    secondStatus.setStatusDate(iso.TSDateTime(TEST_REGISTRYSTATUS_DATE2));
    final DSETCD reasons = new DSETCD();
    reasons.getItem().add(iso.CD(TEST_REGISTRYSTATUS_REASON21));
    reasons.getItem().add(iso.CD(TEST_REGISTRYSTATUS_REASON22));
    secondStatus.setReasonCode(reasons);
    return secondStatus;
}
/**
 * Builds the expected study subject (base person, identifiers, payment
 * method, status code) together with its full protocol-version graph:
 * study document titles/identifiers and the study site's organization.
 *
 * @return a new, fully populated expected {@code StudySubject}
 */
public edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject createExpectedStudySubject(){
    final edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject subject =
            new edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject();
    subject.setEntity(createPerson());
    subject.setPaymentMethodCode(iso.CD(TEST_PAYMENT_METHOD));
    subject.setStatusCode(iso.CD(TEST_DATA_ENTRY_STATUS));
    subject.getSubjectIdentifier().add(createSubjectId());
    subject.getSubjectIdentifier().add(createSubjectSystemId());

    // Protocol-version relationship linking subject -> site -> study.
    final StudySiteProtocolVersionRelationship siteVersion = new StudySiteProtocolVersionRelationship();
    final StudySubjectProtocolVersionRelationship subjectVersion = new StudySubjectProtocolVersionRelationship();
    subjectVersion.setStudySiteProtocolVersion(siteVersion);
    subject.setStudySubjectProtocolVersion(subjectVersion);

    // Study: protocol document with titles, description and both identifiers.
    final StudyProtocolDocumentVersion protocolDocument = new StudyProtocolDocumentVersion();
    protocolDocument.setPublicTitle(iso.ST(TEST_LONGTITLE));
    protocolDocument.setOfficialTitle(iso.ST(TEST_SHORTTITLE));
    protocolDocument.setPublicDescription(iso.ST(TEST_DESC));
    final Document document = new Document();
    document.getDocumentIdentifier().add(createDocumentId());
    document.getDocumentIdentifier().add(createSystemDocumentId());
    protocolDocument.setDocument(document);
    final StudyProtocolVersion protocolVersion = new StudyProtocolVersion();
    protocolVersion.setStudyProtocolDocument(protocolDocument);
    siteVersion.setStudyProtocolVersion(protocolVersion);

    // Study site: organization carrying the test organization identifier.
    final edu.duke.cabig.c3pr.webservice.common.StudySite studySite =
            new edu.duke.cabig.c3pr.webservice.common.StudySite();
    final Organization siteOrganization = new Organization();
    siteOrganization.getOrganizationIdentifier().add(createOrgId());
    studySite.setOrganization(siteOrganization);
    siteVersion.setStudySite(studySite);

    return subject;
}
/**
 * Builds the expected "modified" study subject: base person with the
 * modified payment/status codes and identifiers, plus the same
 * protocol-version graph as the unmodified expectation.
 *
 * @return a new, fully populated modified expected {@code StudySubject}
 */
public edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject createStudySubjectJAXBObjectModified(){
    final edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject subject =
            new edu.duke.cabig.c3pr.webservice.subjectregistry.StudySubject();
    subject.setEntity(createPerson());
    subject.setPaymentMethodCode(iso.CD(TEST_PAYMENT_METHOD_MODIFIED));
    subject.setStatusCode(iso.CD(TEST_DATA_ENTRY_STATUS_MODIFIED));
    subject.getSubjectIdentifier().add(createSubjectIdModified());
    subject.getSubjectIdentifier().add(createSubjectSystemId());

    // Protocol-version relationship linking subject -> site -> study.
    final StudySiteProtocolVersionRelationship siteVersion = new StudySiteProtocolVersionRelationship();
    final StudySubjectProtocolVersionRelationship subjectVersion = new StudySubjectProtocolVersionRelationship();
    subjectVersion.setStudySiteProtocolVersion(siteVersion);
    subject.setStudySubjectProtocolVersion(subjectVersion);

    // Study: protocol document with titles, description and both identifiers.
    final StudyProtocolDocumentVersion protocolDocument = new StudyProtocolDocumentVersion();
    protocolDocument.setPublicTitle(iso.ST(TEST_LONGTITLE));
    protocolDocument.setOfficialTitle(iso.ST(TEST_SHORTTITLE));
    protocolDocument.setPublicDescription(iso.ST(TEST_DESC));
    final Document document = new Document();
    document.getDocumentIdentifier().add(createDocumentId());
    document.getDocumentIdentifier().add(createSystemDocumentId());
    protocolDocument.setDocument(document);
    final StudyProtocolVersion protocolVersion = new StudyProtocolVersion();
    protocolVersion.setStudyProtocolDocument(protocolDocument);
    siteVersion.setStudyProtocolVersion(protocolVersion);

    // Study site: organization carrying the test organization identifier.
    final edu.duke.cabig.c3pr.webservice.common.StudySite studySite =
            new edu.duke.cabig.c3pr.webservice.common.StudySite();
    final Organization siteOrganization = new Organization();
    siteOrganization.getOrganizationIdentifier().add(createOrgId());
    studySite.setOrganization(siteOrganization);
    siteVersion.setStudySite(studySite);

    return subject;
}
/**
 * Builds the baseline Person test fixture used by the subject-registry tests:
 * two biologic-entity identifiers (a regular one plus the system identifier),
 * demographics, a name with all five name parts, two postal addresses and a
 * bag of telecom addresses (email, phone, fax). All values come from the
 * TEST_* constants of this class.
 *
 * @return a fully populated Person
 */
public static Person createPerson() {
    Person person = new Person();
    // Regular identifier plus the C3PR system identifier (primary flag = true).
    person.getBiologicEntityIdentifier().add(createBioEntityId());
    person.getBiologicEntityIdentifier().add(createBioEntitySystemId(true));
    person.setAdministrativeGenderCode(iso.CD(GENDER_MALE));
    person.setBirthDate(iso.TSDateTime(TEST_BIRTH_DATE_ISO));
    person.setEthnicGroupCode(iso.DSETCD(iso.CD(ETHNIC_CODE_NOT_REPORTED)));
    person.setMaritalStatusCode(iso.CD(MARITAL_STATUS_SINGLE));
    // Name with given, middle (also GIV), family, prefix and suffix parts.
    person.setName(iso.DSETENPN(iso.ENPN(iso.ENXP(TEST_FIRST_NAME,
            EntityNamePartType.GIV), iso.ENXP(TEST_MID_NAME,
            EntityNamePartType.GIV), iso.ENXP(TEST_LAST_NAME,
            EntityNamePartType.FAM),iso.ENXP(TEST_NAME_PREFIX, EntityNamePartType.PFX),
            iso.ENXP(TEST_NAME_SUFFIX, EntityNamePartType.SFX))));
    // Primary postal address (street, city, state, zip, country).
    person.setPostalAddress(iso.DSETAD(iso.AD(iso.ADXP(TEST_STREET_ADDRESS,
            AddressPartType.SAL), iso.ADXP(TEST_CITY_NAME,
            AddressPartType.CTY), iso.ADXP(TEST_STATE_CODE,
            AddressPartType.STA), iso.ADXP(TEST_ZIP_CODE,
            AddressPartType.ZIP), iso.ADXP(TEST_COUNTRY,
            AddressPartType.CNT))));
    // Second postal address appended to the same DSET.
    person.getPostalAddress().getItem().add(iso.AD(iso.ADXP(TEST_STREET_ADDRESS_2,
            AddressPartType.SAL), iso.ADXP(TEST_CITY_NAME_2,
            AddressPartType.CTY), iso.ADXP(TEST_STATE_CODE_2,
            AddressPartType.STA), iso.ADXP(TEST_ZIP_CODE_2,
            AddressPartType.ZIP), iso.ADXP(TEST_COUNTRY_2,
            AddressPartType.CNT)));
    person.setRaceCode(iso.DSETCD(iso.CD(RACE_WHITE)));
    person.setTelecomAddress(iso.BAGTEL(iso.TEL(TEST_EMAIL_ADDR_ISO),
            iso.TEL(TEST_PHONE_ISO), iso.TEL(TEST_FAX_ISO)));
    return person;
}
/**
 * Builds the "modified" Person fixture: mirrors {@link #createPerson()} but
 * substitutes every *_MODIFIED constant for the primary values. NOTE(review):
 * the second postal address intentionally(?) still uses the unmodified
 * TEST_*_2 constants — confirm against the update-subject test expectations.
 *
 * @return a fully populated Person with modified values
 */
public static Person createPersonModified() {
    Person person = new Person();
    person.getBiologicEntityIdentifier().add(createBioEntityIdModified());
    person.getBiologicEntityIdentifier().add(createBioEntitySystemIdModified(true));
    person.setAdministrativeGenderCode(iso.CD(GENDER_MALE_MODIFIED));
    person.setBirthDate(iso.TSDateTime(TEST_BIRTH_DATE_ISO_MODIFIED));
    person.setEthnicGroupCode(iso.DSETCD(iso.CD(ETHNIC_CODE_MODIFIED)));
    person.setMaritalStatusCode(iso.CD(MARITAL_STATUS_SINGLE_MODIFIED));
    // Modified name with all five name parts.
    person.setName(iso.DSETENPN(iso.ENPN(iso.ENXP(TEST_FIRST_NAME_MODIFIED,
            EntityNamePartType.GIV), iso.ENXP(TEST_MID_NAME_MODIFIED,
            EntityNamePartType.GIV), iso.ENXP(TEST_LAST_NAME_MODIFIED,
            EntityNamePartType.FAM), iso.ENXP(TEST_NAME_PREFIX_MODIFIED,
            EntityNamePartType.PFX), iso.ENXP(TEST_NAME_SUFFIX_MODIFIED,
            EntityNamePartType.SFX))));
    // Modified primary postal address.
    person.setPostalAddress(iso.DSETAD(iso.AD(iso.ADXP(TEST_STREET_ADDRESS_MODIFIED,
            AddressPartType.SAL), iso.ADXP(TEST_CITY_NAME_MODIFIED,
            AddressPartType.CTY), iso.ADXP(TEST_STATE_CODE_MODIFIED,
            AddressPartType.STA), iso.ADXP(TEST_ZIP_CODE_MODIFIED,
            AddressPartType.ZIP), iso.ADXP(TEST_COUNTRY_MODIFIED,
            AddressPartType.CNT))));
    // Second (unmodified) postal address appended to the same DSET.
    person.getPostalAddress().getItem().add(iso.AD(iso.ADXP(TEST_STREET_ADDRESS_2,
            AddressPartType.SAL), iso.ADXP(TEST_CITY_NAME_2,
            AddressPartType.CTY), iso.ADXP(TEST_STATE_CODE_2,
            AddressPartType.STA), iso.ADXP(TEST_ZIP_CODE_2,
            AddressPartType.ZIP), iso.ADXP(TEST_COUNTRY_2,
            AddressPartType.CNT)));
    person.setRaceCode(iso.DSETCD(iso.CD(RACE_WHITE_MODIFIED)));
    person.setTelecomAddress(iso.BAGTEL(iso.TEL(TEST_EMAIL_ADDR_ISO_MODIFIED),
            iso.TEL(TEST_PHONE_ISO_MODIFIED), iso.TEL(TEST_FAX_ISO_MODIFIED)));
    return person;
}
/**
 * Builds the Person fixture used by import tests. Same demographics as
 * {@link #createPerson()}, but with only one biologic-entity identifier
 * (the import-specific one, no system identifier) and a single postal
 * address.
 *
 * @return a Person suitable for the import workflow
 */
public static Person createPersonForImport() {
    Person person = new Person();
    // Import variant: only the import identifier, no system identifier.
    person.getBiologicEntityIdentifier().add(createBioEntityIdForImport());
    person.setAdministrativeGenderCode(iso.CD(GENDER_MALE));
    person.setBirthDate(iso.TSDateTime(TEST_BIRTH_DATE_ISO));
    person.setEthnicGroupCode(iso.DSETCD(iso.CD(ETHNIC_CODE_NOT_REPORTED)));
    person.setMaritalStatusCode(iso.CD(MARITAL_STATUS_SINGLE));
    // Name with given, middle (also GIV), family, prefix and suffix parts.
    person.setName(iso.DSETENPN(iso.ENPN(iso.ENXP(TEST_FIRST_NAME,
            EntityNamePartType.GIV), iso.ENXP(TEST_MID_NAME,
            EntityNamePartType.GIV), iso.ENXP(TEST_LAST_NAME,
            EntityNamePartType.FAM),iso.ENXP(TEST_NAME_PREFIX, EntityNamePartType.PFX),
            iso.ENXP(TEST_NAME_SUFFIX, EntityNamePartType.SFX))));
    // Single postal address (no secondary address for the import variant).
    person.setPostalAddress(iso.DSETAD(iso.AD(iso.ADXP(TEST_STREET_ADDRESS,
            AddressPartType.SAL), iso.ADXP(TEST_CITY_NAME,
            AddressPartType.CTY), iso.ADXP(TEST_STATE_CODE,
            AddressPartType.STA), iso.ADXP(TEST_ZIP_CODE,
            AddressPartType.ZIP), iso.ADXP(TEST_COUNTRY,
            AddressPartType.CNT))));
    person.setRaceCode(iso.DSETCD(iso.CD(RACE_WHITE)));
    person.setTelecomAddress(iso.BAGTEL(iso.TEL(TEST_EMAIL_ADDR_ISO),
            iso.TEL(TEST_PHONE_ISO), iso.TEL(TEST_FAX_ISO)));
    return person;
}
/**
 * Builds the StudySubjectProtocolVersionRelationship fixture: study protocol
 * version + protocol document (titles, description, document identifiers),
 * the study site with its organization identifier, and the subject consents.
 *
 * @return a fully populated protocol-version relationship
 */
private StudySubjectProtocolVersionRelationship createStudySubjectProtocolVersion(){
    final StudySubjectProtocolVersionRelationship protocolVersion = new StudySubjectProtocolVersionRelationship();
    final StudySiteProtocolVersionRelationship siteVersion = new StudySiteProtocolVersionRelationship();
    protocolVersion.setStudySiteProtocolVersion(siteVersion);

    // Study: protocol version + protocol document with titles/description and document ids.
    final StudyProtocolVersion studyVersion = new StudyProtocolVersion();
    siteVersion.setStudyProtocolVersion(studyVersion);
    final StudyProtocolDocumentVersion protocolDocument = new StudyProtocolDocumentVersion();
    studyVersion.setStudyProtocolDocument(protocolDocument);
    protocolDocument.setPublicTitle(iso.ST(TEST_LONGTITLE));
    protocolDocument.setOfficialTitle(iso.ST(TEST_SHORTTITLE));
    protocolDocument.setPublicDescription(iso.ST(TEST_DESC));
    final Document document = new Document();
    protocolDocument.setDocument(document);
    document.getDocumentIdentifier().add(createDocumentId());
    document.getDocumentIdentifier().add(createSystemDocumentId());

    // Study site: organization carrying the organization identifier.
    final edu.duke.cabig.c3pr.webservice.common.StudySite studySite =
            new edu.duke.cabig.c3pr.webservice.common.StudySite();
    siteVersion.setStudySite(studySite);
    final Organization organization = new Organization();
    studySite.setOrganization(organization);
    organization.getOrganizationIdentifier().add(createOrgId());

    // Attach the subject consent versions.
    protocolVersion.getStudySubjectConsentVersion().addAll(getSubjectConsents());
    return protocolVersion;
}
/**
 * Returns true if the given identifiers contain the C3PR default system
 * identifier, i.e. one whose type code's code-system name matches
 * DEFAULT_SUBJECT_SYSTEM_ID_NAME and whose code value matches
 * DEFAULT_SUBJECT_SYSTEM_ID_TYPE.
 *
 * @param bioIdentifiers identifiers to scan; must not be null
 * @return true if a C3PR default system identifier is present
 */
private boolean ifC3PRDefaultSystemIdentifierSent(List<BiologicEntityIdentifier> bioIdentifiers){
    for (BiologicEntityIdentifier bioIdentifier : bioIdentifiers) {
        // BUG FIX: the original compared getTypeCode() (a code object) directly to
        // the String constant DEFAULT_SUBJECT_SYSTEM_ID_TYPE, which can never be
        // equal, so the method always returned false. Compare the code *value*
        // instead, exactly as stripC3PRDefaultSystemIdentifier() does.
        if (bioIdentifier.getTypeCode().getCodeSystemName() != null
                && bioIdentifier.getTypeCode().getCodeSystemName().equals(DEFAULT_SUBJECT_SYSTEM_ID_NAME)
                && bioIdentifier.getTypeCode().getCode().equals(DEFAULT_SUBJECT_SYSTEM_ID_TYPE)) {
            return true;
        }
    }
    return false;
}
/**
 * Removes every C3PR default system identifier (code-system name equals
 * DEFAULT_SUBJECT_SYSTEM_ID_NAME and code equals DEFAULT_SUBJECT_SYSTEM_ID_TYPE)
 * from the given list, in place. Uses an explicit Iterator so removal during
 * iteration is safe.
 *
 * @param bioIdentifiers identifiers to filter; modified in place
 */
private void stripC3PRDefaultSystemIdentifier(List<BiologicEntityIdentifier> bioIdentifiers){
for(Iterator<BiologicEntityIdentifier> bioIdIterator = bioIdentifiers.iterator();bioIdIterator.hasNext();){
BiologicEntityIdentifier bioId = bioIdIterator.next();
if(bioId.getTypeCode().getCodeSystemName() != null && bioId.getTypeCode().getCodeSystemName().equals(DEFAULT_SUBJECT_SYSTEM_ID_NAME)
&& bioId.getTypeCode().getCode().equals(DEFAULT_SUBJECT_SYSTEM_ID_TYPE)){
// Safe removal through the iterator (avoids ConcurrentModificationException).
bioIdIterator.remove();
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* NormalizedString.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: SNAPSHOT Built on : Dec 21, 2007 (04:03:30 LKT)
*/
package org.apache.axis2.databinding.types.xsd;
/**
 * Axis2 ADB (Axis Data Binding) bean wrapping a single
 * {@code xsd:normalizedString} value. Auto-generated from WSDL; the class
 * implements the ADBBean contract: it can serialize itself to an XML stream
 * and expose itself as a pull parser.
 */
public class NormalizedString
        implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
       name = normalizedString
       Namespace URI = http://www.w3.org/2001/XMLSchema
       Namespace Prefix = ns1
     */

    /**
     * Returns the conventional prefix for the XML Schema namespace, or a
     * BeanUtil-generated unique prefix for any other namespace.
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://www.w3.org/2001/XMLSchema")){
            return "xsd";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for NormalizedString
     */
    protected org.apache.axis2.databinding.types.NormalizedString localNormalizedString ;

    /**
     * Auto generated getter method
     * @return org.apache.axis2.databinding.types.NormalizedString
     */
    public  org.apache.axis2.databinding.types.NormalizedString getNormalizedString(){
        return localNormalizedString;
    }

    /**
     * Auto generated setter method
     * @param param NormalizedString
     */
    public void setNormalizedString(org.apache.axis2.databinding.types.NormalizedString param){
        this.localNormalizedString=param;
    }

    // NOTE(review): throws NullPointerException when the value was never set;
    // generated code assumes a non-null value (serialize() enforces this too).
    public java.lang.String toString(){
        return localNormalizedString.toString();
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            // Some reader implementations reject unknown properties with IAE.
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement; serialization is
     * deferred until the element is actually consumed.
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                        NormalizedString.this.serialize(parentQName,factory,xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }

    // Convenience overload: serialize without an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    /**
     * Writes this value as an element named by parentQName, optionally adding
     * an xsi:type="xsd:normalizedString" attribute. Throws ADBException if the
     * value is null.
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
            boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        java.lang.String prefix = null;
        java.lang.String namespace = null;
        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                // Namespace already bound on the writer; reuse its prefix.
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                // Bind the (possibly generated) prefix for this namespace.
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            // No namespace: plain unqualified element.
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }
        if (serializeType){
            // Emit xsi:type so a reader can resolve the concrete schema type.
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://www.w3.org/2001/XMLSchema");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":normalizedString",
                        xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "normalizedString",
                        xmlWriter);
            }
        }
        if (localNormalizedString==null){
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("normalizedString cannot be null!!");
        }else{
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localNormalizedString));
        }
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute; the QName's namespace is
     * registered first so the lexical value can use its prefix.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
            javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames as character data.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until we find a prefix not already bound in scope.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();
        // The element text marker followed by the converted value.
        elementList.add(org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT);
        if (localNormalizedString != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localNormalizedString));
        } else {
            throw new org.apache.axis2.databinding.ADBException("normalizedString cannot be null!!");
        }
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{

        // NOTE(review): namespaceURI is accepted but unused by the generated
        // converter call; kept for API symmetry with other ADB beans.
        public static NormalizedString fromString(java.lang.String value,
                java.lang.String namespaceURI){
            NormalizedString returnValue = new  NormalizedString();
            returnValue.setNormalizedString(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToNormalizedString(value));
            return returnValue;
        }

        // Resolves a possibly prefix-qualified lexical value against the
        // reader's namespace context, then delegates to fromString above.
        public static NormalizedString fromString(javax.xml.stream.XMLStreamReader xmlStreamReader,
                java.lang.String content) {
            if (content.indexOf(":") > -1){
                java.lang.String prefix = content.substring(0,content.indexOf(":"));
                java.lang.String namespaceUri = xmlStreamReader.getNamespaceContext().getNamespaceURI(prefix);
                return NormalizedString.Factory.fromString(content,namespaceUri);
            } else {
                return NormalizedString.Factory.fromString(content,"");
            }
        }

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static NormalizedString parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            NormalizedString object =
                    new NormalizedString();
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                // Advance to the first start/end element event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;
                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"normalizedString".equals(type)){
                            //find namespace for the prefix
                            // xsi:type names a subtype: delegate to the extension mapper.
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (NormalizedString)org.apache.axis2.databinding.types.xsd.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                        }
                    }
                }
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                while(!reader.isEndElement()) {
                    if (reader.isStartElement() || reader.hasText()){
                        // NOTE(review): the inner duplicate condition is a quirk of the
                        // ADB code generator; the else branch is unreachable here.
                        if (reader.isStartElement() || reader.hasText()){
                            java.lang.String content = reader.getElementText();
                            object.setNormalizedString(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToNormalizedString(content));
                        }  // End of if for expected property start element
                        else{
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                        }
                    } else {
                        reader.next();
                    }
                }  // end of while loop
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
| |
/*
* Copyright (c) 2010-2015. Mogujie Inc. All Rights Reserved.
*/
/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.mogujie.recsys.soa.idl;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2015-11-13")
public class User implements org.apache.thrift.TBase<User, User._Fields>, java.io.Serializable, Cloneable, Comparable<User> {
// Wire metadata: struct name and one TField descriptor per member
// (name, thrift wire type, field id) — used by the read/write schemes.
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("User");
private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I32, (short)1);
private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final org.apache.thrift.protocol.TField AGE_FIELD_DESC = new org.apache.thrift.protocol.TField("age", org.apache.thrift.protocol.TType.I32, (short)3);
private static final org.apache.thrift.protocol.TField SEX_FIELD_DESC = new org.apache.thrift.protocol.TField("sex", org.apache.thrift.protocol.TType.STRING, (short)4);

// Serialization scheme registry: standard (field-tagged) and tuple (compact) forms.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new UserStandardSchemeFactory());
schemes.put(TupleScheme.class, new UserTupleSchemeFactory());
}

// Struct members; presence of the primitives is tracked via __isset_bitfield.
public int id; // required
public String name; // optional
public int age; // optional
public String sex; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
ID((short)1, "id"),
NAME((short)2, "name"),
AGE((short)3, "age"),
SEX((short)4, "sex");

// Lookup table from field name to enum constant, built once at class load.
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}

/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // ID
return ID;
case 2: // NAME
return NAME;
case 3: // AGE
return AGE;
case 4: // SEX
return SEX;
default:
return null;
}
}

/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}

/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}

private final short _thriftId;
private final String _fieldName;

_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}

/** @return the wire field id of this field */
public short getThriftFieldId() {
return _thriftId;
}

/** @return the IDL name of this field */
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Bit positions in __isset_bitfield tracking whether the primitive
// fields (id, age) have been explicitly assigned.
private static final int __ID_ISSET_ID = 0;
private static final int __AGE_ISSET_ID = 1;
private byte __isset_bitfield = 0;
// Fields declared optional in the IDL; skipped on the wire when unset.
private static final _Fields optionals[] = {_Fields.NAME,_Fields.AGE,_Fields.SEX};
// Reflection-style metadata (requirement level + value type) per field,
// registered globally with Thrift's FieldMetaData registry.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.AGE, new org.apache.thrift.meta_data.FieldMetaData("age", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.SEX, new org.apache.thrift.meta_data.FieldMetaData("sex", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(User.class, metaDataMap);
}
/** No-arg constructor; all fields unset. */
public User() {
}

/** Convenience constructor for the single required field. */
public User(
int id)
{
this();
this.id = id;
setIdIsSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 * Strings are immutable, so copying the references is sufficient here;
 * the isset bitfield is copied wholesale to preserve presence flags.
 */
public User(User other) {
__isset_bitfield = other.__isset_bitfield;
this.id = other.id;
if (other.isSetName()) {
this.name = other.name;
}
this.age = other.age;
if (other.isSetSex()) {
this.sex = other.sex;
}
}
/** @return a deep copy of this struct (delegates to the copy constructor) */
public User deepCopy() {
return new User(this);
}

/** Resets every field to its unset/default state. */
@Override
public void clear() {
setIdIsSet(false);
this.id = 0;
this.name = null;
setAgeIsSet(false);
this.age = 0;
this.sex = null;
}
/** @return the required id field */
public int getId() {
return this.id;
}

/** Fluent setter; also marks id as set in the bitfield. */
public User setId(int id) {
this.id = id;
setIdIsSet(true);
return this;
}

/** Marks id as unset (value itself is left untouched). */
public void unsetId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID);
}

/** Returns true if field id is set (has been assigned a value) and false otherwise */
public boolean isSetId() {
return EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID);
}

/** Sets or clears the id presence bit. */
public void setIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value);
}
/** @return the optional name field (null when unset) */
public String getName() {
return this.name;
}

/** Fluent setter; presence for reference fields is simply non-null. */
public User setName(String name) {
this.name = name;
return this;
}

/** Marks name as unset by nulling it. */
public void unsetName() {
this.name = null;
}

/** Returns true if field name is set (has been assigned a value) and false otherwise */
public boolean isSetName() {
return this.name != null;
}

/** Clearing the presence flag nulls the field; setting it is a no-op. */
public void setNameIsSet(boolean value) {
if (!value) {
this.name = null;
}
}
/** @return the optional age field (0 when unset; check isSetAge()) */
public int getAge() {
return this.age;
}

/** Fluent setter; also marks age as set in the bitfield. */
public User setAge(int age) {
this.age = age;
setAgeIsSet(true);
return this;
}

/** Marks age as unset (value itself is left untouched). */
public void unsetAge() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __AGE_ISSET_ID);
}

/** Returns true if field age is set (has been assigned a value) and false otherwise */
public boolean isSetAge() {
return EncodingUtils.testBit(__isset_bitfield, __AGE_ISSET_ID);
}

/** Sets or clears the age presence bit. */
public void setAgeIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __AGE_ISSET_ID, value);
}
/** @return the optional sex field (null when unset) */
public String getSex() {
return this.sex;
}

/** Fluent setter; presence for reference fields is simply non-null. */
public User setSex(String sex) {
this.sex = sex;
return this;
}

/** Marks sex as unset by nulling it. */
public void unsetSex() {
this.sex = null;
}

/** Returns true if field sex is set (has been assigned a value) and false otherwise */
public boolean isSetSex() {
return this.sex != null;
}

/** Clearing the presence flag nulls the field; setting it is a no-op. */
public void setSexIsSet(boolean value) {
if (!value) {
this.sex = null;
}
}
/**
 * Generic setter used by Thrift's reflective machinery.
 * A null value unsets the field; otherwise the value is cast to the
 * field's Java type (ClassCastException on a wrong type is intended).
 */
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case ID:
if (value == null) {
unsetId();
} else {
setId((Integer)value);
}
break;

case NAME:
if (value == null) {
unsetName();
} else {
setName((String)value);
}
break;

case AGE:
if (value == null) {
unsetAge();
} else {
setAge((Integer)value);
}
break;

case SEX:
if (value == null) {
unsetSex();
} else {
setSex((String)value);
}
break;

}
}
/**
 * Generic getter used by Thrift's reflective machinery; primitives are
 * autoboxed. Throws IllegalStateException only if the switch is ever
 * non-exhaustive (cannot happen for the current field set).
 */
public Object getFieldValue(_Fields field) {
switch (field) {
case ID:
return getId();

case NAME:
return getName();

case AGE:
return getAge();

case SEX:
return getSex();

}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}

// Dispatch to the per-field presence check.
switch (field) {
case ID:
return isSetId();
case NAME:
return isSetName();
case AGE:
return isSetAge();
case SEX:
return isSetSex();
}
throw new IllegalStateException();
}
/**
 * Type-checked equality entry point: delegates to {@link #equals(User)}
 * when the argument is a User; any other type (including null) is unequal.
 */
@Override
public boolean equals(Object that) {
// instanceof is false for null, so this covers the null check as well.
return (that instanceof User) && this.equals((User) that);
}
  /**
   * Field-by-field equality with another User.
   * <p>
   * id is a required primitive, so both sides are always "present" and raw
   * values are compared directly. name, age and sex are optional: the objects
   * are unequal if exactly one side has the field set, and when both sides
   * have it set the values must match.
   */
  public boolean equals(User that) {
    if (that == null)
      return false;
    // id: required primitive — always considered present on both sides.
    boolean this_present_id = true;
    boolean that_present_id = true;
    if (this_present_id || that_present_id) {
      if (!(this_present_id && that_present_id))
        return false;
      if (this.id != that.id)
        return false;
    }
    // name: optional object field — presence tracked via isSetName().
    boolean this_present_name = true && this.isSetName();
    boolean that_present_name = true && that.isSetName();
    if (this_present_name || that_present_name) {
      if (!(this_present_name && that_present_name))
        return false;
      if (!this.name.equals(that.name))
        return false;
    }
    // age: optional primitive — presence tracked via the isset bitfield.
    boolean this_present_age = true && this.isSetAge();
    boolean that_present_age = true && that.isSetAge();
    if (this_present_age || that_present_age) {
      if (!(this_present_age && that_present_age))
        return false;
      if (this.age != that.age)
        return false;
    }
    // sex: optional object field — presence tracked via isSetSex().
    boolean this_present_sex = true && this.isSetSex();
    boolean that_present_sex = true && that.isSetSex();
    if (this_present_sex || that_present_sex) {
      if (!(this_present_sex && that_present_sex))
        return false;
      if (!this.sex.equals(that.sex))
        return false;
    }
    return true;
  }
  /**
   * Hash code consistent with {@link #equals(User)}: a list of
   * (presence flag, value) pairs is accumulated and hashed, so an unset
   * optional field contributes only its "absent" flag while a set field also
   * contributes its value.
   */
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    // id is required, so its presence flag is constant true.
    boolean present_id = true;
    list.add(present_id);
    if (present_id)
      list.add(id);
    boolean present_name = true && (isSetName());
    list.add(present_name);
    if (present_name)
      list.add(name);
    boolean present_age = true && (isSetAge());
    list.add(present_age);
    if (present_age)
      list.add(age);
    boolean present_sex = true && (isSetSex());
    list.add(present_sex);
    if (present_sex)
      list.add(sex);
    return list.hashCode();
  }
  /**
   * Total ordering over User instances: fields are compared in Thrift-id
   * order (id, name, age, sex). For each field, presence is compared first
   * (unset sorts before set, since Boolean.FALSE &lt; Boolean.TRUE), then the
   * value itself via TBaseHelper when both sides have the field set.
   */
  @Override
  public int compareTo(User other) {
    // Cross-class comparison falls back to class-name ordering.
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetId()).compareTo(other.isSetId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetName()).compareTo(other.isSetName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetAge()).compareTo(other.isSetAge());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetAge()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.age, other.age);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetSex()).compareTo(other.isSetSex());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSex()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.sex, other.sex);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Resolves a numeric Thrift field id to its {@code _Fields} enum constant. */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes this struct from {@code iprot} using the scheme matching the protocol type. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  /** Serializes this struct to {@code oprot} using the scheme matching the protocol type. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  /**
   * Renders the struct as {@code User(id:..., name:..., age:..., sex:...)}.
   * The required id field is always printed; optional fields appear only when
   * set, and a set-but-null object value prints as the literal "null".
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("User(");
    boolean first = true;
    // id is required and therefore printed unconditionally.
    sb.append("id:");
    sb.append(this.id);
    first = false;
    if (isSetName()) {
      if (!first) sb.append(", ");
      sb.append("name:");
      if (this.name == null) {
        sb.append("null");
      } else {
        sb.append(this.name);
      }
      first = false;
    }
    if (isSetAge()) {
      if (!first) sb.append(", ");
      sb.append("age:");
      sb.append(this.age);
      first = false;
    }
    if (isSetSex()) {
      if (!first) sb.append(", ");
      sb.append("sex:");
      if (this.sex == null) {
        sb.append("null");
      } else {
        sb.append(this.sex);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }
  /**
   * Validates required fields. Nothing is checked here: 'id' is a primitive
   * whose presence cannot be verified with the non-beans generator, and the
   * struct has no sub-structs to recurse into.
   */
  public void validate() throws TException {
    // check for required fields
    // alas, we cannot check 'id' because it's a primitive and you chose the non-beans generator.
    // check for sub-struct validity
  }
  /**
   * Java serialization hook: delegates to Thrift compact-protocol encoding so
   * serialized form matches the wire format; TException is wrapped as IOException.
   */
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (TException te) {
      throw new java.io.IOException(te);
    }
  }
  /**
   * Java deserialization hook: resets the isset bitfield (the default
   * constructor is bypassed by Java serialization) and then decodes the
   * struct via the Thrift compact protocol; TException is wrapped as IOException.
   */
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Factory producing the field-tagged (standard protocol) serialization scheme. */
  private static class UserStandardSchemeFactory implements SchemeFactory {
    public UserStandardScheme getScheme() {
      return new UserStandardScheme();
    }
  }
  /**
   * Standard (field-tagged) protocol codec for User. Reading tolerates
   * unknown or mistyped fields by skipping them; writing emits only fields
   * that are set, with id (required) written unconditionally.
   */
  private static class UserStandardScheme extends StandardScheme<User> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, User struct) throws TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      // Loop over tagged fields until the STOP marker.
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.id = iprot.readI32();
              struct.setIdIsSet(true);
            } else {
              // Type mismatch: skip the payload rather than fail.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.name = iprot.readString();
              struct.setNameIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // AGE
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.age = iprot.readI32();
              struct.setAgeIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // SEX
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.sex = iprot.readString();
              struct.setSexIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            // Unknown field id: forward-compatible skip.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      if (!struct.isSetId()) {
        throw new TProtocolException("Required field 'id' was not found in serialized data! Struct: " + toString());
      }
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, User struct) throws TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // id is required: always written.
      oprot.writeFieldBegin(ID_FIELD_DESC);
      oprot.writeI32(struct.id);
      oprot.writeFieldEnd();
      if (struct.name != null) {
        if (struct.isSetName()) {
          oprot.writeFieldBegin(NAME_FIELD_DESC);
          oprot.writeString(struct.name);
          oprot.writeFieldEnd();
        }
      }
      if (struct.isSetAge()) {
        oprot.writeFieldBegin(AGE_FIELD_DESC);
        oprot.writeI32(struct.age);
        oprot.writeFieldEnd();
      }
      if (struct.sex != null) {
        if (struct.isSetSex()) {
          oprot.writeFieldBegin(SEX_FIELD_DESC);
          oprot.writeString(struct.sex);
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  /** Factory producing the compact tuple-protocol serialization scheme. */
  private static class UserTupleSchemeFactory implements SchemeFactory {
    public UserTupleScheme getScheme() {
      return new UserTupleScheme();
    }
  }
  /**
   * Tuple-protocol codec for User: the required id is written first, followed
   * by a 3-bit presence bitset (bit 0 = name, bit 1 = age, bit 2 = sex) and
   * then only the values whose bits are set. Read must mirror write order exactly.
   */
  private static class UserTupleScheme extends TupleScheme<User> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, User struct) throws TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      oprot.writeI32(struct.id);
      BitSet optionals = new BitSet();
      if (struct.isSetName()) {
        optionals.set(0);
      }
      if (struct.isSetAge()) {
        optionals.set(1);
      }
      if (struct.isSetSex()) {
        optionals.set(2);
      }
      oprot.writeBitSet(optionals, 3);
      // Values follow in the same fixed order as the bitset positions.
      if (struct.isSetName()) {
        oprot.writeString(struct.name);
      }
      if (struct.isSetAge()) {
        oprot.writeI32(struct.age);
      }
      if (struct.isSetSex()) {
        oprot.writeString(struct.sex);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, User struct) throws TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      struct.id = iprot.readI32();
      struct.setIdIsSet(true);
      BitSet incoming = iprot.readBitSet(3);
      if (incoming.get(0)) {
        struct.name = iprot.readString();
        struct.setNameIsSet(true);
      }
      if (incoming.get(1)) {
        struct.age = iprot.readI32();
        struct.setAgeIsSet(true);
      }
      if (incoming.get(2)) {
        struct.sex = iprot.readString();
        struct.setSexIsSet(true);
      }
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.raptor.legacy.systemtables;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.slice.Slice;
import io.prestosql.plugin.raptor.legacy.metadata.MetadataDao;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.connector.ColumnMetadata;
import io.prestosql.spi.connector.ConnectorTableMetadata;
import io.prestosql.spi.connector.RecordCursor;
import io.prestosql.spi.connector.SchemaTableName;
import io.prestosql.spi.predicate.Domain;
import io.prestosql.spi.predicate.TupleDomain;
import io.prestosql.spi.type.Type;
import org.skife.jdbi.v2.IDBI;
import org.skife.jdbi.v2.exceptions.DBIException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkElementIndex;
import static com.google.common.base.Preconditions.checkPositionIndex;
import static com.google.common.base.Preconditions.checkState;
import static io.prestosql.plugin.raptor.legacy.RaptorColumnHandle.SHARD_UUID_COLUMN_TYPE;
import static io.prestosql.plugin.raptor.legacy.RaptorErrorCode.RAPTOR_CORRUPT_METADATA;
import static io.prestosql.plugin.raptor.legacy.metadata.DatabaseShardManager.maxColumn;
import static io.prestosql.plugin.raptor.legacy.metadata.DatabaseShardManager.minColumn;
import static io.prestosql.plugin.raptor.legacy.metadata.DatabaseShardManager.shardIndexTable;
import static io.prestosql.plugin.raptor.legacy.util.DatabaseUtil.metadataError;
import static io.prestosql.plugin.raptor.legacy.util.DatabaseUtil.onDemandDao;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DateType.DATE;
import static io.prestosql.spi.type.TimestampType.TIMESTAMP_MILLIS;
import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType;
import static io.prestosql.spi.type.VarcharType.createVarcharType;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
/**
 * Cursor over the {@code system.shards} metadata table. For each Raptor table
 * matching the given constraint, it joins that table's shard index table with
 * the global {@code shards} and {@code tables} tables and streams one JDBC
 * result set per table id, advancing lazily in {@link #advanceNextPosition}.
 * <p>
 * Fix applied: {@link #getType} and {@link #isNull} previously used Guava's
 * {@code checkPositionIndex}, which accepts {@code field == TYPES.size()};
 * element access requires {@code checkElementIndex} (valid range is
 * {@code [0, size)}).
 */
public class ShardMetadataRecordCursor
        implements RecordCursor
{
    private static final String SHARD_UUID = "shard_uuid";
    private static final String XXHASH64 = "xxhash64";
    private static final String SCHEMA_NAME = "table_schema";
    private static final String TABLE_NAME = "table_name";
    private static final String MIN_TIMESTAMP = "min_timestamp";
    private static final String MAX_TIMESTAMP = "max_timestamp";
    private static final String MIN_DATE = "min_date";
    private static final String MAX_DATE = "max_date";
    public static final SchemaTableName SHARD_METADATA_TABLE_NAME = new SchemaTableName("system", "shards");
    // Column order here defines the field indexes used throughout this class.
    public static final ConnectorTableMetadata SHARD_METADATA = new ConnectorTableMetadata(
            SHARD_METADATA_TABLE_NAME,
            ImmutableList.of(
                    new ColumnMetadata(SCHEMA_NAME, createUnboundedVarcharType()),
                    new ColumnMetadata(TABLE_NAME, createUnboundedVarcharType()),
                    new ColumnMetadata(SHARD_UUID, SHARD_UUID_COLUMN_TYPE),
                    new ColumnMetadata("bucket_number", BIGINT),
                    new ColumnMetadata("uncompressed_size", BIGINT),
                    new ColumnMetadata("compressed_size", BIGINT),
                    new ColumnMetadata("row_count", BIGINT),
                    new ColumnMetadata(XXHASH64, createVarcharType(16)),
                    new ColumnMetadata(MIN_TIMESTAMP, TIMESTAMP_MILLIS),
                    new ColumnMetadata(MAX_TIMESTAMP, TIMESTAMP_MILLIS),
                    new ColumnMetadata(MIN_DATE, DATE),
                    new ColumnMetadata(MAX_DATE, DATE)));
    private static final List<ColumnMetadata> COLUMNS = SHARD_METADATA.getColumns();
    private static final List<Type> TYPES = COLUMNS.stream().map(ColumnMetadata::getType).collect(toList());
    private final IDBI dbi;
    private final MetadataDao metadataDao;
    // Table ids to visit; one SQL query is issued per id.
    private final Iterator<Long> tableIds;
    private final List<String> columnNames;
    private final TupleDomain<Integer> tupleDomain;
    // Current per-table JDBC state; null resultSet means exhausted.
    private ResultSet resultSet;
    private Connection connection;
    private PreparedStatement statement;
    private final ResultSetValues resultSetValues;
    private boolean closed;
    private long completedBytes;
    public ShardMetadataRecordCursor(IDBI dbi, TupleDomain<Integer> tupleDomain)
    {
        this.dbi = requireNonNull(dbi, "dbi is null");
        this.metadataDao = onDemandDao(dbi, MetadataDao.class);
        this.tupleDomain = requireNonNull(tupleDomain, "tupleDomain is null");
        this.tableIds = getTableIds(dbi, tupleDomain);
        this.columnNames = createQualifiedColumnNames();
        this.resultSetValues = new ResultSetValues(TYPES);
        // May be null when no tables match; advanceNextPosition handles that.
        this.resultSet = getNextResultSet();
    }
    /**
     * Builds the per-table SELECT joining the shard index table with the
     * global shards and tables tables, restricted to the given table id.
     */
    private static String constructSqlTemplate(List<String> columnNames, long tableId)
    {
        return format("SELECT %s\nFROM %s x\n" +
                        "JOIN shards ON (x.shard_id = shards.shard_id AND shards.table_id = %s)\n" +
                        "JOIN tables ON (tables.table_id = %s)\n",
                Joiner.on(", ").join(columnNames),
                shardIndexTable(tableId),
                tableId,
                tableId);
    }
    /** Column names qualified with their source table, in SHARD_METADATA column order. */
    private static List<String> createQualifiedColumnNames()
    {
        return ImmutableList.<String>builder()
                .add("tables.schema_name")
                .add("tables.table_name")
                .add("shards." + COLUMNS.get(2).getName())
                .add("shards." + COLUMNS.get(3).getName())
                .add("shards." + COLUMNS.get(4).getName())
                .add("shards." + COLUMNS.get(5).getName())
                .add("shards." + COLUMNS.get(6).getName())
                .add("shards." + COLUMNS.get(7).getName())
                .add(MIN_TIMESTAMP)
                .add(MAX_TIMESTAMP)
                .add(MIN_DATE)
                .add(MAX_DATE)
                .build();
    }
    @Override
    public long getCompletedBytes()
    {
        return completedBytes;
    }
    @Override
    public long getReadTimeNanos()
    {
        // Read time is not tracked for this metadata cursor.
        return 0;
    }
    @Override
    public Type getType(int field)
    {
        // Element access: valid range is [0, size) — checkElementIndex, not checkPositionIndex.
        checkElementIndex(field, TYPES.size());
        return TYPES.get(field);
    }
    @Override
    public boolean advanceNextPosition()
    {
        // A null resultSet means the table-id iterator was already exhausted.
        if (resultSet == null) {
            close();
        }
        if (closed) {
            return false;
        }
        try {
            // Skip over empty per-table result sets until a row is found.
            while (!resultSet.next()) {
                resultSet = getNextResultSet();
                if (resultSet == null) {
                    close();
                    return false;
                }
            }
            completedBytes += resultSetValues.extractValues(
                    resultSet,
                    ImmutableSet.of(getColumnIndex(SHARD_METADATA, SHARD_UUID)),
                    ImmutableSet.of(getColumnIndex(SHARD_METADATA, XXHASH64)));
            return true;
        }
        catch (SQLException | DBIException e) {
            throw metadataError(e);
        }
    }
    @Override
    public boolean getBoolean(int field)
    {
        checkFieldType(field, boolean.class);
        return resultSetValues.getBoolean(field);
    }
    @Override
    public long getLong(int field)
    {
        checkFieldType(field, long.class);
        return resultSetValues.getLong(field);
    }
    @Override
    public double getDouble(int field)
    {
        checkFieldType(field, double.class);
        return resultSetValues.getDouble(field);
    }
    @Override
    public Slice getSlice(int field)
    {
        checkFieldType(field, Slice.class);
        return resultSetValues.getSlice(field);
    }
    @Override
    public Object getObject(int field)
    {
        // No structural types appear in SHARD_METADATA.
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isNull(int field)
    {
        checkState(!closed, "cursor is closed");
        // Element access: valid range is [0, size).
        checkElementIndex(field, TYPES.size());
        return resultSetValues.isNull(field);
    }
    @Override
    public void close()
    {
        closed = true;
        closeCurrentResultSet();
    }
    @SuppressWarnings("unused")
    private void closeCurrentResultSet()
    {
        // use try-with-resources to close everything properly
        //noinspection EmptyTryBlock
        try (Connection connection = this.connection;
                Statement statement = this.statement;
                ResultSet resultSet = this.resultSet) {
            // do nothing
        }
        catch (SQLException ignored) {
        }
    }
    /**
     * Closes the current per-table JDBC resources and opens the query for the
     * next table id; returns null when all tables have been visited.
     */
    private ResultSet getNextResultSet()
    {
        closeCurrentResultSet();
        if (!tableIds.hasNext()) {
            return null;
        }
        Long tableId = tableIds.next();
        Long columnId = metadataDao.getTemporalColumnId(tableId);
        List<String> columnNames;
        if (columnId == null) {
            // No temporal column: all four temporal result columns are NULL.
            columnNames = getMappedColumnNames("null", "null", "null", "null");
        }
        else {
            Type temporalType = metadataDao.getTableColumn(tableId, columnId).getDataType();
            if (temporalType.equals(DATE)) {
                columnNames = getMappedColumnNames("null", "null", minColumn(columnId), maxColumn(columnId));
            }
            else if (temporalType.equals(TIMESTAMP_MILLIS)) {
                columnNames = getMappedColumnNames(minColumn(columnId), maxColumn(columnId), "null", "null");
            }
            else {
                throw new PrestoException(RAPTOR_CORRUPT_METADATA, "Temporal column should be of type date or timestamp, not " + temporalType.getDisplayName());
            }
        }
        try {
            connection = dbi.open().getConnection();
            statement = PreparedStatementBuilder.create(
                    connection,
                    constructSqlTemplate(columnNames, tableId),
                    columnNames,
                    TYPES,
                    ImmutableSet.of(getColumnIndex(SHARD_METADATA, SHARD_UUID)),
                    tupleDomain);
            return statement.executeQuery();
        }
        catch (SQLException | DBIException e) {
            close();
            throw metadataError(e);
        }
    }
    /** Substitutes the four temporal placeholder column names with concrete min/max columns or "null". */
    private List<String> getMappedColumnNames(String minTimestampColumn, String maxTimestampColumn, String minDateColumn, String maxDateColumn)
    {
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (String column : columnNames) {
            switch (column) {
                case MIN_TIMESTAMP:
                    builder.add(minTimestampColumn);
                    break;
                case MAX_TIMESTAMP:
                    builder.add(maxTimestampColumn);
                    break;
                case MIN_DATE:
                    builder.add(minDateColumn);
                    break;
                case MAX_DATE:
                    builder.add(maxDateColumn);
                    break;
                default:
                    builder.add(column);
                    break;
            }
        }
        return builder.build();
    }
    /**
     * Queries the table ids matching single-value schema/table-name
     * constraints (other constraint shapes are ignored and filtered later).
     */
    @VisibleForTesting
    static Iterator<Long> getTableIds(IDBI dbi, TupleDomain<Integer> tupleDomain)
    {
        // NOTE(review): unchecked Optional.get() — assumes tupleDomain is never "none"; confirm with callers.
        Map<Integer, Domain> domains = tupleDomain.getDomains().get();
        Domain schemaNameDomain = domains.get(getColumnIndex(SHARD_METADATA, SCHEMA_NAME));
        Domain tableNameDomain = domains.get(getColumnIndex(SHARD_METADATA, TABLE_NAME));
        List<String> values = new ArrayList<>();
        StringBuilder sql = new StringBuilder("SELECT table_id FROM tables ");
        if (schemaNameDomain != null || tableNameDomain != null) {
            sql.append("WHERE ");
            List<String> predicates = new ArrayList<>();
            if (tableNameDomain != null && tableNameDomain.isSingleValue()) {
                predicates.add("table_name = ?");
                values.add(getStringValue(tableNameDomain.getSingleValue()));
            }
            if (schemaNameDomain != null && schemaNameDomain.isSingleValue()) {
                predicates.add("schema_name = ?");
                values.add(getStringValue(schemaNameDomain.getSingleValue()));
            }
            sql.append(Joiner.on(" AND ").join(predicates));
        }
        ImmutableList.Builder<Long> tableIds = ImmutableList.builder();
        try (Connection connection = dbi.open().getConnection();
                PreparedStatement statement = connection.prepareStatement(sql.toString())) {
            for (int i = 0; i < values.size(); i++) {
                statement.setString(i + 1, values.get(i));
            }
            try (ResultSet resultSet = statement.executeQuery()) {
                while (resultSet.next()) {
                    tableIds.add(resultSet.getLong("table_id"));
                }
            }
        }
        catch (SQLException | DBIException e) {
            throw metadataError(e);
        }
        return tableIds.build().iterator();
    }
    /** Index of {@code columnName} within the table metadata; throws if absent. */
    private static int getColumnIndex(ConnectorTableMetadata tableMetadata, String columnName)
    {
        List<ColumnMetadata> columns = tableMetadata.getColumns();
        for (int i = 0; i < columns.size(); i++) {
            if (columns.get(i).getName().equals(columnName)) {
                return i;
            }
        }
        throw new IllegalArgumentException(format("Column '%s' not found", columnName));
    }
    /** Guards typed accessors: cursor must be open and the field's Java type must match. */
    private void checkFieldType(int field, Class<?> clazz)
    {
        checkState(!closed, "cursor is closed");
        Type type = getType(field);
        checkArgument(type.getJavaType() == clazz, "Type %s cannot be read as %s", type, clazz.getSimpleName());
    }
    private static String getStringValue(Object value)
    {
        return ((Slice) value).toStringUtf8();
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.cluster;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.protocol.HttpContext;
import org.h2.util.IOUtils;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.encryption.TwoWayPasswordEncoderPluginType;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.utils.TestUtils;
import org.pentaho.di.www.GetPropertiesServlet;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyMapOf;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
/**
* Tests for SlaveServer class
*
* @author Pavel Sakun
* @see SlaveServer
*/
public class SlaveServerTest {
  // Spied instance under test; HTTP interactions are mocked in init().
  SlaveServer slaveServer;
  @BeforeClass
  public static void beforeClass() throws KettleException {
    // Register and initialize the password encoder plugin so Encr works in tests.
    PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() );
    PluginRegistry.init();
    String passwordEncoderPluginID =
      Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_PASSWORD_ENCODER_PLUGIN ), "Kettle" );
    Encr.init( passwordEncoderPluginID );
  }
  @AfterClass
  public static void tearDown() {
    PluginRegistry.getInstance().reset();
  }
  /**
   * Builds a spied SlaveServer whose HTTP client always returns a mocked
   * 404 response, so connection-failure paths can be exercised without a server.
   */
  @Before
  public void init() throws IOException {
    SlaveConnectionManager connectionManager = SlaveConnectionManager.getInstance();
    HttpClient httpClient = spy( connectionManager.createHttpClient() );
    // mock response
    CloseableHttpResponse closeableHttpResponseMock = mock( CloseableHttpResponse.class );
    // mock status line
    StatusLine statusLineMock = mock( StatusLine.class );
    doReturn( HttpStatus.SC_NOT_FOUND ).when( statusLineMock ).getStatusCode();
    doReturn( statusLineMock ).when( closeableHttpResponseMock ).getStatusLine();
    // mock entity
    HttpEntity httpEntityMock = mock( HttpEntity.class );
    doReturn( httpEntityMock ).when( closeableHttpResponseMock ).getEntity();
    doReturn( closeableHttpResponseMock ).when( httpClient ).execute( any( HttpGet.class ) );
    doReturn( closeableHttpResponseMock ).when( httpClient ).execute( any( HttpPost.class ) );
    doReturn( closeableHttpResponseMock ).when( httpClient ).execute( any( HttpPost.class ), any( HttpClientContext.class ) );
    slaveServer = spy( new SlaveServer() );
    doReturn( httpClient ).when( slaveServer ).getHttpClient();
    doReturn( "response_body" ).when( slaveServer ).getResponseBodyAsString( any( InputStream.class ) );
  }
  /** Builds a mocked HttpResponse with the given status code and body text. */
  private HttpResponse mockResponse( int statusCode, String entityText ) throws IOException {
    HttpResponse resp = mock( HttpResponse.class );
    StatusLine status = mock( StatusLine.class );
    when( status.getStatusCode() ).thenReturn( statusCode );
    when( resp.getStatusLine() ).thenReturn( status );
    HttpEntity entity = mock( HttpEntity.class );
    when( entity.getContent() ).thenReturn( IOUtils.getInputStream( entityText ) );
    when( resp.getEntity() ).thenReturn( entity );
    return resp;
  }
  // The mocked 404 response from init() should surface as a KettleException.
  @Test( expected = KettleException.class )
  public void testExecService() throws Exception {
    HttpGet httpGetMock = mock( HttpGet.class );
    URI uriMock = new URI( "fake" );
    doReturn( uriMock ).when( httpGetMock ).getURI();
    doReturn( httpGetMock ).when( slaveServer ).buildExecuteServiceMethod( anyString(), anyMapOf( String.class,
      String.class ) );
    slaveServer.setHostname( "hostNameStub" );
    slaveServer.setUsername( "userNAmeStub" );
    slaveServer.execService( "wrong_app_name" );
    fail( "Incorrect connection details had been used, but no exception was thrown" );
  }
  @Test( expected = KettleException.class )
  public void testSendXML() throws Exception {
    slaveServer.setHostname( "hostNameStub" );
    slaveServer.setUsername( "userNAmeStub" );
    HttpPost httpPostMock = mock( HttpPost.class );
    URI uriMock = new URI( "fake" );
    doReturn( uriMock ).when( httpPostMock ).getURI();
    doReturn( httpPostMock ).when( slaveServer ).buildSendXMLMethod( any( byte[].class ), anyString() );
    slaveServer.sendXML( "", "" );
    fail( "Incorrect connection details had been used, but no exception was thrown" );
  }
  @Test( expected = KettleException.class )
  public void testSendExport() throws Exception {
    slaveServer.setHostname( "hostNameStub" );
    slaveServer.setUsername( "userNAmeStub" );
    HttpPost httpPostMock = mock( HttpPost.class );
    URI uriMock = new URI( "fake" );
    doReturn( uriMock ).when( httpPostMock ).getURI();
    doReturn( httpPostMock ).when( slaveServer ).buildSendExportMethod( anyString(), anyString(), any(
      InputStream.class ) );
    File tempFile;
    tempFile = File.createTempFile( "PDI-", "tmp" );
    tempFile.deleteOnExit();
    slaveServer.sendExport( tempFile.getAbsolutePath(), "", "" );
    fail( "Incorrect connection details had been used, but no exception was thrown" );
  }
  /**
   * Happy path for sendExport: a mocked 200 response is returned and the
   * Answer also verifies that the configured credentials reach the request context.
   */
  @Test
  public void testSendExportOk() throws Exception {
    slaveServer.setUsername( "uname" );
    slaveServer.setPassword( "passw" );
    slaveServer.setHostname( "hname" );
    slaveServer.setPort( "1111" );
    HttpPost httpPostMock = mock( HttpPost.class );
    URI uriMock = new URI( "fake" );
    final String responseContent = "baah";
    when( httpPostMock.getURI() ).thenReturn( uriMock );
    doReturn( uriMock ).when( httpPostMock ).getURI();
    HttpClient client = mock( HttpClient.class );
    when( client.execute( any(), any( HttpContext.class ) ) ).then( new Answer<HttpResponse>() {
      @Override
      public HttpResponse answer( InvocationOnMock invocation ) throws Throwable {
        // Assert inside the mocked call: credentials must be present in the context.
        HttpClientContext context = invocation.getArgumentAt( 1, HttpClientContext.class );
        Credentials cred = context.getCredentialsProvider().getCredentials( new AuthScope( "hname", 1111 ) );
        assertEquals( "uname", cred.getUserPrincipal().getName() );
        return mockResponse( 200, responseContent );
      }
    } );
    // override init
    when( slaveServer.getHttpClient() ).thenReturn( client );
    when( slaveServer.getResponseBodyAsString( any() ) ).thenCallRealMethod();
    doReturn( httpPostMock ).when( slaveServer ).buildSendExportMethod( anyString(), anyString(), any(
      InputStream.class ) );
    File tempFile;
    tempFile = File.createTempFile( "PDI-", "tmp" );
    tempFile.deleteOnExit();
    String result = slaveServer.sendExport( tempFile.getAbsolutePath(), null, null );
    assertEquals( responseContent, result );
  }
  /** Credentials set on the server must appear in the auth context, including after an update. */
  @Test
  public void testAddCredentials() throws IOException, ClassNotFoundException {
    String testUser = "test_username";
    slaveServer.setUsername( testUser );
    String testPassword = "test_password";
    slaveServer.setPassword( testPassword );
    String host = "somehost";
    slaveServer.setHostname( host );
    int port = 1000;
    slaveServer.setPort( "" + port );
    HttpClientContext auth = slaveServer.getAuthContext();
    Credentials cred = auth.getCredentialsProvider().getCredentials( new AuthScope( host, port ) );
    assertEquals( testUser, cred.getUserPrincipal().getName() );
    assertEquals( testPassword, cred.getPassword() );
    // Changing the credentials must be reflected in a freshly obtained context.
    String user2 = "user2";
    slaveServer.setUsername( user2 );
    slaveServer.setPassword( "pass2" );
    auth = slaveServer.getAuthContext();
    cred = auth.getCredentialsProvider().getCredentials( new AuthScope( host, port ) );
    assertEquals( user2, cred.getUserPrincipal().getName() );
  }
  /** SSL mode must register the https scheme (and only that) in the auth cache. */
  @Test
  public void testAuthCredentialsSchemeWithSSL() {
    slaveServer.setUsername( "admin" );
    slaveServer.setPassword( "password" );
    slaveServer.setHostname( "localhost" );
    slaveServer.setPort( "8443" );
    slaveServer.setSslMode( true );
    AuthCache cache = slaveServer.getAuthContext().getAuthCache();
    assertNotNull( cache.get( new HttpHost( "localhost", 8443, "https" ) ) );
    assertNull( cache.get( new HttpHost( "localhost", 8443, "http" ) ) );
  }
  /** Non-SSL mode must register the http scheme (and only that) in the auth cache. */
  @Test
  public void testAuthCredentialsSchemeWithoutSSL() {
    slaveServer.setUsername( "admin" );
    slaveServer.setPassword( "password" );
    slaveServer.setHostname( "localhost" );
    slaveServer.setPort( "8080" );
    slaveServer.setSslMode( false );
    AuthCache cache = slaveServer.getAuthContext().getAuthCache();
    assertNull( cache.get( new HttpHost( "localhost", 8080, "https" ) ) );
    assertNotNull( cache.get( new HttpHost( "localhost", 8080, "http" ) ) );
  }
  /** A name clash within a slave-server list must be resolved by renaming the newcomer. */
  @Test
  public void testModifyingName() {
    slaveServer.setName( "test" );
    List<SlaveServer> list = new ArrayList<SlaveServer>();
    list.add( slaveServer );
    SlaveServer slaveServer2 = spy( new SlaveServer() );
    slaveServer2.setName( "test" );
    slaveServer2.verifyAndModifySlaveServerName( list, null );
    assertTrue( !slaveServer.getName().equals( slaveServer2.getName() ) );
  }
  /** equals/hashCode contract checks across same, equal, case-differing and different names. */
  @Test
  public void testEqualsHashCodeConsistency() throws Exception {
    SlaveServer slave = new SlaveServer();
    slave.setName( "slave" );
    TestUtils.checkEqualsHashCodeConsistency( slave, slave );
    SlaveServer slaveSame = new SlaveServer();
    slaveSame.setName( "slave" );
    assertTrue( slave.equals( slaveSame ) );
    TestUtils.checkEqualsHashCodeConsistency( slave, slaveSame );
    SlaveServer slaveCaps = new SlaveServer();
    slaveCaps.setName( "SLAVE" );
    TestUtils.checkEqualsHashCodeConsistency( slave, slaveCaps );
    SlaveServer slaveOther = new SlaveServer();
    slaveOther.setName( "something else" );
    TestUtils.checkEqualsHashCodeConsistency( slave, slaveOther );
  }
  /**
   * getKettleProperties must decrypt the servlet response and expose the
   * properties. The hex blob below is presumably an Encr-encrypted XML
   * properties payload containing AgileBIDatabase=AgileBI — TODO confirm
   * against the encoder used in beforeClass().
   */
  @Test
  public void testGetKettleProperties() throws Exception {
    String encryptedResponse = "3c3f786d6c2076657273696f6e3d22312e302220656e636f64696e6"
      + "73d225554462d38223f3e0a3c21444f43545950452070726f706572"
      + "746965730a202053595354454d2022687474703a2f2f6a6176612e737"
      + "56e2e636f6d2f6474642f70726f706572746965732e647464223e0a3c"
      + "70726f706572746965733e0a2020203c636f6d6d656e743e3c2f636f6d6d6"
      + "56e743e0a2020203c656e747279206b65793d224167696c6542494461746162"
      + "617365223e4167696c6542493c2f656e7470c7a6a5f445d7808bbb1cbc64d797bc84";
    doReturn( encryptedResponse ).when( slaveServer ).execService( GetPropertiesServlet.CONTEXT_PATH + "/?xml=Y" );
    slaveServer.getKettleProperties().getProperty( "AgileBIDatabase" );
    assertEquals( "AgileBI", slaveServer.getKettleProperties().getProperty( "AgileBIDatabase" ) );
  }
}
| |
/*
* Copyright 2013-2015 JIWHIZ Consulting Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jiwhiz.rest.site;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import javax.inject.Inject;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.hateoas.MediaTypes;
import com.jiwhiz.domain.post.BlogPost;
import com.jiwhiz.domain.post.BlogPostRepository;
import com.jiwhiz.domain.post.CommentPost;
import com.jiwhiz.domain.post.CommentPostRepository;
import com.jiwhiz.domain.post.CommentStatusType;
import com.jiwhiz.rest.AbstractRestControllerTest;
import com.jiwhiz.rest.ApiUrls;
/**
* @author Yuan Ji
*/
public class PublicBlogRestControllerTest extends AbstractRestControllerTest {
    // Repository mocks injected from the test context; reset before every test so
    // interaction counts from earlier tests cannot leak into the verify(...) calls.
    @Inject
    BlogPostRepository blogPostRepositoryMock;
    @Inject
    CommentPostRepository commentPostRepositoryMock;
    @Before
    public void setup() {
        Mockito.reset(blogPostRepositoryMock);
        Mockito.reset(commentPostRepositoryMock);
        super.setup();
    }
    /**
     * GET on the public blog list returns a paged HAL document with the
     * published posts, self link template, and page metadata.
     */
    @Test
    public void getPublicBlogPosts_ShouldReturnAllPublicBlogPosts() throws Exception {
        Pageable pageable = new PageRequest(0, 10);
        when(blogPostRepositoryMock.findByPublishedIsTrueOrderByPublishedTimeDesc(any(Pageable.class)))
            .thenReturn(new PageImpl<BlogPost>(getTestPublishedBlogPostList(), pageable, 2));
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaTypes.HAL_JSON))
            .andExpect(jsonPath("$._embedded.blogPostList", hasSize(2)))
            .andExpect(jsonPath("$._embedded.blogPostList[0].id", is(BLOGS_1_ID)))
            .andExpect(jsonPath("$._embedded.blogPostList[0].authorId", is(BLOGS_1_AUTHOR_ID)))
            .andExpect(jsonPath("$._embedded.blogPostList[0].title", is(BLOGS_1_TITLE)))
            .andExpect(jsonPath("$._embedded.blogPostList[1].id", is(BLOGS_2_ID)))
            .andExpect(jsonPath("$._embedded.blogPostList[1].authorId", is(BLOGS_2_AUTHOR_ID)))
            .andExpect(jsonPath("$._embedded.blogPostList[1].title", is(BLOGS_2_TITLE)))
            //check links
            .andExpect(jsonPath("$._links.self.templated", is(true)))
            .andExpect(jsonPath("$._links.self.href", endsWith("public/blogs{?page,size,sort}")))
            //check page
            .andExpect(jsonPath("$.page.size", is(10)))
            .andExpect(jsonPath("$.page.totalElements", is(2)))
            .andExpect(jsonPath("$.page.totalPages", is(1)))
            .andExpect(jsonPath("$.page.number", is(0)))
            ;
        // NOTE(review): the stub above matches any Pageable but the verify uses an
        // equal PageRequest(0, 10) — presumably the controller's default paging.
        verify(blogPostRepositoryMock, times(1)).findByPublishedIsTrueOrderByPublishedTimeDesc(pageable);
        verifyNoMoreInteractions(blogPostRepositoryMock);
    }
    /**
     * GET on a published blog by id returns the post plus self and
     * templated comments links.
     */
    @Test
    public void getPublicBlogPostById_ShouldReturnBlogPost() throws Exception {
        BlogPost blog = getTestSinglePublishedBlogPost();
        when(blogPostRepositoryMock.findOne(BLOG_ID)).thenReturn(blog);
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS_BLOG, BLOG_ID))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaTypes.HAL_JSON))
            .andExpect(jsonPath("$.id", is(BLOG_ID)))
            .andExpect(jsonPath("$.authorId", is(BLOG_AUTHOR_ID)))
            .andExpect(jsonPath("$.title", is(BLOG_TITLE)))
            .andExpect(jsonPath("$._links.self.href", endsWith(BLOG_ID)))
            .andExpect(jsonPath("$._links.comments.templated", is(true)))
            .andExpect(jsonPath("$._links.comments.href", endsWith(ApiUrls.URL_SITE_BLOGS+"/"+BLOG_ID+"/comments{?page,size,sort}")))
            ;
        verify(blogPostRepositoryMock, times(1)).findOne(BLOG_ID);
        verifyNoMoreInteractions(blogPostRepositoryMock);
    }
    /**
     * An unpublished blog must not be reachable through the public endpoint:
     * the controller answers 404 even though the repository returns the post.
     */
    @Test
    public void getPublicBlogPost_ShouldReturnHttpStatusCode404ForUnpublishedBlog() throws Exception {
        BlogPost blog = getTestSinglePublishedBlogPost();
        blog.setPublished(false);
        when(blogPostRepositoryMock.findOne(BLOG_ID)).thenReturn(blog);
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS_BLOG, BLOG_ID))
            .andExpect(status().isNotFound())
            ;
        verify(blogPostRepositoryMock, times(1)).findOne(BLOG_ID);
        verifyNoMoreInteractions(blogPostRepositoryMock);
    }
    /**
     * GET on a blog's comments returns only APPROVED comments, as a paged
     * HAL document with a templated self link.
     */
    @Test
    public void getBlogApprovedCommentPosts_ShouldReturnApprovedComments() throws Exception {
        BlogPost blog = getTestSinglePublishedBlogPost();
        Page<CommentPost> page = new PageImpl<CommentPost>(getTestApprovedCommentPostList(), new PageRequest(0, 10), 2);
        when(blogPostRepositoryMock.findOne(BLOG_ID)).thenReturn(blog);
        when(commentPostRepositoryMock.findByBlogPostIdAndStatusOrderByCreatedTimeAsc(
                eq(BLOG_ID), eq(CommentStatusType.APPROVED), any(Pageable.class)))
            .thenReturn(page);
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS_BLOG_COMMENTS, BLOG_ID))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaTypes.HAL_JSON))
            .andExpect(jsonPath("$._embedded.commentPostList", hasSize(2)))
            .andExpect(jsonPath("$._embedded.commentPostList[0].id", is(COMMENTS_1_ID)))
            .andExpect(jsonPath("$._embedded.commentPostList[0].blogPostId", is(BLOG_ID)))
            .andExpect(jsonPath("$._embedded.commentPostList[0].authorId", is(COMMENTS_1_AUTHOR_ID)))
            .andExpect(jsonPath("$._embedded.commentPostList[0].content", is (COMMENTS_1_CONTENT)))
            .andExpect(jsonPath("$._embedded.commentPostList[1].id", is(COMMENTS_2_ID)))
            .andExpect(jsonPath("$._embedded.commentPostList[1].blogPostId", is(BLOG_ID)))
            .andExpect(jsonPath("$._embedded.commentPostList[1].authorId", is(COMMENTS_2_AUTHOR_ID)))
            .andExpect(jsonPath("$._embedded.commentPostList[1].content", is(COMMENTS_2_CONTENT)))
            .andExpect(jsonPath("$._links.self.templated", is(true)))
            .andExpect(jsonPath("$._links.self.href", endsWith("/comments{?page,size,sort}")))
            ;
        verify(blogPostRepositoryMock, times(1)).findOne(BLOG_ID);
        verifyNoMoreInteractions(blogPostRepositoryMock);
        verify(commentPostRepositoryMock, times(1)).findByBlogPostIdAndStatusOrderByCreatedTimeAsc(
            eq(BLOG_ID), eq(CommentStatusType.APPROVED), any(Pageable.class));
        verifyNoMoreInteractions(commentPostRepositoryMock);
    }
    /**
     * GET on a single approved comment returns the comment resource.
     */
    @Test
    public void getBlogApprovedCommentPostById_ShouldReturnApprovedComment() throws Exception {
        BlogPost blog = getTestSinglePublishedBlogPost();
        CommentPost comment = getTestApprovedCommentPost();
        when(blogPostRepositoryMock.findOne(BLOG_ID)).thenReturn(blog);
        when(commentPostRepositoryMock.findOne(COMMENT_ID)).thenReturn(comment);
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS_BLOG_COMMENTS_COMMENT, BLOG_ID, COMMENT_ID))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaTypes.HAL_JSON))
            .andExpect(jsonPath("$.id", is(COMMENT_ID)))
            .andExpect(jsonPath("$.authorId", is(COMMENT_AUTHOR_ID)))
            .andExpect(jsonPath("$.content", is(COMMENT_CONTENT)))
            .andExpect(jsonPath("$.blogPostId", is(BLOG_ID)))
            .andExpect(jsonPath("$.status", is(CommentStatusType.APPROVED.name())))
            ;
        verify(blogPostRepositoryMock, times(1)).findOne(BLOG_ID);
        verifyNoMoreInteractions(blogPostRepositoryMock);
        verify(commentPostRepositoryMock, times(1)).findOne(COMMENT_ID);
        verifyNoMoreInteractions(commentPostRepositoryMock);
    }
    /**
     * A missing comment id yields 404.
     */
    @Test
    public void getBlogApprovedCommentPostById_ShouldReturn404IfNoSuchComment() throws Exception {
        BlogPost blog = getTestSinglePublishedBlogPost();
        when(blogPostRepositoryMock.findOne(BLOG_ID)).thenReturn(blog);
        when(commentPostRepositoryMock.findOne(COMMENT_ID)).thenReturn(null);
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS_BLOG_COMMENTS_COMMENT, BLOG_ID, COMMENT_ID))
            .andExpect(status().isNotFound())
            ;
        verify(blogPostRepositoryMock, times(1)).findOne(BLOG_ID);
        verifyNoMoreInteractions(blogPostRepositoryMock);
        verify(commentPostRepositoryMock, times(1)).findOne(COMMENT_ID);
        verifyNoMoreInteractions(commentPostRepositoryMock);
    }
    /**
     * A comment belonging to a different blog must not be served under this
     * blog's URL: expect 404.
     */
    @Test
    public void getBlogApprovedCommentPostById_ShouldReturn404IfCommentIsNotForBlog() throws Exception {
        BlogPost blog = getTestSinglePublishedBlogPost();
        CommentPost comment = getTestApprovedCommentPost();
        comment.setBlogPostId("Other");
        when(blogPostRepositoryMock.findOne(BLOG_ID)).thenReturn(blog);
        when(commentPostRepositoryMock.findOne(COMMENT_ID)).thenReturn(comment);
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS_BLOG_COMMENTS_COMMENT, BLOG_ID, COMMENT_ID))
            .andExpect(status().isNotFound())
            ;
        verify(blogPostRepositoryMock, times(1)).findOne(BLOG_ID);
        verifyNoMoreInteractions(blogPostRepositoryMock);
        verify(commentPostRepositoryMock, times(1)).findOne(COMMENT_ID);
        verifyNoMoreInteractions(commentPostRepositoryMock);
    }
    /**
     * A comment that exists but is not APPROVED (here: PENDING) must not be
     * exposed publicly: expect 404.
     */
    @Test
    public void getBlogApprovedCommentPostById_ShouldReturn404IfCommentIsNotApproved() throws Exception {
        BlogPost blog = getTestSinglePublishedBlogPost();
        CommentPost comment = getTestApprovedCommentPost();
        comment.setStatus(CommentStatusType.PENDING);
        when(blogPostRepositoryMock.findOne(BLOG_ID)).thenReturn(blog);
        when(commentPostRepositoryMock.findOne(COMMENT_ID)).thenReturn(comment);
        mockMvc.perform(get(ApiUrls.API_ROOT + ApiUrls.URL_SITE_BLOGS_BLOG_COMMENTS_COMMENT, BLOG_ID, COMMENT_ID))
            .andExpect(status().isNotFound())
            ;
        verify(blogPostRepositoryMock, times(1)).findOne(BLOG_ID);
        verifyNoMoreInteractions(blogPostRepositoryMock);
        verify(commentPostRepositoryMock, times(1)).findOne(COMMENT_ID);
        verifyNoMoreInteractions(commentPostRepositoryMock);
    }
}
| |
// Generated from ./src/main/java/com/metadave/breeze/parser/Breeze.g4 by ANTLR 4.0
package com.metadave.breeze.parser;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
// NOTE(review): ANTLR 4.0-generated parser for Breeze.g4. Do not edit by hand —
// regenerate from the grammar; comments here describe the generated structure only.
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class BreezeParser extends Parser {
  // Per-decision DFA cache and shared prediction-context cache used by the ATN simulator.
  protected static final DFA[] _decisionToDFA;
  protected static final PredictionContextCache _sharedContextCache =
    new PredictionContextCache();
  // Token type constants (index matches tokenNames below).
  public static final int
    LSQUARE=1, RSQUARE=2, LPAREN=3, RPAREN=4, COMMA=5, EQUALS=6, BAR=7, QUESTION=8,
    SPLAT=9, ATTS=10, TYPEID=11, CONID=12, INT=13, WS=14;
  public static final String[] tokenNames = {
    "<INVALID>", "'['", "']'", "'('", "')'", "','", "'='", "'|'", "'?'", "'*'",
    "'attributes'", "TYPEID", "CONID", "INT", "WS"
  };
  // Parser rule indexes (index matches ruleNames below).
  public static final int
    RULE_asd = 0, RULE_definition = 1, RULE_type = 2, RULE_product_type = 3,
    RULE_sum_type = 4, RULE_atts = 5, RULE_constructor = 6, RULE_fields = 7,
    RULE_field = 8, RULE_id = 9;
  public static final String[] ruleNames = {
    "asd", "definition", "type", "product_type", "sum_type", "atts", "constructor",
    "fields", "field", "id"
  };
  @Override
  public String getGrammarFileName() { return "Breeze.g4"; }
  @Override
  public String[] getTokenNames() { return tokenNames; }
  @Override
  public String[] getRuleNames() { return ruleNames; }
  @Override
  public ATN getATN() { return _ATN; }
  public BreezeParser(TokenStream input) {
    super(input);
    _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
  }
  // Context for rule: asd : definition+ ;
  public static class AsdContext extends ParserRuleContext {
    public List<DefinitionContext> definition() {
      return getRuleContexts(DefinitionContext.class);
    }
    public DefinitionContext definition(int i) {
      return getRuleContext(DefinitionContext.class,i);
    }
    public AsdContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_asd; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterAsd(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitAsd(this);
    }
  }
  // asd : definition+ ; — loops while the lookahead token is TYPEID.
  public final AsdContext asd() throws RecognitionException {
    AsdContext _localctx = new AsdContext(_ctx, getState());
    enterRule(_localctx, 0, RULE_asd);
    int _la;
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(21);
      _errHandler.sync(this);
      _la = _input.LA(1);
      do {
        {
        {
        setState(20); definition();
        }
        }
        setState(23);
        _errHandler.sync(this);
        _la = _input.LA(1);
      } while ( _la==TYPEID );
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: definition : TYPEID EQUALS type ;
  public static class DefinitionContext extends ParserRuleContext {
    public TerminalNode EQUALS() { return getToken(BreezeParser.EQUALS, 0); }
    public TypeContext type() {
      return getRuleContext(TypeContext.class,0);
    }
    public TerminalNode TYPEID() { return getToken(BreezeParser.TYPEID, 0); }
    public DefinitionContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_definition; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterDefinition(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitDefinition(this);
    }
  }
  // definition : TYPEID EQUALS type ;
  public final DefinitionContext definition() throws RecognitionException {
    DefinitionContext _localctx = new DefinitionContext(_ctx, getState());
    enterRule(_localctx, 2, RULE_definition);
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(25); match(TYPEID);
      setState(26); match(EQUALS);
      setState(27); type();
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: type : sum_type | product_type ;
  public static class TypeContext extends ParserRuleContext {
    public Product_typeContext product_type() {
      return getRuleContext(Product_typeContext.class,0);
    }
    public Sum_typeContext sum_type() {
      return getRuleContext(Sum_typeContext.class,0);
    }
    public TypeContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_type; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterType(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitType(this);
    }
  }
  // type : sum_type | product_type ; — CONID lookahead selects sum_type, LPAREN selects product_type.
  public final TypeContext type() throws RecognitionException {
    TypeContext _localctx = new TypeContext(_ctx, getState());
    enterRule(_localctx, 4, RULE_type);
    try {
      setState(31);
      switch (_input.LA(1)) {
      case CONID:
        enterOuterAlt(_localctx, 1);
        {
        setState(29); sum_type();
        }
        break;
      case LPAREN:
        enterOuterAlt(_localctx, 2);
        {
        setState(30); product_type();
        }
        break;
      default:
        throw new NoViableAltException(this);
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: product_type : fields ;
  public static class Product_typeContext extends ParserRuleContext {
    public FieldsContext fields() {
      return getRuleContext(FieldsContext.class,0);
    }
    public Product_typeContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_product_type; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterProduct_type(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitProduct_type(this);
    }
  }
  // product_type : fields ;
  public final Product_typeContext product_type() throws RecognitionException {
    Product_typeContext _localctx = new Product_typeContext(_ctx, getState());
    enterRule(_localctx, 6, RULE_product_type);
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(33); fields();
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: sum_type : constructor (BAR constructor)* atts? ;
  // Labeled constructors are accumulated in the `ctor` list.
  public static class Sum_typeContext extends ParserRuleContext {
    public ConstructorContext constructor;
    public List<ConstructorContext> ctor = new ArrayList<ConstructorContext>();
    public ConstructorContext constructor(int i) {
      return getRuleContext(ConstructorContext.class,i);
    }
    public TerminalNode BAR(int i) {
      return getToken(BreezeParser.BAR, i);
    }
    public AttsContext atts() {
      return getRuleContext(AttsContext.class,0);
    }
    public List<TerminalNode> BAR() { return getTokens(BreezeParser.BAR); }
    public List<ConstructorContext> constructor() {
      return getRuleContexts(ConstructorContext.class);
    }
    public Sum_typeContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_sum_type; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterSum_type(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitSum_type(this);
    }
  }
  // sum_type : constructor (BAR constructor)* atts? ; — optional atts when lookahead is ATTS.
  public final Sum_typeContext sum_type() throws RecognitionException {
    Sum_typeContext _localctx = new Sum_typeContext(_ctx, getState());
    enterRule(_localctx, 8, RULE_sum_type);
    int _la;
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(35); ((Sum_typeContext)_localctx).constructor = constructor();
      ((Sum_typeContext)_localctx).ctor.add(((Sum_typeContext)_localctx).constructor);
      setState(40);
      _errHandler.sync(this);
      _la = _input.LA(1);
      while (_la==BAR) {
        {
        {
        setState(36); match(BAR);
        setState(37); ((Sum_typeContext)_localctx).constructor = constructor();
        ((Sum_typeContext)_localctx).ctor.add(((Sum_typeContext)_localctx).constructor);
        }
        }
        setState(42);
        _errHandler.sync(this);
        _la = _input.LA(1);
      }
      setState(44);
      _la = _input.LA(1);
      if (_la==ATTS) {
        {
        setState(43); atts();
        }
      }
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: atts : ATTS fields? ;
  public static class AttsContext extends ParserRuleContext {
    public FieldsContext fields() {
      return getRuleContext(FieldsContext.class,0);
    }
    public TerminalNode ATTS() { return getToken(BreezeParser.ATTS, 0); }
    public AttsContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_atts; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterAtts(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitAtts(this);
    }
  }
  // atts : ATTS fields? ; — optional fields when lookahead is LPAREN.
  public final AttsContext atts() throws RecognitionException {
    AttsContext _localctx = new AttsContext(_ctx, getState());
    enterRule(_localctx, 10, RULE_atts);
    int _la;
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(46); match(ATTS);
      setState(48);
      _la = _input.LA(1);
      if (_la==LPAREN) {
        {
        setState(47); fields();
        }
      }
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: constructor : CONID fields? ;
  public static class ConstructorContext extends ParserRuleContext {
    public TerminalNode CONID() { return getToken(BreezeParser.CONID, 0); }
    public FieldsContext fields() {
      return getRuleContext(FieldsContext.class,0);
    }
    public ConstructorContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_constructor; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterConstructor(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitConstructor(this);
    }
  }
  // constructor : CONID fields? ; — optional fields when lookahead is LPAREN.
  public final ConstructorContext constructor() throws RecognitionException {
    ConstructorContext _localctx = new ConstructorContext(_ctx, getState());
    enterRule(_localctx, 12, RULE_constructor);
    int _la;
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(50); match(CONID);
      setState(52);
      _la = _input.LA(1);
      if (_la==LPAREN) {
        {
        setState(51); fields();
        }
      }
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: fields : LPAREN field (COMMA field)* RPAREN ;
  // Labeled fields are accumulated in the `fs` list.
  public static class FieldsContext extends ParserRuleContext {
    public FieldContext field;
    public List<FieldContext> fs = new ArrayList<FieldContext>();
    public List<FieldContext> field() {
      return getRuleContexts(FieldContext.class);
    }
    public TerminalNode RPAREN() { return getToken(BreezeParser.RPAREN, 0); }
    public TerminalNode COMMA(int i) {
      return getToken(BreezeParser.COMMA, i);
    }
    public FieldContext field(int i) {
      return getRuleContext(FieldContext.class,i);
    }
    public List<TerminalNode> COMMA() { return getTokens(BreezeParser.COMMA); }
    public TerminalNode LPAREN() { return getToken(BreezeParser.LPAREN, 0); }
    public FieldsContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_fields; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterFields(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitFields(this);
    }
  }
  // fields : LPAREN field (COMMA field)* RPAREN ;
  public final FieldsContext fields() throws RecognitionException {
    FieldsContext _localctx = new FieldsContext(_ctx, getState());
    enterRule(_localctx, 14, RULE_fields);
    int _la;
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(54); match(LPAREN);
      setState(55); ((FieldsContext)_localctx).field = field();
      ((FieldsContext)_localctx).fs.add(((FieldsContext)_localctx).field);
      setState(60);
      _errHandler.sync(this);
      _la = _input.LA(1);
      while (_la==COMMA) {
        {
        {
        setState(56); match(COMMA);
        setState(57); ((FieldsContext)_localctx).field = field();
        ((FieldsContext)_localctx).fs.add(((FieldsContext)_localctx).field);
        }
        }
        setState(62);
        _errHandler.sync(this);
        _la = _input.LA(1);
      }
      setState(63); match(RPAREN);
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: field : TYPEID (QUESTION | SPLAT)? id? ;
  public static class FieldContext extends ParserRuleContext {
    public IdContext id() {
      return getRuleContext(IdContext.class,0);
    }
    public TerminalNode QUESTION() { return getToken(BreezeParser.QUESTION, 0); }
    public TerminalNode SPLAT() { return getToken(BreezeParser.SPLAT, 0); }
    public TerminalNode TYPEID() { return getToken(BreezeParser.TYPEID, 0); }
    public FieldContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_field; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterField(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitField(this);
    }
  }
  // field : TYPEID (QUESTION | SPLAT)? id? ;
  public final FieldContext field() throws RecognitionException {
    FieldContext _localctx = new FieldContext(_ctx, getState());
    enterRule(_localctx, 16, RULE_field);
    int _la;
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(65); match(TYPEID);
      setState(67);
      _la = _input.LA(1);
      if (_la==QUESTION || _la==SPLAT) {
        {
        setState(66);
        _la = _input.LA(1);
        if ( !(_la==QUESTION || _la==SPLAT) ) {
        _errHandler.recoverInline(this);
        }
        consume();
        }
      }
      setState(70);
      _la = _input.LA(1);
      if (_la==TYPEID || _la==CONID) {
        {
        setState(69); id();
        }
      }
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Context for rule: id : TYPEID | CONID ;
  public static class IdContext extends ParserRuleContext {
    public TerminalNode CONID() { return getToken(BreezeParser.CONID, 0); }
    public TerminalNode TYPEID() { return getToken(BreezeParser.TYPEID, 0); }
    public IdContext(ParserRuleContext parent, int invokingState) {
      super(parent, invokingState);
    }
    @Override public int getRuleIndex() { return RULE_id; }
    @Override
    public void enterRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).enterId(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
      if ( listener instanceof BreezeListener ) ((BreezeListener)listener).exitId(this);
    }
  }
  // id : TYPEID | CONID ;
  public final IdContext id() throws RecognitionException {
    IdContext _localctx = new IdContext(_ctx, getState());
    enterRule(_localctx, 18, RULE_id);
    int _la;
    try {
      enterOuterAlt(_localctx, 1);
      {
      setState(72);
      _la = _input.LA(1);
      if ( !(_la==TYPEID || _la==CONID) ) {
      _errHandler.recoverInline(this);
      }
      consume();
      }
    }
    catch (RecognitionException re) {
      _localctx.exception = re;
      _errHandler.reportError(this, re);
      _errHandler.recover(this, re);
    }
    finally {
      exitRule();
    }
    return _localctx;
  }
  // Serialized ATN emitted by the ANTLR tool; deserialized once at class-load time.
  public static final String _serializedATN =
    "\2\3\20M\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t"+
    "\t\4\n\t\n\4\13\t\13\3\2\6\2\30\n\2\r\2\16\2\31\3\3\3\3\3\3\3\3\3\4\3"+
    "\4\5\4\"\n\4\3\5\3\5\3\6\3\6\3\6\7\6)\n\6\f\6\16\6,\13\6\3\6\5\6/\n\6"+
    "\3\7\3\7\5\7\63\n\7\3\b\3\b\5\b\67\n\b\3\t\3\t\3\t\3\t\7\t=\n\t\f\t\16"+
    "\t@\13\t\3\t\3\t\3\n\3\n\5\nF\n\n\3\n\5\nI\n\n\3\13\3\13\3\13\2\f\2\4"+
    "\6\b\n\f\16\20\22\24\2\4\3\n\13\3\r\16K\2\27\3\2\2\2\4\33\3\2\2\2\6!\3"+
    "\2\2\2\b#\3\2\2\2\n%\3\2\2\2\f\60\3\2\2\2\16\64\3\2\2\2\208\3\2\2\2\22"+
    "C\3\2\2\2\24J\3\2\2\2\26\30\5\4\3\2\27\26\3\2\2\2\30\31\3\2\2\2\31\27"+
    "\3\2\2\2\31\32\3\2\2\2\32\3\3\2\2\2\33\34\7\r\2\2\34\35\7\b\2\2\35\36"+
    "\5\6\4\2\36\5\3\2\2\2\37\"\5\n\6\2 \"\5\b\5\2!\37\3\2\2\2! \3\2\2\2\""+
    "\7\3\2\2\2#$\5\20\t\2$\t\3\2\2\2%*\5\16\b\2&\'\7\t\2\2\')\5\16\b\2(&\3"+
    "\2\2\2),\3\2\2\2*(\3\2\2\2*+\3\2\2\2+.\3\2\2\2,*\3\2\2\2-/\5\f\7\2.-\3"+
    "\2\2\2./\3\2\2\2/\13\3\2\2\2\60\62\7\f\2\2\61\63\5\20\t\2\62\61\3\2\2"+
    "\2\62\63\3\2\2\2\63\r\3\2\2\2\64\66\7\16\2\2\65\67\5\20\t\2\66\65\3\2"+
    "\2\2\66\67\3\2\2\2\67\17\3\2\2\289\7\5\2\29>\5\22\n\2:;\7\7\2\2;=\5\22"+
    "\n\2<:\3\2\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?A\3\2\2\2@>\3\2\2\2AB\7\6"+
    "\2\2B\21\3\2\2\2CE\7\r\2\2DF\t\2\2\2ED\3\2\2\2EF\3\2\2\2FH\3\2\2\2GI\5"+
    "\24\13\2HG\3\2\2\2HI\3\2\2\2I\23\3\2\2\2JK\t\3\2\2K\25\3\2\2\2\13\31!"+
    "*.\62\66>EH";
  public static final ATN _ATN =
    ATNSimulator.deserialize(_serializedATN.toCharArray());
  static {
    _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
  }
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.model;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;
import org.immutables.value.Value;
import java.nio.file.Path;
import java.util.Set;
import java.util.SortedSet;
@JsonAutoDetect(
fieldVisibility = JsonAutoDetect.Visibility.NONE,
getterVisibility = JsonAutoDetect.Visibility.NONE,
setterVisibility = JsonAutoDetect.Visibility.NONE)
@BuckStyleImmutable
@Value.Immutable(prehash = true)
abstract class AbstractBuildTarget
implements
Comparable<AbstractBuildTarget>,
HasBuildTarget {
private static final Ordering<Iterable<Flavor>> LEXICOGRAPHICAL_ORDERING =
Ordering.<Flavor>natural().lexicographical();
@Value.Parameter
public abstract UnflavoredBuildTarget getUnflavoredBuildTarget();
@Value.NaturalOrder
@Value.Parameter
public abstract SortedSet<Flavor> getFlavors();
@Value.Check
protected void check() {
Preconditions.checkArgument(
getFlavors().comparator() == Ordering.natural(),
"Flavors must be ordered using natural ordering.");
}
@JsonProperty("cell")
public Optional<String> getCell() {
return getUnflavoredBuildTarget().getCell();
}
public Path getCellPath() {
return getUnflavoredBuildTarget().getCellPath();
}
@JsonProperty("baseName")
public String getBaseName() {
return getUnflavoredBuildTarget().getBaseName();
}
public String getBaseNameWithSlash() {
return getUnflavoredBuildTarget().getBaseNameWithSlash();
}
public Path getBasePath() {
return getUnflavoredBuildTarget().getBasePath();
}
public String getBasePathWithSlash() {
return getUnflavoredBuildTarget().getBasePathWithSlash();
}
@JsonProperty("shortName")
public String getShortName() {
return getUnflavoredBuildTarget().getShortName();
}
/**
* If this build target were //third_party/java/guava:guava-latest, then this would return
* "guava-latest". Note that the flavor of the target is included here.
*/
public String getShortNameAndFlavorPostfix() {
return getShortName() + getFlavorPostfix();
}
public String getFlavorPostfix() {
if (getFlavors().isEmpty()) {
return "";
}
return "#" + getFlavorsAsString();
}
@JsonProperty("flavor")
private String getFlavorsAsString() {
return Joiner.on(",").join(getFlavors());
}
/**
* If this build target is //third_party/java/guava:guava-latest, then this would return
* "//third_party/java/guava:guava-latest".
*/
@Value.Derived
public String getFullyQualifiedName() {
return getUnflavoredBuildTarget().getFullyQualifiedName() + getFlavorPostfix();
}
@JsonIgnore
public boolean isFlavored() {
return !(getFlavors().isEmpty());
}
public UnflavoredBuildTarget checkUnflavored() {
Preconditions.checkState(!isFlavored(), "%s is flavored.", this);
return getUnflavoredBuildTarget();
}
public static BuildTarget of(UnflavoredBuildTarget unflavoredBuildTarget) {
return BuildTarget.of(
unflavoredBuildTarget,
ImmutableSortedSet.<Flavor>of());
}
/** @return a builder pre-populated with {@code buildTarget}'s unflavored part and flavors. */
public static BuildTarget.Builder builder(BuildTarget buildTarget) {
  return BuildTarget
      .builder()
      .setUnflavoredBuildTarget(buildTarget.getUnflavoredBuildTarget())
      .addAllFlavors(buildTarget.getFlavors());
}
/** @return a builder seeded with {@code buildTarget} and, initially, no flavors. */
public static BuildTarget.Builder builder(UnflavoredBuildTarget buildTarget) {
  return BuildTarget
      .builder()
      .setUnflavoredBuildTarget(buildTarget);
}
/** @return a builder for a target in cell {@code cellPath} with no explicit cell name (absent). */
public static BuildTarget.Builder builder(Path cellPath, String baseName, String shortName) {
  return BuildTarget
      .builder()
      .setUnflavoredBuildTarget(
          UnflavoredBuildTarget.of(cellPath, Optional.<String>absent(), baseName, shortName));
}
/** @return the same value as {@link #getFullyQualifiedName()} */
@Override
public String toString() {
  return getFullyQualifiedName();
}
// Orders first by the unflavored target, then lexicographically by the flavor sets.
// The identity check is a cheap fast path for comparing an instance with itself.
@Override
public int compareTo(AbstractBuildTarget o) {
  if (this == o) {
    return 0;
  }
  return ComparisonChain.start()
      .compare(getUnflavoredBuildTarget(), o.getUnflavoredBuildTarget())
      .compare(getFlavors(), o.getFlavors(), LEXICOGRAPHICAL_ORDERING)
      .result();
}
/** @return this instance materialized as a concrete immutable {@link BuildTarget} copy. */
@Override
public BuildTarget getBuildTarget() {
  return BuildTarget.copyOf(this);
}
/**
 * Returns a copy of this target with every flavor in {@code flavors} removed. Flavors
 * not present on this target are ignored; the unflavored part is unchanged.
 */
public BuildTarget withoutFlavors(Set<Flavor> flavors) {
  BuildTarget.Builder kept = BuildTarget.builder()
      .setUnflavoredBuildTarget(getUnflavoredBuildTarget());
  for (Flavor candidate : getFlavors()) {
    if (flavors.contains(candidate)) {
      continue;
    }
    kept.addFlavors(candidate);
  }
  return kept.build();
}
/**
 * @return a copy of this target with the same base name, short name, cell path and
 *     flavors, but no explicit cell name (built via the cell-path builder overload)
 */
public BuildTarget withoutCell() {
  return BuildTarget.builder(
      getUnflavoredBuildTarget().getCellPath(),
      getBaseName(),
      getShortName())
      .addAllFlavors(getFlavors())
      .build();
}
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.annotations.processors;
import java.io.FileNotFoundException;
import java.util.List;
import javax.tools.Diagnostic;
import javax.tools.Diagnostic.Kind;
import javax.tools.JavaFileObject;
import org.junit.Test;
import static org.junit.Assert.*;
public class PerspectiveProcessorTest extends AbstractProcessorTest {

    /** Holds the code emitted by the processor (actual) and the resource it is compared against (expected). */
    final Result result = new Result();

    @Override
    protected AbstractErrorAbsorbingProcessor getProcessorUnderTest() {
        return new PerspectiveProcessor(new GenerationCompleteCallback() {
            @Override
            public void generationComplete(String code) {
                result.setActualCode(code);
            }
        });
    }

    /**
     * Compiles {@code pathCompilationUnit}, then asserts that compilation succeeded and that the
     * processor generated exactly the code stored in {@code pathExpectedResult}. Extracted because
     * this sequence was duplicated verbatim across most test methods below.
     *
     * @param pathCompilationUnit classpath-relative path of the annotated source under test
     * @param pathExpectedResult  classpath-relative path of the ".expected" generated-code resource
     * @param dumpDiagnostics     when true, prints all compiler diagnostics to stdout (debugging
     *                            aid kept from the original tests that called printDiagnostics)
     */
    private void assertSuccessfulCodeGeneration(final String pathCompilationUnit,
                                                final String pathExpectedResult,
                                                final boolean dumpDiagnostics) throws FileNotFoundException {
        result.setExpectedCode(getExpectedSourceCode(pathExpectedResult));
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(
                getProcessorUnderTest(),
                pathCompilationUnit);
        if (dumpDiagnostics) {
            printDiagnostics(diagnostics);
        }
        assertSuccessfulCompilation(diagnostics);
        assertNotNull(result.getActualCode());
        assertNotNull(result.getExpectedCode());
        assertEquals(result.getExpectedCode(),
                     result.getActualCode());
    }

    @Test
    public void testNoPerspectiveAnnotation() throws FileNotFoundException {
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(
                getProcessorUnderTest(),
                "org/uberfire/annotations/processors/PerspectiveTest1");
        assertSuccessfulCompilation(diagnostics);
        // No annotation present, so the processor must not emit any code.
        assertNull(result.getActualCode());
    }

    @Test
    public void testIncorrectReturnTypeWithoutArguments() throws FileNotFoundException {
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(
                getProcessorUnderTest(),
                "org/uberfire/annotations/processors/PerspectiveTest2");
        assertCompilationMessage(diagnostics,
                                 Kind.ERROR,
                                 10,
                                 17,
                                 "Methods annotated with @Perspective must return org.uberfire.workbench.model.PerspectiveDefinition");
        assertNull(result.getActualCode());
    }

    @Test
    public void testCorrectReturnTypeWithArguments() throws FileNotFoundException {
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(
                getProcessorUnderTest(),
                "org/uberfire/annotations/processors/PerspectiveTest3");
        assertCompilationMessage(diagnostics,
                                 Kind.ERROR,
                                 Diagnostic.NOPOS,
                                 Diagnostic.NOPOS,
                                 "A WorkbenchPerspective class must have either a valid @Perspective method or at least one @WorkbenchPanel field.");
        assertNull(result.getActualCode());
    }

    @Test
    public void testCorrectReturnTypeWithoutArguments() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest4",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest4.expected",
                                       false);
    }

    @Test
    public void testCorrectReturnTypeWithoutArgumentsIsDefault() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest5",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest5.expected",
                                       false);
    }

    @Test
    public void testCorrectReturnTypeWithAllAnnotationsOnStart() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest6",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest6.expected",
                                       false);
    }

    @Test
    public void testCorrectReturnTypeWithAllAnnotationsOnStartWithPath() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest7",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest7.expected",
                                       false);
    }

    @Test
    public void testWorkbenchMenuAnnotationCorrectReturnType() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest8",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest8.expected",
                                       false);
    }

    @Test
    public void testWorkbenchMenuAnnotationWrongReturnType() throws FileNotFoundException {
        // NOTE(review): despite the name, the original test expected successful compilation
        // and matching generated code; behavior preserved as-is.
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest9",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest9.expected",
                                       false);
    }

    @Test
    public void testWorkbenchToolBarAnnotationCorrectReturnType() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest10",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest10.expected",
                                       false);
    }

    @Test
    public void testWorkbenchToolBarAnnotationWrongReturnType() throws FileNotFoundException {
        // NOTE(review): same as testWorkbenchMenuAnnotationWrongReturnType — the original
        // asserted success; preserved unchanged.
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest11",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest11.expected",
                                       false);
    }

    @Test
    public void testWorkbenchTemplateAnnotation() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest12",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest12.expected",
                                       true);
    }

    @Test
    public void testWorkbenchTemplateAnnotationWithOnlyWorkbenchParts() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest13",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest13.expected",
                                       true);
    }

    @Test
    public void testWorkbenchTemplateAnnotationMustHaveWorkbenchPanelsOrParts() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/PerspectiveTest14";
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(getProcessorUnderTest(),
                                                                               pathCompilationUnit);
        assertFailedCompilation(diagnostics);
    }

    @Test
    public void testWorkbenchTemplateAnnotationShouldNotAllowTwoDefaultWorkbenchPanels() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/PerspectiveTest15";
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(getProcessorUnderTest(),
                                                                               pathCompilationUnit);
        assertCompilationMessage(diagnostics,
                                 Kind.ERROR,
                                 15,
                                 8,
                                 "Found more than one @WorkbenchPanel with isDefault=true.");
    }

    @Test
    public void testWorkbenchTemplateAnnotationWithNoDefaultWorkbenchPanel() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest16",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest16.expected",
                                       false);
    }

    @Test
    public void testAlonePartAnnotationShouldGenerateDefaultPanel() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest17",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest17.expected",
                                       true);
    }

    @Test
    public void testAlonePartsAnnotationShouldGenerateDefaultPanel() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest18",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest18.expected",
                                       true);
    }

    @Test
    public void testPartsAnnotationShouldReceiveParameters() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest19",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest19.expected",
                                       true);
    }

    @Test
    public void testPerspectiveWithActivator() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest20",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest20.expected",
                                       false);
    }

    @Test
    public void testNonTransientPerspective() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest21",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest21.expected",
                                       false);
    }

    @Test
    public void twoDefaultPerspectivesShouldGenerateAnError() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/PerspectiveTest19";
        final String pathCompilationUnit2 = "org/uberfire/annotations/processors/PerspectiveTest7";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/PerspectiveTest19.expected";
        result.setExpectedCode(getExpectedSourceCode(pathExpectedResult));
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(getProcessorUnderTest(),
                                                                               pathCompilationUnit,
                                                                               pathCompilationUnit2);
        assertFailedCompilation(diagnostics);
        assertCompilationMessage(diagnostics,
                                 Kind.ERROR,
                                 -1,
                                 -1,
                                 "Found too many default WorkbenchPerspectives (expected 1). Found: (HomePerspective, PerspectiveTest7).");
        assertNotNull(result.getActualCode());
        assertNotNull(result.getExpectedCode());
    }

    @Test
    public void testWorkbenchPerspectivesOnStartMultipleMethods() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/PerspectiveTest22";
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(getProcessorUnderTest(),
                                                                               pathCompilationUnit);
        assertCompilationMessage(diagnostics,
                                 Kind.ERROR,
                                 36,
                                 17,
                                 "Found multiple @OnStartup methods. Each class can declare at most one.");
        assertFailedCompilation(diagnostics);
    }

    @Test
    public void combiningPerspectiveMethodAndWorkbenchPanelFieldsShouldGenerateAnError() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/PerspectiveTest23";
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(getProcessorUnderTest(),
                                                                               pathCompilationUnit);
        assertFailedCompilation(diagnostics);
        assertCompilationMessage(diagnostics,
                                 Kind.ERROR,
                                 19,
                                 8,
                                 "This WorkbenchPerspective has both a @Perspective method and a @WorkbenchPanel field. Only one or the other is allowed.");
    }

    @Test
    public void testDynamicPerspective() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest24",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest24.expected",
                                       false);
    }

    @Test
    public void testQualifiersInTheActivity() throws FileNotFoundException {
        assertSuccessfulCodeGeneration("org/uberfire/annotations/processors/PerspectiveTest25",
                                       "org/uberfire/annotations/processors/expected/PerspectiveTest25.expected",
                                       false);
    }

    /** Debug aid: dumps every compiler diagnostic to stdout. */
    private void printDiagnostics(List<Diagnostic<? extends JavaFileObject>> diagnostics) {
        for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics) {
            System.out.println(diagnostic);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.clouddeploy.v1.model;
/**
* A `Target` resource in the Google Cloud Deploy API. A `Target` defines a location to which a
* Skaffold configuration can be deployed.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Cloud Deploy API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Target extends com.google.api.client.json.GenericJson {

  // NOTE(review): generated model class (google-api-java-client-services). Regenerate rather
  // than hand-edit; every setter intentionally returns `this` for call chaining.

  /**
   * Optional. User annotations. These attributes can only be set and used by the user, and not by
   * Google Cloud Deploy. See https://google.aip.dev/128#annotations for more details such as format
   * and size limitations.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> annotations;

  /**
   * Information specifying an Anthos Cluster.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private AnthosCluster anthosCluster;

  /**
   * Output only. Time at which the `Target` was created.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String createTime;

  /**
   * Optional. Description of the `Target`. Max length is 255 characters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * Optional. This checksum is computed by the server based on the value of other fields, and may
   * be sent on update and delete requests to ensure the client has an up-to-date value before
   * proceeding.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String etag;

  /**
   * Configurations for all execution that relates to this `Target`. Each
   * `ExecutionEnvironmentUsage` value may only be used in a single configuration; using the same
   * value multiple times is an error. When one or more configurations are specified, they must
   * include the `RENDER` and `DEPLOY` `ExecutionEnvironmentUsage` values. When no configurations
   * are specified, execution will use the default specified in `DefaultPool`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ExecutionConfig> executionConfigs;

  static {
    // hack to force ProGuard to consider ExecutionConfig used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(ExecutionConfig.class);
  }

  /**
   * Information specifying a GKE Cluster.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GkeCluster gke;

  /**
   * Optional. Labels are attributes that can be set and used by both the user and by Google Cloud
   * Deploy. Labels must meet the following constraints: * Keys and values can contain only
   * lowercase letters, numeric characters, underscores, and dashes. * All characters must use UTF-8
   * encoding, and international characters are allowed. * Keys must start with a lowercase letter
   * or international character. * Each resource is limited to a maximum of 64 labels. Both keys and
   * values are additionally constrained to be <= 128 bytes.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> labels;

  /**
   * Optional. Name of the `Target`. Format is
   * projects/{project}/locations/{location}/targets/a-z{0,62}.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Optional. Whether or not the `Target` requires approval.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean requireApproval;

  /**
   * Output only. Resource id of the `Target`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String targetId;

  /**
   * Output only. Unique identifier of the `Target`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String uid;

  /**
   * Output only. Most recent time at which the `Target` was updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String updateTime;

  /**
   * Optional. User annotations. These attributes can only be set and used by the user, and not by
   * Google Cloud Deploy. See https://google.aip.dev/128#annotations for more details such as format
   * and size limitations.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.String> getAnnotations() {
    return annotations;
  }

  /**
   * Optional. User annotations. These attributes can only be set and used by the user, and not by
   * Google Cloud Deploy. See https://google.aip.dev/128#annotations for more details such as format
   * and size limitations.
   * @param annotations annotations or {@code null} for none
   */
  public Target setAnnotations(java.util.Map<String, java.lang.String> annotations) {
    this.annotations = annotations;
    return this;
  }

  /**
   * Information specifying an Anthos Cluster.
   * @return value or {@code null} for none
   */
  public AnthosCluster getAnthosCluster() {
    return anthosCluster;
  }

  /**
   * Information specifying an Anthos Cluster.
   * @param anthosCluster anthosCluster or {@code null} for none
   */
  public Target setAnthosCluster(AnthosCluster anthosCluster) {
    this.anthosCluster = anthosCluster;
    return this;
  }

  /**
   * Output only. Time at which the `Target` was created.
   * @return value or {@code null} for none
   */
  public String getCreateTime() {
    return createTime;
  }

  /**
   * Output only. Time at which the `Target` was created.
   * @param createTime createTime or {@code null} for none
   */
  public Target setCreateTime(String createTime) {
    this.createTime = createTime;
    return this;
  }

  /**
   * Optional. Description of the `Target`. Max length is 255 characters.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * Optional. Description of the `Target`. Max length is 255 characters.
   * @param description description or {@code null} for none
   */
  public Target setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /**
   * Optional. This checksum is computed by the server based on the value of other fields, and may
   * be sent on update and delete requests to ensure the client has an up-to-date value before
   * proceeding.
   * @return value or {@code null} for none
   */
  public java.lang.String getEtag() {
    return etag;
  }

  /**
   * Optional. This checksum is computed by the server based on the value of other fields, and may
   * be sent on update and delete requests to ensure the client has an up-to-date value before
   * proceeding.
   * @param etag etag or {@code null} for none
   */
  public Target setEtag(java.lang.String etag) {
    this.etag = etag;
    return this;
  }

  /**
   * Configurations for all execution that relates to this `Target`. Each
   * `ExecutionEnvironmentUsage` value may only be used in a single configuration; using the same
   * value multiple times is an error. When one or more configurations are specified, they must
   * include the `RENDER` and `DEPLOY` `ExecutionEnvironmentUsage` values. When no configurations
   * are specified, execution will use the default specified in `DefaultPool`.
   * @return value or {@code null} for none
   */
  public java.util.List<ExecutionConfig> getExecutionConfigs() {
    return executionConfigs;
  }

  /**
   * Configurations for all execution that relates to this `Target`. Each
   * `ExecutionEnvironmentUsage` value may only be used in a single configuration; using the same
   * value multiple times is an error. When one or more configurations are specified, they must
   * include the `RENDER` and `DEPLOY` `ExecutionEnvironmentUsage` values. When no configurations
   * are specified, execution will use the default specified in `DefaultPool`.
   * @param executionConfigs executionConfigs or {@code null} for none
   */
  public Target setExecutionConfigs(java.util.List<ExecutionConfig> executionConfigs) {
    this.executionConfigs = executionConfigs;
    return this;
  }

  /**
   * Information specifying a GKE Cluster.
   * @return value or {@code null} for none
   */
  public GkeCluster getGke() {
    return gke;
  }

  /**
   * Information specifying a GKE Cluster.
   * @param gke gke or {@code null} for none
   */
  public Target setGke(GkeCluster gke) {
    this.gke = gke;
    return this;
  }

  /**
   * Optional. Labels are attributes that can be set and used by both the user and by Google Cloud
   * Deploy. Labels must meet the following constraints: * Keys and values can contain only
   * lowercase letters, numeric characters, underscores, and dashes. * All characters must use UTF-8
   * encoding, and international characters are allowed. * Keys must start with a lowercase letter
   * or international character. * Each resource is limited to a maximum of 64 labels. Both keys and
   * values are additionally constrained to be <= 128 bytes.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.String> getLabels() {
    return labels;
  }

  /**
   * Optional. Labels are attributes that can be set and used by both the user and by Google Cloud
   * Deploy. Labels must meet the following constraints: * Keys and values can contain only
   * lowercase letters, numeric characters, underscores, and dashes. * All characters must use UTF-8
   * encoding, and international characters are allowed. * Keys must start with a lowercase letter
   * or international character. * Each resource is limited to a maximum of 64 labels. Both keys and
   * values are additionally constrained to be <= 128 bytes.
   * @param labels labels or {@code null} for none
   */
  public Target setLabels(java.util.Map<String, java.lang.String> labels) {
    this.labels = labels;
    return this;
  }

  /**
   * Optional. Name of the `Target`. Format is
   * projects/{project}/locations/{location}/targets/a-z{0,62}.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Optional. Name of the `Target`. Format is
   * projects/{project}/locations/{location}/targets/a-z{0,62}.
   * @param name name or {@code null} for none
   */
  public Target setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * Optional. Whether or not the `Target` requires approval.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getRequireApproval() {
    return requireApproval;
  }

  /**
   * Optional. Whether or not the `Target` requires approval.
   * @param requireApproval requireApproval or {@code null} for none
   */
  public Target setRequireApproval(java.lang.Boolean requireApproval) {
    this.requireApproval = requireApproval;
    return this;
  }

  /**
   * Output only. Resource id of the `Target`.
   * @return value or {@code null} for none
   */
  public java.lang.String getTargetId() {
    return targetId;
  }

  /**
   * Output only. Resource id of the `Target`.
   * @param targetId targetId or {@code null} for none
   */
  public Target setTargetId(java.lang.String targetId) {
    this.targetId = targetId;
    return this;
  }

  /**
   * Output only. Unique identifier of the `Target`.
   * @return value or {@code null} for none
   */
  public java.lang.String getUid() {
    return uid;
  }

  /**
   * Output only. Unique identifier of the `Target`.
   * @param uid uid or {@code null} for none
   */
  public Target setUid(java.lang.String uid) {
    this.uid = uid;
    return this;
  }

  /**
   * Output only. Most recent time at which the `Target` was updated.
   * @return value or {@code null} for none
   */
  public String getUpdateTime() {
    return updateTime;
  }

  /**
   * Output only. Most recent time at which the `Target` was updated.
   * @param updateTime updateTime or {@code null} for none
   */
  public Target setUpdateTime(String updateTime) {
    this.updateTime = updateTime;
    return this;
  }

  /** Covariant override so generic field assignment keeps the {@code Target} type for chaining. */
  @Override
  public Target set(String fieldName, Object value) {
    return (Target) super.set(fieldName, value);
  }

  /** Covariant override narrowing the return type of {@code GenericJson.clone()}. */
  @Override
  public Target clone() {
    return (Target) super.clone();
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
/*
* XSD/DTD Model generator tool
*
* By Gregory Shrago
* 2002 - 2006
*/
package org.jetbrains.idea.devkit.dom.generator;
import org.apache.xerces.impl.dv.XSSimpleType;
import org.apache.xerces.impl.dv.xs.XSSimpleTypeDecl;
import org.apache.xerces.impl.xs.XSAttributeGroupDecl;
import org.apache.xerces.impl.xs.XSComplexTypeDecl;
import org.apache.xerces.impl.xs.XSElementDecl;
import org.apache.xerces.impl.xs.XSParticleDecl;
import org.apache.xerces.impl.xs.util.XSObjectListImpl;
import org.apache.xerces.xni.parser.XMLEntityResolver;
import org.apache.xerces.xs.*;
import org.w3c.dom.DOMConfiguration;
import org.w3c.dom.DOMError;
import org.w3c.dom.DOMErrorHandler;
import org.w3c.dom.TypeInfo;
import org.w3c.dom.bootstrap.DOMImplementationRegistry;
import javax.xml.namespace.QName;
import java.io.File;
import java.util.*;
/**
* @author Gregory.Shrago
* @author Konstantin Bulenkov
*/
@SuppressWarnings("HardCodedStringLiteral")
public class XSDModelLoader implements ModelLoader {
private static final boolean TEXT_ELEMENTS_ARE_COMPLEX = false;
private ModelDesc model;
/**
 * {@link ModelLoader} entry point: remembers the destination {@link ModelDesc}
 * and processes all given schema files into it (non-XSD files are skipped
 * inside {@code processSchemas}).
 *
 * @param model    destination model populated by this loader
 * @param files    candidate schema files
 * @param resolver entity resolver handed to the underlying XS loader
 * @throws Exception if schema loading or processing fails
 */
@Override
public void loadModel(ModelDesc model, Collection<File> files, XMLEntityResolver resolver) throws Exception {
    this.model = model;
    processSchemas(files, resolver);
}
/**
 * Decides whether the given type definition should be generated as a complex
 * (class-producing) type: element content, abstract complex decls, or a complex
 * type carrying at least one attribute other than "ID".
 *
 * @param td type definition to classify
 * @return {@code true} if the type maps to a generated class
 */
public static boolean checkComplexType(XSTypeDefinition td) {
    if (td.getTypeCategory() != XSTypeDefinition.COMPLEX_TYPE) {
        return false;
    }
    final XSComplexTypeDefinition complex = (XSComplexTypeDefinition) td;
    if (complex.getContentType() == XSComplexTypeDefinition.CONTENTTYPE_ELEMENT) {
        return true;
    }
    if (td instanceof XSComplexTypeDecl && ((XSComplexTypeDecl) td).getAbstract()) {
        return true;
    }
    if (TEXT_ELEMENTS_ARE_COMPLEX) {
        return true;
    }
    final XSObjectList uses = complex.getAttributeUses();
    if (uses != null) {
        for (int i = 0; i < uses.getLength(); i++) {
            final XSSimpleTypeDefinition attrType =
                    ((XSAttributeUse) uses.item(i)).getAttrDeclaration().getTypeDefinition();
            // a lone "ID" attribute does not make the type complex
            if (!"ID".equals(attrType.getName())) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Returns {@code true} when the type definition is backed by a simple type with
 * a non-empty lexical enumeration — i.e. it should be generated as an enum.
 * Complex types qualify only with simple content and no attributes besides "ID".
 *
 * @param td type definition to classify
 */
public static boolean checkEnumType(XSTypeDefinition td) {
    final XSSimpleTypeDefinition simple;
    if (td.getTypeCategory() == XSTypeDefinition.SIMPLE_TYPE) {
        simple = (XSSimpleTypeDefinition) td;
    } else if (td.getTypeCategory() == XSTypeDefinition.COMPLEX_TYPE) {
        final XSComplexTypeDefinition complex = (XSComplexTypeDefinition) td;
        if (complex.getContentType() != XSComplexTypeDefinition.CONTENTTYPE_SIMPLE) {
            return false;
        }
        final XSObjectList uses = complex.getAttributeUses();
        if (uses != null) {
            for (int i = 0; i < uses.getLength(); i++) {
                final XSSimpleTypeDefinition attrType =
                        ((XSAttributeUse) uses.item(i)).getAttrDeclaration().getTypeDefinition();
                // any attribute besides "ID" disqualifies the enum mapping
                if (!"ID".equals(attrType.getName())) {
                    return false;
                }
            }
        }
        simple = complex.getSimpleType();
    } else {
        return false;
    }
    final StringList values = simple.getLexicalEnumeration();
    return values != null && values.getLength() != 0;
}
/**
 * Heuristic: a simple type restricted with a {@code length} facet of "0" cannot
 * carry text content, so it is mapped to a boolean presence flag.
 *
 * @param td type definition to inspect
 */
private static boolean checkBooleanType(XSTypeDefinition td) {
    if (td.getTypeCategory() != XSTypeDefinition.SIMPLE_TYPE) {
        return false;
    }
    final XSObjectList facets = ((XSSimpleTypeDefinition) td).getFacets();
    for (int i = 0; i < facets.getLength(); i++) {
        final XSFacet facet = (XSFacet) facets.item(i);
        if (facet.getFacetKind() == XSSimpleTypeDefinition.FACET_LENGTH
                && "0".equals(facet.getLexicalFacetValue())) {
            return true;
        }
    }
    return false;
}
/**
 * Loads a single XSD file into an {@link XSModel} and records in
 * {@code model.qname2FileMap} which schema file contributed each globally
 * declared element ("a;b" separated) and type (":file:" markers).
 *
 * <p>Fix: the original set the {@code "error-handler"} parameter twice
 * (before and after {@code "validate"}); the redundant call is removed.
 *
 * @param schemaFile schema file to load
 * @param resolver   entity resolver used for includes/imports
 * @return the parsed schema model, or {@code null} if loading failed
 * @throws Exception if the XS loader implementation cannot be created
 */
private XSModel loadSchema(File schemaFile, XMLEntityResolver resolver) throws Exception {
    // get DOM Implementation using DOM Registry
    System.setProperty(
            DOMImplementationRegistry.PROPERTY,
            "org.apache.xerces.dom.DOMXSImplementationSourceImpl");
    DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance();
    XSImplementation impl = (XSImplementation) registry.getDOMImplementation("XS-Loader");
    XSLoader schemaLoader = impl.createXSLoader(null);
    DOMConfiguration config = schemaLoader.getConfig();
    // error handler that logs every problem; returning false asks the
    // implementation to stop further processing of the erroneous construct
    DOMErrorHandler errorHandler = new DOMErrorHandler() {
        @Override
        public boolean handleError(DOMError domError) {
            Util.log("DOMError: " + domError.getMessage());
            Object relatedException = domError.getRelatedException();
            if (relatedException != null) {
                Util.log("DOMError: " + relatedException);
                if (relatedException instanceof Throwable) {
                    ((Throwable) relatedException).printStackTrace(System.out);
                }
            }
            return false;
        }
    };
    // configure the loader: error handler, validation, entity resolver
    config.setParameter("error-handler", errorHandler);
    config.setParameter("validate", Boolean.TRUE);
    config.setParameter("http://apache.org/xml/properties/internal/entity-resolver", resolver);
    XSModel model = schemaLoader.loadURI(schemaFile.getAbsolutePath());
    if (model == null) return null;
    // remember which file declared each global element
    XSNamedMap components = model.getComponents(XSConstants.ELEMENT_DECLARATION);
    for (int i = 0; i < components.getLength(); i++) {
        XSObject obj = components.item(i);
        QName qname = new QName(obj.getNamespace(), obj.getName());
        String file = this.model.qname2FileMap.get(qname);
        this.model.qname2FileMap.put(qname, (file == null ? "" : file + ";") + schemaFile.getName());
    }
    // remember which file declared each global type definition
    components = model.getComponents(XSConstants.TYPE_DEFINITION);
    for (int i = 0; i < components.getLength(); i++) {
        XSObject obj = components.item(i);
        QName qname = new QName(obj.getNamespace(), obj.getName());
        String file = this.model.qname2FileMap.get(qname);
        this.model.qname2FileMap.put(qname, (file == null ? "" : file) + ":" + schemaFile.getName() + ":");
    }
    return model;
}
/**
 * Loads every *.xsd file, registers default namespace descriptors for any
 * newly seen namespace, collects all global type definitions (plus synthetic
 * types for anonymous element types), then converts each into a TypeDesc.
 *
 * @param schemas  candidate files; directories and non-.xsd files are skipped
 * @param resolver entity resolver used while loading each schema
 * @throws Exception if loading or type processing fails
 */
public void processSchemas(Collection<File> schemas, XMLEntityResolver resolver) throws Exception {
    Map<String, NamespaceDesc> nsdMap = model.nsdMap;
    Map<String, TypeDesc> jtMap = model.jtMap;
    final NamespaceDesc nsdDef = nsdMap.get("");
    final ArrayList<XSModel> models = new ArrayList<>();
    // keyed by "name,namespace" so each global type is processed only once
    final HashMap<String, XSTypeDefinition> types = new HashMap<>();
    for (File schemaFile : schemas) {
        String fileName = schemaFile.getPath();
        if (schemaFile.isDirectory() || !fileName.endsWith(".xsd")) {
            Util.log("skipping " + fileName);
            continue;
        }
        Util.log("loading " + fileName + "..");
        final XSModel model = loadSchema(schemaFile, resolver);
        if (model == null) continue;
        // give every newly-seen namespace a descriptor derived from the default one
        final StringList namespaceList = model.getNamespaces();
        for (int i = 0; i < namespaceList.getLength(); i++) {
            String ns = namespaceList.item(i);
            if (!nsdMap.containsKey(ns)) {
                Util.log("Adding default namespace desc for: " + ns);
                NamespaceDesc nsd = new NamespaceDesc(ns, nsdDef);
                nsdMap.put(ns, nsd);
            }
        }
        models.add(model);
        // collect all global type definitions (unless their namespace is skipped)
        final XSNamedMap typeDefMap = model.getComponents(XSConstants.TYPE_DEFINITION);
        for (int i = 0; i < typeDefMap.getLength(); i++) {
            XSTypeDefinition o = (XSTypeDefinition) typeDefMap.item(i);
            NamespaceDesc nsd = nsdMap.get(o.getNamespace());
            if (nsd != null && nsd.skip) continue;
            final String key = o.getName() + "," + o.getNamespace();
            types.put(key, o);
        }
        // elements with anonymous complex types get a synthetic named type
        final XSNamedMap elementDeclMap = model.getComponents(XSConstants.ELEMENT_DECLARATION);
        for (int i = 0; i < elementDeclMap.getLength(); i++) {
            XSElementDeclaration o = (XSElementDeclaration) elementDeclMap.item(i);
            if (o.getTypeDefinition().getAnonymous() && (o.getTypeDefinition() instanceof XSComplexTypeDefinition)) {
                //types.put(o.getName() + "," + o.getNamespace(), o);
                XSComplexTypeDefinition ctd = makeTypeFromAnonymous(o);
                NamespaceDesc nsd = nsdMap.get(o.getNamespace());
                if (nsd != null && nsd.skip) continue;
                final String key = ctd.getName() + "," + ctd.getNamespace();
                types.put(key, ctd);
            }
        }
    }
    Util.log(types.size() + " elements loaded, processing..");
    ArrayList<XSTypeDefinition> toProcess = new ArrayList<>(types.values());
    ArrayList<XSComplexTypeDefinition> toAdd = new ArrayList<>();
    for (ListIterator<XSTypeDefinition> it = toProcess.listIterator(); it.hasNext();) {
        XSTypeDefinition td = it.next();
        Util.log("processing " + td.getName() + "," + td.getNamespace() + "..");
        if (checkComplexType(td)) {
            processType((XSComplexTypeDefinition) td, models, jtMap, nsdMap, toAdd);
        } else if (checkEnumType(td)) {
            processEnumType(td, jtMap, nsdMap);
        }
        // processing may synthesize new (anonymous) types; splice them into the
        // iteration via the list iterator (add, then step back to visit them next)
        if (toAdd.size() != 0) {
            for (XSComplexTypeDefinition o : toAdd) {
                final String key = o.getName() + "," + o.getNamespace();
                if (!types.containsKey(key)) {
                    Util.log(" adding " + o.getName() + "," + o.getNamespace());
                    types.put(key, o);
                    it.add(o);
                    it.previous();
                } else {
                    Util.logwarn(key + " already exists");
                }
            }
            toAdd.clear();
        }
    }
}
/**
 * Builds a named {@link XSComplexTypeDecl} from the anonymous type of an
 * element or attribute declaration, so it can be processed like a global type.
 * The synthetic type's name is the owner's name plus a suffix
 * ({@code Util.ANONYMOUS_ELEM_TYPE_SUFFIX} / {@code ANONYMOUS_ATTR_TYPE_SUFFIX}),
 * and its source-file mapping is copied from the owner in {@code qname2FileMap}.
 *
 * @param o element or attribute declaration owning the anonymous type
 * @return the synthetic complex type declaration
 */
private XSComplexTypeDefinition makeTypeFromAnonymous(XSObject o) {
    final XSComplexTypeDecl ctd = new XSComplexTypeDecl();
    if (o instanceof XSElementDeclaration && ((XSElementDeclaration) o).getTypeDefinition() instanceof XSComplexTypeDecl) {
        // clone the element's anonymous complex type under a synthetic name
        final XSComplexTypeDecl ctd1 = (XSComplexTypeDecl) ((XSElementDeclaration) o).getTypeDefinition();
        final XSObjectListImpl annotations = ctd1.getAnnotations() instanceof XSObjectListImpl ? (XSObjectListImpl) ctd1.getAnnotations() : new XSObjectListImpl();
        ctd.setValues(o.getName(), ctd1.getNamespace(), ctd1.getBaseType(), ctd1.getDerivationMethod(),
                ctd1.getFinal(), ctd1.getProhibitedSubstitutions(), ctd1.getContentType(),
                ctd1.getAbstract(), ctd1.getAttrGrp(), (XSSimpleType) ctd1.getSimpleType(),
                (XSParticleDecl) ctd1.getParticle(), annotations);
    ctd.setName(o.getName() + Util.ANONYMOUS_ELEM_TYPE_SUFFIX);
    } else if (o instanceof XSAttributeDeclaration) {
        // wrap the attribute's simple type in a complex type with simple content
        final XSSimpleTypeDecl ctd1 = (XSSimpleTypeDecl) ((XSAttributeDeclaration) o).getTypeDefinition();
        final XSObjectListImpl annotations = ctd1.getAnnotations() instanceof XSObjectListImpl ? (XSObjectListImpl) ctd1.getAnnotations() : new XSObjectListImpl();
        ctd.setValues(o.getName(), ctd1.getNamespace(), ctd1.getBaseType(), XSConstants.DERIVATION_RESTRICTION,
                ctd1.getFinal(), (short) 0, XSComplexTypeDefinition.CONTENTTYPE_SIMPLE,
                false, new XSAttributeGroupDecl(), ctd1,
                null, annotations);
        ctd.setName(o.getName() + Util.ANONYMOUS_ATTR_TYPE_SUFFIX);
    }
    // the synthetic type inherits the owner's file mapping
    model.qname2FileMap.put(new QName(ctd.getNamespace(), ctd.getName()), model.qname2FileMap.get(new QName(o.getNamespace(), o.getName())));
    return ctd;
}
/**
 * Generates an enum {@link TypeDesc} from a type whose (possibly complex)
 * simple content carries a lexical enumeration, and registers it in
 * {@code jtMap} under its qualified Java type name.
 *
 * @param def    type definition with an enumeration
 * @param jtMap  qualified-name to TypeDesc registry, updated in place
 * @param nsdMap namespace descriptors used for name mapping
 * @throws Exception propagated from name mapping
 */
public void processEnumType(XSTypeDefinition def, Map<String, TypeDesc> jtMap, Map<String, NamespaceDesc> nsdMap) throws Exception {
    final boolean complexType = def instanceof XSComplexTypeDefinition;
    if (!nsdMap.containsKey(def.getNamespace())) {
        Util.log("Namespace desc not found for: " + def);
    }
    final String typeName = toJavaTypeName(def, nsdMap);
    final TypeDesc td = new TypeDesc(def.getName(), def.getNamespace(), typeName, TypeDesc.TypeEnum.ENUM);
    final XSComplexTypeDefinition ct = complexType ? (XSComplexTypeDefinition) def : null;
    final XSSimpleTypeDefinition st = (XSSimpleTypeDefinition) (complexType ? ((XSComplexTypeDefinition) def).getSimpleType() : def);
    // one FieldDesc per enumerated lexical value
    final StringList constants = st.getLexicalEnumeration();
    for (int i = 0; i < constants.getLength(); i++) {
        final String value = constants.item(i);
        td.fdMap.put(value, new FieldDesc(Util.computeEnumConstantName(value, td.name), value));
    }
    // annotations live on the complex wrapper when present, else on the simple type
    final XSObjectList anns = complexType ? ct.getAnnotations() : st.getAnnotations();
    final String annotationText =
            anns != null && anns.getLength() > 0 ? ((XSAnnotation) anns.item(0)).getAnnotationString() : null;
    td.documentation = parseAnnotationString(
            "Enumeration " + def.getNamespace() + ":" + def.getName() + " documentation", annotationText);
    jtMap.put(model.toJavaQualifiedTypeName(def, nsdMap, true), td);
}
/**
 * Converts a complex type definition into a class {@link TypeDesc}: creates or
 * reuses a forward-declared descriptor, documents it, links its base type,
 * adds a "value" field for simple/mixed content, maps attributes (and their
 * enum types) to fields, and finally processes the content-model particles.
 *
 * @param def    complex type to convert
 * @param models all loaded schema models (used for model-group lookup)
 * @param jtMap  qualified-name to TypeDesc registry, updated in place
 * @param nsdMap namespace descriptors used for name mapping
 * @param toAdd  receives synthetic types created from anonymous declarations
 * @throws Exception propagated from name mapping
 */
public void processType(XSComplexTypeDefinition def, List<XSModel> models, Map<String, TypeDesc> jtMap, Map<String, NamespaceDesc> nsdMap, ArrayList<XSComplexTypeDefinition> toAdd) throws Exception {
    if (!nsdMap.containsKey(def.getNamespace())) {
        Util.log("Namespace desc not found for: " + def);
    }
    String typeName = toJavaTypeName(def, nsdMap);
    TypeDesc td = jtMap.get(model.toJavaQualifiedTypeName(def, nsdMap, false));
    if (td != null) {
        // an empty descriptor is a forward declaration we can fill in now
        if (td.fdMap.size() == 0) {
            // Util.log("Reusing forward decl: "+typeName);
        } else {
            Util.logerr("merging: type names collision: " + typeName);
        }
    } else {
        td = new TypeDesc(def.getName(), def.getNamespace(), typeName, TypeDesc.TypeEnum.CLASS);
    }
    XSObjectList anns = def.getAnnotations();
    td.documentation = parseAnnotationString("Type " + def.getNamespace() + ":" + def.getName() + " documentation",
            anns != null && anns.getLength() > 0 ? ((XSAnnotation) anns.item(0)).getAnnotationString() : null);
    // resolve (or forward-declare) the base class when the base is complex
    TypeDesc tdBase = null;
    if (checkComplexType(def.getBaseType())) {
        XSComplexTypeDefinition base = (XSComplexTypeDefinition) def.getBaseType();
        String typeNameBase = toJavaTypeName(base, nsdMap);
        if ((tdBase = jtMap.get(model.toJavaQualifiedTypeName(base, nsdMap, false))) == null) {
            // logwarn("forward decl: "+et);
            tdBase = new TypeDesc(base.getName(), base.getNamespace(), typeNameBase, TypeDesc.TypeEnum.CLASS);
            jtMap.put(model.toJavaQualifiedTypeName(base, nsdMap, false), tdBase);
        }
    }
    // simple or mixed content is exposed as a String "value" field
    if (def.getSimpleType() != null || def.getContentType() == XSComplexTypeDefinition.CONTENTTYPE_MIXED) {
        FieldDesc fd = new FieldDesc(FieldDesc.SIMPLE, "value", "String", null, "null", true);
        fd.realIndex = td.fdMap.size();
        td.fdMap.put(fd.name, fd);
    }
    // map each attribute use to a field
    XSObjectList attrs = def.getAttributeUses();
    for (int i = 0; i < attrs.getLength(); i++) {
        XSAttributeUse au = (XSAttributeUse) attrs.item(i);
        XSAttributeDeclaration ad = au.getAttrDeclaration();
        XSSimpleTypeDefinition atd = ad.getTypeDefinition();
        XSAnnotation ann = ad.getAnnotation();
        String documentation = parseAnnotationString("Attribute " + ad.getNamespace() + ":" + ad.getName() + " documentation", ann != null ? ann.getAnnotationString() : null);
        // skip "ID" and "FIXED"
        if ("ID".equals(atd.getName())) continue;
        // "language", "dewey-versionType", "boolean"
        if (ad.getConstraintType() == XSConstants.VC_FIXED) continue;
        FieldDesc fd1 = new FieldDesc(FieldDesc.ATTR, Util.toJavaFieldName(ad.getName()), "String", null, "null", au.getRequired());
        fd1.tagName = ad.getName();
        fd1.documentation = documentation;
        fd1.realIndex = td.fdMap.size();
        td.duplicates = Util.addToNameMap(td.fdMap, fd1, false) || td.duplicates;
        if (checkEnumType(ad.getTypeDefinition())) {
            XSTypeDefinition etRoot = ad.getTypeDefinition();
            if (etRoot.getAnonymous()) {
                etRoot = makeTypeFromAnonymous(ad);
                if (toAdd != null) toAdd.add((XSComplexTypeDefinition) etRoot);
            }
            fd1.type = toJavaTypeName(etRoot, nsdMap);
            fd1.contentQualifiedName = model.toJavaQualifiedTypeName(etRoot, nsdMap, true);
            // forward decl
            if (jtMap.get(fd1.contentQualifiedName) == null) {
                // logwarn("forward decl: "+et);
                TypeDesc ftd = new TypeDesc(etRoot.getName(), etRoot.getNamespace(), fd1.type, TypeDesc.TypeEnum.ENUM);
                jtMap.put(fd1.contentQualifiedName, ftd);
                // // anonymous (simple type) enum
                // if (ad.getTypeDefinition().getAnonymous()) {
                // processEnumType(ad.getTypeDefinition(), jtMap, nsdMap);
                // }
            }
        } else {
            fd1.simpleTypesString = getSimpleTypesString(ad.getTypeDefinition());
        }
    }
    // walk the content model starting from the root particle, if any
    LinkedList<PEntry> plist = new LinkedList<>();
    if (def.getParticle() != null) {
        plist.add(new PEntry(def.getParticle(), false));
    }
    processParticles(def, plist, nsdMap, jtMap, td, models, toAdd, tdBase);
    jtMap.put(model.toJavaQualifiedTypeName(def, nsdMap, false), td);
    if (td.fdMap.size() == 1 && def.getSimpleType() != null) {
        // calc type hierarchy for simple content
        FieldDesc fd = td.fdMap.get("value");
        fd.simpleTypesString = getSimpleTypesString(def);
    }
}
/**
 * Renders the base-type hierarchy of a (simple) type as a compact string of
 * "nsSuffix:name;" entries, walking up via {@code getBaseType()} until an
 * anySimpleType/anyType root or a null namespace is reached. Member types of
 * list/union varieties are rendered recursively inside "{...}".
 *
 * @param et starting type definition; may be {@code null}
 * @return the concatenated hierarchy string (possibly empty)
 */
private static String getSimpleTypesString(XSTypeDefinition et) {
    StringBuilder typesHierarchy = new StringBuilder();
    while (et != null && !"anySimpleType".equals(et.getName()) && !"anyType".equals(et.getName()) && et.getNamespace() != null) {
        // only the last path segment of the namespace URI is kept
        typesHierarchy.append(et.getNamespace().substring(et.getNamespace().lastIndexOf("/") + 1)).append(":").append(et.getName()).append(";");
        if (et instanceof XSSimpleType) {
            XSSimpleType simpleType = (XSSimpleType) et;
            if (simpleType.getVariety() == XSSimpleTypeDefinition.VARIETY_LIST
                    || simpleType.getVariety() == XSSimpleTypeDefinition.VARIETY_UNION) {
                XSObjectList list = simpleType.getMemberTypes();
                if (list.getLength() > 0) {
                    typesHierarchy.append("{");
                    for (int i = 0; i < list.getLength(); i++) {
                        typesHierarchy.append(getSimpleTypesString((XSTypeDefinition) list.item(i)));
                    }
                    typesHierarchy.append("}");
                }
            }
        }
        et = et.getBaseType();
    }
    return typesHierarchy.toString();
}
/**
 * Maps a named model group to a GROUP_INTERFACE {@link TypeDesc}. The group
 * definition is located by scanning all loaded models (by identity or by
 * string representation), then its particles are processed like a type body.
 *
 * @param modelGroup group occurrence to resolve
 * @param models     all loaded schema models to search
 * @param jtMap      qualified-name to TypeDesc registry, updated in place
 * @param nsdMap     namespace descriptors used for name mapping
 * @return the group's descriptor, or {@code null} when the definition cannot
 *         be found or the name collides with a non-group type
 */
private TypeDesc processGroup(XSModelGroup modelGroup, List<XSModel> models, Map<String, TypeDesc> jtMap, Map<String, NamespaceDesc> nsdMap) {
    XSModelGroupDefinition def = null;
    for (XSModel xsModel : models) {
        XSNamedMap map = xsModel.getComponents(XSConstants.MODEL_GROUP_DEFINITION);
        for (int i = 0; i < map.getLength(); i++) {
            XSModelGroupDefinition mg = (XSModelGroupDefinition) map.item(i);
            final XSModelGroup xsModelGroup = mg.getModelGroup();
            // identity match preferred; toString() comparison as a fallback
            if (xsModelGroup == modelGroup || xsModelGroup.toString().equals(modelGroup.toString())) {
                def = mg;
                break;
            }
        }
    }
    if (def == null) return null;
    if (!nsdMap.containsKey(def.getNamespace())) {
        Util.log("Namespace desc not found for: " + def);
    }
    String typeName = toJavaTypeName(def, nsdMap);
    final String typeQName = model.toJavaQualifiedTypeName(def, nsdMap, false);
    TypeDesc td = jtMap.get(typeQName);
    if (td != null) {
        if (td.type == TypeDesc.TypeEnum.GROUP_INTERFACE) {
            // already processed (or forward-declared) as a group interface
            return td;
        } else {
            Util.logerr("type-group conflict: " + typeName);
            return null;
        }
    } else {
        td = new TypeDesc(def.getName(), def.getNamespace(), typeName, TypeDesc.TypeEnum.GROUP_INTERFACE);
    }
    XSAnnotation ann = def.getAnnotation();
    td.documentation = parseAnnotationString("Type " + def.getNamespace() + ":" + def.getName() + " documentation",
            ann == null ? null : ann.getAnnotationString());
    td.type = TypeDesc.TypeEnum.GROUP_INTERFACE;
    // seed the work list with the group's direct particles
    LinkedList<PEntry> plist = new LinkedList<>();
    for (int i = 0; i < def.getModelGroup().getParticles().getLength(); i++) {
        XSParticle p = (XSParticle) def.getModelGroup().getParticles().item(i);
        plist.add(new PEntry(p, false));
    }
    processParticles(def, plist, nsdMap, jtMap, td, models, null, null);
    jtMap.put(typeQName, td);
    return td;
}
/**
 * Walks a content model's particle work list and populates {@code td.fdMap}
 * with one {@link FieldDesc} per element declaration, mapping XSD types to
 * Java field types (enum/class references, booleans for empty types, Strings
 * otherwise), pluralizing and list-wrapping repeated particles, expanding
 * nested model groups, and finally wiring choice groups and supertypes.
 *
 * <p>Fix: the "uknown choice element" log message typo is corrected to
 * "unknown choice element"; all other behavior is unchanged.
 *
 * @param def       owner of the content model (used for the merge check)
 * @param plist     work list of particles, consumed destructively
 * @param nsdMap    namespace descriptors used for name mapping
 * @param jtMap     qualified-name to TypeDesc registry, updated in place
 * @param td        descriptor receiving the generated fields
 * @param models    all loaded schema models (for named-group lookup)
 * @param toAdd     receives synthetic types from anonymous declarations; may be null
 * @param baseClass base type descriptor to record as a supertype; may be null
 */
private void processParticles(XSObject def, LinkedList<PEntry> plist, Map<String, NamespaceDesc> nsdMap, Map<String, TypeDesc> jtMap, Map<String, TypeDesc> jtMap2Unused_DO_NOT_USE, TypeDesc td, List<XSModel> models, ArrayList<XSComplexTypeDefinition> toAdd, TypeDesc baseClass) {
    throw new UnsupportedOperationException();
}
/**
 * Extracts the human-readable text of an XSD {@code <documentation>} element
 * from a raw annotation string and wraps it with an HTML header.
 *
 * <p>Fix: the original threw {@link StringIndexOutOfBoundsException} on
 * unterminated {@code <documentation>} elements or unterminated CDATA
 * sections; these malformed inputs now return {@code null} instead.
 *
 * @param title heading rendered before the documentation text
 * @param str   raw annotation XML; may be {@code null}
 * @return formatted documentation, or {@code null} when no complete
 *         {@code <documentation>} element is present
 */
public static String parseAnnotationString(String title, String str) {
    if (str == null) return null;
    // locate the opening <documentation> tag (with or without a ns prefix)
    int idx = str.indexOf(":documentation");
    if (idx == -1) idx = str.indexOf("<documentation");
    if (idx == -1) return null;
    idx = str.indexOf(">", idx + 1);
    if (idx == -1) return null;
    // locate the matching closing tag; bail out on malformed input instead of
    // crashing in substring() below
    int idx2 = str.indexOf(":documentation", idx + 1);
    if (idx2 == -1) idx2 = str.indexOf("</documentation", idx + 1);
    if (idx2 == -1) return null;
    idx2 = str.lastIndexOf("<", idx2 + 1);
    if (idx2 <= idx) return null;
    str = str.substring(idx + 1, idx2).trim();
    // unwrap an optional CDATA section
    idx = str.indexOf("<![CDATA[");
    if (idx > -1) {
        idx = str.indexOf("[", idx + 3);
        idx2 = str.indexOf("]]>", idx + 1);
        if (idx2 == -1) return null;
        str = str.substring(idx + 1, idx2);
    }
    return "<pre>\n<h3>" + title + "</h3>\n" + str + "\n</pre>";
}
/**
 * Computes the Java type name for a schema object. Anonymous objects fall
 * back to the DOM {@link TypeInfo} type name, with a leading '#' stripped,
 * before delegating to {@code model.toJavaTypeName}.
 *
 * @param xs     schema object to name
 * @param nsdMap namespace descriptors (kept for signature compatibility)
 */
public String toJavaTypeName(XSObject xs, Map<String, NamespaceDesc> nsdMap) {
    String name = xs.getName();
    if (name == null && xs instanceof TypeInfo) {
        name = ((TypeInfo) xs).getTypeName();
        if (name != null && name.startsWith("#")) {
            // DOM marks anonymous type names with a leading '#'
            name = name.substring(1);
        }
    }
    return model.toJavaTypeName(name, xs.getNamespace());
}
/** Work-list entry: a particle plus whether it occurs in a repeated context. */
public static class PEntry {
    /** The particle to process. */
    XSParticle p;
    /** True when an enclosing particle already allows multiple occurrences. */
    boolean many;

    public PEntry(XSParticle p, boolean many) {
        this.p = p;
        this.many = many;
    }
}
}
| |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.entitlement.pap.store;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.balana.*;
import org.wso2.balana.combine.PolicyCombiningAlgorithm;
import org.wso2.balana.combine.xacml2.OnlyOneApplicablePolicyAlg;
import org.wso2.balana.ctx.EvaluationCtx;
import org.wso2.balana.ctx.Status;
import org.wso2.balana.finder.PolicyFinder;
import org.wso2.balana.finder.PolicyFinderModule;
import org.wso2.balana.finder.PolicyFinderResult;
import org.wso2.carbon.identity.entitlement.EntitlementException;
import org.wso2.carbon.identity.entitlement.dto.PolicyDTO;
import org.wso2.carbon.identity.entitlement.policy.collection.DefaultPolicyCollection;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
public class PAPPolicyFinder extends PolicyFinderModule {
// the logger we'll use for all messages
private static Log log = LogFactory.getLog(PAPPolicyFinder.class);
// the list of policy URLs passed to the constructor
private PAPPolicyStoreReader policyReader;
// the map of policies
private DefaultPolicyCollection policies;
//keeps policy ids according to the order
private List<String> policyIds;
private PolicyFinder policyFinder;
// only five policies are allowed
private int maxInMemoryPolicies = 5;
/**
 * Creates a PAPPolicyFinder that provides access to the given collection of policies.
 * Any policy that cannot be loaded will be noted in the log, but will not cause an error. The
 * schema file used to validate policies is defined by the property
 * PolicyRepository.POLICY_SCHEMA_PROPERTY. If the retrieved property is null, then no schema
 * validation will occur.
 * <p>
 * NOTE(review): the schema-validation sentences above appear inherited from an
 * upstream finder module's javadoc; nothing in this class reads that property —
 * verify the claim still applies before relying on it.
 *
 * @param policyReader Policy store repository for Registry
 */
public PAPPolicyFinder(PAPPolicyStoreReader policyReader) {
    this.policyReader = policyReader;
}
/**
 * Always returns <code>true</code> since this module does support finding policies based on
 * reference.
 *
 * @return true
 */
public boolean isIdReferenceSupported() {
    return true;
}
/**
 * Always returns <code>true</code> since this module supports finding policies
 * based on an evaluation request.
 *
 * @return true
 * @see org.wso2.balana.finder.PolicyFinderModule#isRequestSupported()
 */
public boolean isRequestSupported() {
    return true;
}
/**
 * Initializes this module: remembers the finder, loads the ids of all active
 * policies, and creates the in-memory policy collection. Any
 * {@code EntitlementException} is logged and swallowed, leaving the finder
 * without a policy collection.
 *
 * @param finder the finder this module is registered with
 * @see org.wso2.balana.finder.PolicyFinderModule#init(org.wso2.balana.finder.PolicyFinder)
 */
public void init(PolicyFinder finder) {
    PolicyCombiningAlgorithm algorithm;
    this.policyFinder = finder;
    try {
        // for PAP policy store, Global policy combining algorithm is not needed. As we are
        // only evaluating one policy therefore using default algorithm.
        algorithm = new OnlyOneApplicablePolicyAlg();
        initPolicyIds();
        this.policies = new DefaultPolicyCollection(algorithm, 0);
    } catch (EntitlementException e) {
        log.error("Error while initializing PAPPolicyFinder", e);
    }
}
/**
 * Resolves a policy reference by id: reads the policy from the PAP store and
 * returns it only when its kind matches the reference type
 * ({@code Policy} for POLICY_REFERENCE, {@code PolicySet} otherwise).
 * Store read errors are logged and yield an empty result.
 *
 * @see org.wso2.balana.finder.PolicyFinderModule#findPolicy(java.net.URI, int,
 * org.wso2.balana.VersionConstraints, org.wso2.balana.PolicyMetaData)
 */
public PolicyFinderResult findPolicy(URI idReference, int type, VersionConstraints constraints,
                                     PolicyMetaData parentMetaData) {
    // clear all current policies
    policies.getPolicies().clear();
    AbstractPolicy policy = null;
    try {
        AbstractPolicy policyFromStore = policyReader.readPolicy(idReference.toString(),
                this.policyFinder);
        if (policyFromStore != null) {
            if (type == PolicyReference.POLICY_REFERENCE) {
                if (policyFromStore instanceof Policy) {
                    policy = policyFromStore;
                    policies.addPolicy(policy);
                }
            } else {
                if (policyFromStore instanceof PolicySet) {
                    policy = policyFromStore;
                    policies.addPolicy(policy);
                }
            }
        }
    } catch (EntitlementException e) {
        // ignore and just log the error.
        log.error(e);
    }
    if (policy == null) {
        return new PolicyFinderResult();
    } else {
        return new PolicyFinderResult(policy);
    }
}
/**
 * Finds the effective policy for an evaluation context: reads policies by id
 * (at most {@code maxInMemoryPolicies}), matches each against the context,
 * collects the matches, and delegates the final selection to the policy
 * collection. An INDETERMINATE match aborts with a processing-error status.
 *
 * @see org.wso2.balana.finder.PolicyFinderModule#findPolicy(org.wso2.balana.EvaluationCtx)
 */
public PolicyFinderResult findPolicy(EvaluationCtx context) {
    // clear all current policies
    policies.getPolicies().clear();
    ArrayList<AbstractPolicy> list = new ArrayList<AbstractPolicy>();
    try {
        for (String policyId : policyIds) {
            // cap the number of policies held in memory per evaluation
            if (list.size() == maxInMemoryPolicies) {
                break;
            }
            AbstractPolicy policy = null;
            try {
                policy = policyReader.readPolicy(policyId, this.policyFinder);
            } catch (EntitlementException e) {
                //log and ignore
                log.error(e);
            }
            if (policy == null) {
                continue;
            } else {
                policies.addPolicy(policy);
            }
            // see if we match
            MatchResult match = policy.match(context);
            int result = match.getResult();
            // if there was an error, we stop right away
            if (result == MatchResult.INDETERMINATE) {
                log.error(match.getStatus().getMessage());
                throw new EntitlementException(match.getStatus().getMessage());
            }
            // if we matched, we keep track of the matching policy...
            if (result == MatchResult.MATCH) {
                if (log.isDebugEnabled()) {
                    log.debug("Matching XACML policy found " + policy.getId().toString());
                }
                list.add(policy);
            }
        }
        AbstractPolicy policy = policies.getEffectivePolicy(list);
        if (policy == null) {
            return new PolicyFinderResult();
        } else {
            return new PolicyFinderResult(policy);
        }
    } catch (EntitlementException e) {
        // surface evaluation problems as a XACML processing-error status
        ArrayList<String> code = new ArrayList<String>();
        code.add(Status.STATUS_PROCESSING_ERROR);
        Status status = new Status(code, e.getMessage());
        return new PolicyFinderResult(status);
    }
}
/**
 * Sets the ids of the policies that are evaluated.
 *
 * @param policyIds policy ids, in evaluation order
 */
public void setPolicyIds(List<String> policyIds) {
    this.policyIds = policyIds;
}
/**
 * Initializes the policy id list from the policy store, retaining only the
 * policies that are currently active.
 *
 * @throws EntitlementException if the policy store cannot be read
 */
public void initPolicyIds() throws EntitlementException {
    // Assign first so a failed read still leaves a usable (empty) list.
    this.policyIds = new ArrayList<String>();
    for (PolicyDTO dto : policyReader.readAllLightPolicyDTOs()) {
        if (dto.isActive()) {
            this.policyIds.add(dto.getPolicyId());
        }
    }
}
}
| |
// XMLReaderAdapter.java - adapt an SAX2 XMLReader to a SAX1 Parser
// http://www.saxproject.org
// Written by David Megginson
// NO WARRANTY! This class is in the public domain.
// $Id: XMLReaderAdapter.java,v 1.9 2004/04/26 17:34:35 dmegginson Exp $
package org.xml.sax.helpers;
import java.io.IOException;
import java.util.Locale;
import org.xml.sax.Parser; // deprecated
import org.xml.sax.Locator;
import org.xml.sax.InputSource;
import org.xml.sax.AttributeList; // deprecated
import org.xml.sax.EntityResolver;
import org.xml.sax.DTDHandler;
import org.xml.sax.DocumentHandler; // deprecated
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXNotSupportedException;
/**
* Adapt a SAX2 XMLReader as a SAX1 Parser.
*
* <blockquote>
* <em>This module, both source code and documentation, is in the
* Public Domain, and comes with <strong>NO WARRANTY</strong>.</em>
* See <a href='http://www.saxproject.org'>http://www.saxproject.org</a>
* for further information.
* </blockquote>
*
* <p>This class wraps a SAX2 {@link org.xml.sax.XMLReader XMLReader}
* and makes it act as a SAX1 {@link org.xml.sax.Parser Parser}. The XMLReader
* must support a true value for the
* http://xml.org/sax/features/namespace-prefixes property or parsing will fail
* with a {@link org.xml.sax.SAXException SAXException}; if the XMLReader
* supports a false value for the http://xml.org/sax/features/namespaces
* property, that will also be used to improve efficiency.</p>
*
* @since SAX 2.0
* @author David Megginson
* @version 2.0.1 (sax2r2)
* @see org.xml.sax.Parser
* @see org.xml.sax.XMLReader
*/
public class XMLReaderAdapter implements Parser, ContentHandler
{

    ////////////////////////////////////////////////////////////////////
    // Constructor.
    ////////////////////////////////////////////////////////////////////

    /**
     * Create a new adapter.
     *
     * <p>Use the "org.xml.sax.driver" property to locate the SAX2
     * driver to embed.</p>
     *
     * @exception org.xml.sax.SAXException If the embedded driver
     *            cannot be instantiated or if the
     *            org.xml.sax.driver property is not specified.
     */
    public XMLReaderAdapter ()
        throws SAXException
    {
        setup(XMLReaderFactory.createXMLReader());
    }

    /**
     * Create a new adapter.
     *
     * <p>Create a new adapter, wrapped around a SAX2 XMLReader.
     * The adapter will make the XMLReader act like a SAX1
     * Parser.</p>
     *
     * @param xmlReader The SAX2 XMLReader to wrap.
     * @exception java.lang.NullPointerException If the argument is null.
     */
    public XMLReaderAdapter (XMLReader xmlReader)
    {
        setup(xmlReader);
    }

    /**
     * Internal setup.
     *
     * @param xmlReader The embedded XMLReader.
     */
    private void setup (XMLReader xmlReader)
    {
        if (xmlReader == null) {
            throw new NullPointerException("XMLReader must not be null");
        }
        this.xmlReader = xmlReader;
        // Reusable wrapper; refreshed per startElement via setAttributes.
        qAtts = new AttributesAdapter();
    }

    ////////////////////////////////////////////////////////////////////
    // Implementation of org.xml.sax.Parser.
    ////////////////////////////////////////////////////////////////////

    /**
     * Set the locale for error reporting.
     *
     * <p>This is not supported in SAX2, and will always throw
     * an exception.</p>
     *
     * @param locale the locale for error reporting.
     * @see org.xml.sax.Parser#setLocale
     * @exception org.xml.sax.SAXException Thrown unless overridden.
     */
    public void setLocale (Locale locale)
        throws SAXException
    {
        throw new SAXNotSupportedException("setLocale not supported");
    }

    /**
     * Register the entity resolver.
     *
     * @param resolver The new resolver.
     * @see org.xml.sax.Parser#setEntityResolver
     */
    public void setEntityResolver (EntityResolver resolver)
    {
        xmlReader.setEntityResolver(resolver);
    }

    /**
     * Register the DTD event handler.
     *
     * @param handler The new DTD event handler.
     * @see org.xml.sax.Parser#setDTDHandler
     */
    public void setDTDHandler (DTDHandler handler)
    {
        xmlReader.setDTDHandler(handler);
    }

    /**
     * Register the SAX1 document event handler.
     *
     * <p>Note that the SAX1 document handler has no Namespace
     * support.</p>
     *
     * @param handler The new SAX1 document event handler.
     * @see org.xml.sax.Parser#setDocumentHandler
     */
    public void setDocumentHandler (DocumentHandler handler)
    {
        documentHandler = handler;
    }

    /**
     * Register the error event handler.
     *
     * @param handler The new error event handler.
     * @see org.xml.sax.Parser#setErrorHandler
     */
    public void setErrorHandler (ErrorHandler handler)
    {
        xmlReader.setErrorHandler(handler);
    }

    /**
     * Parse the document.
     *
     * <p>This method will throw an exception if the embedded
     * XMLReader does not support the
     * http://xml.org/sax/features/namespace-prefixes property.</p>
     *
     * @param systemId The absolute URL of the document.
     * @exception java.io.IOException If there is a problem reading
     *            the raw content of the document.
     * @exception org.xml.sax.SAXException If there is a problem
     *            processing the document.
     * @see #parse(org.xml.sax.InputSource)
     * @see org.xml.sax.Parser#parse(java.lang.String)
     */
    public void parse (String systemId)
        throws IOException, SAXException
    {
        parse(new InputSource(systemId));
    }

    /**
     * Parse the document.
     *
     * <p>This method will throw an exception if the embedded
     * XMLReader does not support the
     * http://xml.org/sax/features/namespace-prefixes property.</p>
     *
     * @param input An input source for the document.
     * @exception java.io.IOException If there is a problem reading
     *            the raw content of the document.
     * @exception org.xml.sax.SAXException If there is a problem
     *            processing the document.
     * @see #parse(java.lang.String)
     * @see org.xml.sax.Parser#parse(org.xml.sax.InputSource)
     */
    public void parse (InputSource input)
        throws IOException, SAXException
    {
        setupXMLReader();
        xmlReader.parse(input);
    }

    /**
     * Set up the XML reader.
     *
     * <p>Requires namespace-prefixes (qNames must be reported so they can
     * be forwarded to the SAX1 handler); namespaces is optional and only
     * disabled for efficiency when the reader supports that.</p>
     */
    private void setupXMLReader ()
        throws SAXException
    {
        xmlReader.setFeature("http://xml.org/sax/features/namespace-prefixes", true);
        try {
            xmlReader.setFeature("http://xml.org/sax/features/namespaces",
                                 false);
        } catch (SAXException e) {
            // NO OP: it's just extra information, and we can ignore it
        }
        xmlReader.setContentHandler(this);
    }

    ////////////////////////////////////////////////////////////////////
    // Implementation of org.xml.sax.ContentHandler.
    ////////////////////////////////////////////////////////////////////

    /**
     * Set a document locator.
     *
     * @param locator The document locator.
     * @see org.xml.sax.ContentHandler#setDocumentLocator
     */
    public void setDocumentLocator (Locator locator)
    {
        if (documentHandler != null)
            documentHandler.setDocumentLocator(locator);
    }

    /**
     * Start document event.
     *
     * @exception org.xml.sax.SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.ContentHandler#startDocument
     */
    public void startDocument ()
        throws SAXException
    {
        if (documentHandler != null)
            documentHandler.startDocument();
    }

    /**
     * End document event.
     *
     * @exception org.xml.sax.SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.ContentHandler#endDocument
     */
    public void endDocument ()
        throws SAXException
    {
        if (documentHandler != null)
            documentHandler.endDocument();
    }

    /**
     * Adapt a SAX2 start prefix mapping event.
     *
     * <p>No-op: SAX1 has no concept of prefix mappings.</p>
     *
     * @param prefix The prefix being mapped.
     * @param uri The Namespace URI being mapped to.
     * @see org.xml.sax.ContentHandler#startPrefixMapping
     */
    public void startPrefixMapping (String prefix, String uri)
    {
    }

    /**
     * Adapt a SAX2 end prefix mapping event.
     *
     * <p>No-op: SAX1 has no concept of prefix mappings.</p>
     *
     * @param prefix The prefix being mapped.
     * @see org.xml.sax.ContentHandler#endPrefixMapping
     */
    public void endPrefixMapping (String prefix)
    {
    }

    /**
     * Adapt a SAX2 start element event.
     *
     * @param uri The Namespace URI.
     * @param localName The Namespace local name.
     * @param qName The qualified (prefixed) name.
     * @param atts The SAX2 attributes.
     * @exception org.xml.sax.SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.ContentHandler#startElement
     */
    public void startElement (String uri, String localName,
                              String qName, Attributes atts)
        throws SAXException
    {
        if (documentHandler != null) {
            qAtts.setAttributes(atts);
            documentHandler.startElement(qName, qAtts);
        }
    }

    /**
     * Adapt a SAX2 end element event.
     *
     * @param uri The Namespace URI.
     * @param localName The Namespace local name.
     * @param qName The qualified (prefixed) name.
     * @exception org.xml.sax.SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.ContentHandler#endElement
     */
    public void endElement (String uri, String localName,
                            String qName)
        throws SAXException
    {
        if (documentHandler != null)
            documentHandler.endElement(qName);
    }

    /**
     * Adapt a SAX2 characters event.
     *
     * @param ch An array of characters.
     * @param start The starting position in the array.
     * @param length The number of characters to use.
     * @exception org.xml.sax.SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.ContentHandler#characters
     */
    public void characters (char ch[], int start, int length)
        throws SAXException
    {
        if (documentHandler != null)
            documentHandler.characters(ch, start, length);
    }

    /**
     * Adapt a SAX2 ignorable whitespace event.
     *
     * @param ch An array of characters.
     * @param start The starting position in the array.
     * @param length The number of characters to use.
     * @exception org.xml.sax.SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.ContentHandler#ignorableWhitespace
     */
    public void ignorableWhitespace (char ch[], int start, int length)
        throws SAXException
    {
        if (documentHandler != null)
            documentHandler.ignorableWhitespace(ch, start, length);
    }

    /**
     * Adapt a SAX2 processing instruction event.
     *
     * @param target The processing instruction target.
     * @param data The remainder of the processing instruction
     * @exception org.xml.sax.SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.ContentHandler#processingInstruction
     */
    public void processingInstruction (String target, String data)
        throws SAXException
    {
        if (documentHandler != null)
            documentHandler.processingInstruction(target, data);
    }

    /**
     * Adapt a SAX2 skipped entity event.
     *
     * <p>No-op: SAX1 has no equivalent notification.</p>
     *
     * @param name The name of the skipped entity.
     * @see org.xml.sax.ContentHandler#skippedEntity
     * @exception org.xml.sax.SAXException Throwable by subclasses.
     */
    public void skippedEntity (String name)
        throws SAXException
    {
    }

    ////////////////////////////////////////////////////////////////////
    // Internal state.
    ////////////////////////////////////////////////////////////////////

    /** The wrapped SAX2 reader that performs the actual parsing. */
    XMLReader xmlReader;

    /** The SAX1 handler events are forwarded to; may be null (events dropped). */
    DocumentHandler documentHandler;

    /** Reusable adapter presenting SAX2 Attributes as a SAX1 AttributeList. */
    AttributesAdapter qAtts;

    ////////////////////////////////////////////////////////////////////
    // Internal class.
    ////////////////////////////////////////////////////////////////////

    /**
     * Internal class to wrap a SAX2 Attributes object for SAX1.
     */
    final class AttributesAdapter implements AttributeList
    {
        AttributesAdapter ()
        {
        }

        /**
         * Set the embedded Attributes object.
         *
         * @param attributes The embedded SAX2 Attributes.
         */
        void setAttributes (Attributes attributes)
        {
            this.attributes = attributes;
        }

        /**
         * Return the number of attributes.
         *
         * @return The length of the attribute list.
         * @see org.xml.sax.AttributeList#getLength
         */
        public int getLength ()
        {
            return attributes.getLength();
        }

        /**
         * Return the qualified (prefixed) name of an attribute by position.
         *
         * @return The qualified name.
         * @see org.xml.sax.AttributeList#getName
         */
        public String getName (int i)
        {
            return attributes.getQName(i);
        }

        /**
         * Return the type of an attribute by position.
         *
         * @return The type.
         * @see org.xml.sax.AttributeList#getType(int)
         */
        public String getType (int i)
        {
            return attributes.getType(i);
        }

        /**
         * Return the value of an attribute by position.
         *
         * @return The value.
         * @see org.xml.sax.AttributeList#getValue(int)
         */
        public String getValue (int i)
        {
            return attributes.getValue(i);
        }

        /**
         * Return the type of an attribute by qualified (prefixed) name.
         *
         * @return The type.
         * @see org.xml.sax.AttributeList#getType(java.lang.String)
         */
        public String getType (String qName)
        {
            return attributes.getType(qName);
        }

        /**
         * Return the value of an attribute by qualified (prefixed) name.
         *
         * @return The value.
         * @see org.xml.sax.AttributeList#getValue(java.lang.String)
         */
        public String getValue (String qName)
        {
            return attributes.getValue(qName);
        }

        /** The SAX2 Attributes currently being presented. */
        private Attributes attributes;
    }
}
// end of XMLReaderAdapter.java
| |
/*******************************************************************************
* Copyright 2014 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.ai.tests.pfa.tests;
import com.badlogic.gdx.InputAdapter;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.ai.msg.MessageManager;
import com.badlogic.gdx.ai.msg.Telegram;
import com.badlogic.gdx.ai.msg.Telegraph;
import com.badlogic.gdx.ai.pfa.PathFinderQueue;
import com.badlogic.gdx.ai.pfa.PathFinderRequest;
import com.badlogic.gdx.ai.pfa.PathFinderRequestControl;
import com.badlogic.gdx.ai.pfa.PathSmoother;
import com.badlogic.gdx.ai.pfa.PathSmootherRequest;
import com.badlogic.gdx.ai.pfa.indexed.IndexedAStarPathFinder;
import com.badlogic.gdx.ai.pfa.indexed.IndexedAStarPathFinder.Metrics;
import com.badlogic.gdx.ai.sched.LoadBalancingScheduler;
import com.badlogic.gdx.ai.tests.PathFinderTests;
import com.badlogic.gdx.ai.tests.pfa.PathFinderTestBase;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledManhattanDistance;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledNode;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledRaycastCollisionDetector;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledSmoothableGraphPath;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.flat.FlatTiledGraph;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.flat.FlatTiledNode;
import com.badlogic.gdx.graphics.Camera;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.CheckBox;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.scenes.scene2d.ui.Slider;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.badlogic.gdx.utils.Pool;
import com.badlogic.gdx.utils.Pool.Poolable;
/** This test shows interruptible flat pathfinding through a {@link PathFinderQueue}.
*
* @author davebaol */
public class InterruptibleFlatTiledAStarTest extends PathFinderTestBase implements Telegraph {

    /** Rendered size of one tile, in world units. */
    final static float width = 8; // 5; // 10;

    /** Message code used to enqueue a request with the PathFinderQueue. */
    final static int PF_REQUEST = 1;
    /** Message code used to deliver a completed request back to this test. */
    final static int PF_RESPONSE = 2;

    ShapeRenderer renderer;
    Vector3 tmpUnprojection = new Vector3();

    int lastScreenX;
    int lastScreenY;
    int lastEndTileX;
    int lastEndTileY;
    int startTileX;
    int startTileY;

    FlatTiledGraph worldMap;
    // Double buffer: activePath is the one being drawn, workPath the one a
    // pending request writes into; they are swapped in handleMessage.
    TiledSmoothableGraphPath<FlatTiledNode> activePath;
    TiledSmoothableGraphPath<FlatTiledNode> workPath;
    boolean isActivePathSmoothed;
    TiledManhattanDistance<FlatTiledNode> heuristic;
    IndexedAStarPathFinder<FlatTiledNode> pathFinder;
    PathSmoother<FlatTiledNode, Vector2> pathSmoother;

    // Pool of reusable path finder requests, released when their response arrives.
    Pool<MyPathFinderRequest> requestPool;

    LoadBalancingScheduler scheduler;

    boolean smooth = false;

    CheckBox checkDiagonal;
    CheckBox checkSmooth;
    CheckBox checkMetrics;
    Slider sliderMillisAvailablePerFrame;

    public InterruptibleFlatTiledAStarTest (PathFinderTests container) {
        super(container, "Interruptible Flat Tiled A*");
    }

    @Override
    public void create () {
        lastEndTileX = -1;
        lastEndTileY = -1;
        startTileX = 1;
        startTileY = 1;

        // Create the map
        worldMap = new FlatTiledGraph();
        int roomCount = MathUtils.random(80, 150);// 100, 260);//70, 120);
        int roomMinSize = 3;
        int roomMaxSize = 15;
        int squashIterations = 100;
        worldMap.init(roomCount, roomMinSize, roomMaxSize, squashIterations);

        activePath = new TiledSmoothableGraphPath<FlatTiledNode>();
        workPath = new TiledSmoothableGraphPath<FlatTiledNode>();
        heuristic = new TiledManhattanDistance<FlatTiledNode>();
        pathFinder = new IndexedAStarPathFinder<FlatTiledNode>(worldMap, true);
        pathSmoother = new PathSmoother<FlatTiledNode, Vector2>(new TiledRaycastCollisionDetector<FlatTiledNode>(worldMap));

        requestPool = new Pool<MyPathFinderRequest>() {
            @Override
            protected MyPathFinderRequest newObject () {
                return new MyPathFinderRequest();
            }
        };
        // The queue receives PF_REQUEST telegrams and runs searches when scheduled.
        PathFinderQueue<FlatTiledNode> pathFinderQueue = new PathFinderQueue<FlatTiledNode>(pathFinder);
        MessageManager.getInstance().addListener(pathFinderQueue, PF_REQUEST);

        scheduler = new LoadBalancingScheduler(100);
        scheduler.add(pathFinderQueue, 1, 0);

        renderer = new ShapeRenderer();
        inputProcessor = new TiledAStarInputProcessor(this);

        // Build the detail window controls (smoothing, diagonal, metrics, time budget).
        Table detailTable = new Table(container.skin);

        detailTable.row();
        checkSmooth = new CheckBox("[RED]S[]mooth Path", container.skin);
        checkSmooth.setChecked(smooth);
        checkSmooth.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                CheckBox checkBox = (CheckBox)event.getListenerActor();
                smooth = checkBox.isChecked();
                updatePath(true);
            }
        });
        detailTable.add(checkSmooth);

        detailTable.row();
        checkDiagonal = new CheckBox("Prefer [RED]D[]iagonal", container.skin);
        checkDiagonal.setChecked(worldMap.diagonal);
        checkDiagonal.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                CheckBox checkBox = (CheckBox)event.getListenerActor();
                worldMap.diagonal = checkBox.isChecked();
                updatePath(true);
            }
        });
        detailTable.add(checkDiagonal);

        detailTable.row();
        addSeparator(detailTable);

        detailTable.row();
        checkMetrics = new CheckBox("Calculate [RED]M[]etrics", container.skin);
        checkMetrics.setChecked(pathFinder.metrics != null);
        checkMetrics.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                CheckBox checkBox = (CheckBox)event.getListenerActor();
                pathFinder.metrics = checkBox.isChecked() ? new Metrics() : null;
                updatePath(true);
            }
        });
        detailTable.add(checkMetrics);

        detailTable.row();
        addSeparator(detailTable);

        detailTable.row();
        sliderMillisAvailablePerFrame = new Slider(0.1f, 40f, 0.1f, false, container.skin);
        sliderMillisAvailablePerFrame.setValue(16);
        final Label labelMillisAvailablePerFrame = new Label("Millis Available per Frame [["
            + sliderMillisAvailablePerFrame.getValue() + "]", container.skin);
        detailTable.add(labelMillisAvailablePerFrame);
        detailTable.row();
        sliderMillisAvailablePerFrame.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                labelMillisAvailablePerFrame.setText("Millis Available per Frame [[" + sliderMillisAvailablePerFrame.getValue() + "]");
            }
        });
        Table sliderMapfTable = new Table();
        sliderMapfTable.add(new Label("[RED]-[] ", container.skin));
        sliderMapfTable.add(sliderMillisAvailablePerFrame);
        sliderMapfTable.add(new Label(" [RED]+[]", container.skin));
        detailTable.add(sliderMapfTable);

        detailWindow = createDetailWindow(detailTable);
    }

    @Override
    public void render () {
        // Give the scheduler (and therefore the path finder queue) its per-frame
        // time budget, converted from milliseconds to nanoseconds.
        long timeToRun = (long)(sliderMillisAvailablePerFrame.getValue() * 1000000f);
        scheduler.run(timeToRun);

        // Draw dungeon
        renderer.begin(ShapeType.Filled);
        for (int x = 0; x < FlatTiledGraph.sizeX; x++) {
            for (int y = 0; y < FlatTiledGraph.sizeY; y++) {
                switch (worldMap.getNode(x, y).type) {
                case TiledNode.TILE_FLOOR:
                    renderer.setColor(Color.WHITE);
                    break;
                case TiledNode.TILE_WALL:
                    renderer.setColor(Color.GRAY);
                    break;
                default:
                    renderer.setColor(Color.BLACK);
                    break;
                }
                renderer.rect(x * width, y * width, width, width);
            }
        }

        // Draw active path
        renderer.setColor(Color.RED);
        int nodeCount = activePath.getCount();
        for (int i = 0; i < nodeCount; i++) {
            FlatTiledNode node = activePath.nodes.get(i);
            renderer.rect(node.x * width, node.y * width, width, width);
        }

        // When the active path was smoothed, also draw the straight-line
        // segments between consecutive path nodes.
        if (isActivePathSmoothed) {
            renderer.end();
            renderer.begin(ShapeType.Line);
            float hw = width / 2f;
            if (nodeCount > 0) {
                FlatTiledNode prevNode = activePath.nodes.get(0);
                for (int i = 1; i < nodeCount; i++) {
                    FlatTiledNode node = activePath.nodes.get(i);
                    renderer.line(node.x * width + hw, node.y * width + hw, prevNode.x * width + hw, prevNode.y * width + hw);
                    prevNode = node;
                }
            }
        }
        renderer.end();
    }

    @Override
    public void dispose () {
        renderer.dispose();

        worldMap = null;
        activePath = null;
        workPath = null;
        heuristic = null;
        pathFinder = null;
        pathSmoother = null;

        scheduler = null;

        MessageManager.getInstance().clear();
    }

    public Camera getCamera () {
        return container.stage.getViewport().getCamera();
    }

    @Override
    public boolean handleMessage (Telegram telegram) {
        switch (telegram.message) {
        case PF_RESPONSE: // PathFinderQueue will call us directly, no need to register for this message
            MyPathFinderRequest pfr = (MyPathFinderRequest)telegram.extraInfo;
            if (PathFinderRequestControl.DEBUG) {
                @SuppressWarnings("unchecked")
                PathFinderQueue<FlatTiledNode> pfQueue = (PathFinderQueue<FlatTiledNode>)telegram.sender;
                System.out.println("pfQueue.size = " + pfQueue.size() + " executionFrames = " + pfr.executionFrames);
            }

            // Swap double buffer
            workPath = activePath;
            activePath = (TiledSmoothableGraphPath<FlatTiledNode>)pfr.resultPath;

            isActivePathSmoothed = pfr.smoothEnabled;

            // Release the request
            requestPool.free(pfr);
            break;
        }
        return true;
    }

    private void updatePath (boolean forceUpdate) {
        getCamera().unproject(tmpUnprojection.set(lastScreenX, lastScreenY, 0));
        int tileX = (int)(tmpUnprojection.x / width);
        int tileY = (int)(tmpUnprojection.y / width);
        // Only submit a new request when forced or when the target tile changed.
        if (forceUpdate || tileX != lastEndTileX || tileY != lastEndTileY) {
            final FlatTiledNode startNode = worldMap.getNode(startTileX, startTileY);
            FlatTiledNode endNode = worldMap.getNode(tileX, tileY);
            if (forceUpdate || endNode.type == TiledNode.TILE_FLOOR) {
                if (endNode.type == TiledNode.TILE_FLOOR) {
                    lastEndTileX = tileX;
                    lastEndTileY = tileY;
                } else {
                    // Forced update onto a non-floor tile: reuse the last valid target.
                    endNode = worldMap.getNode(lastEndTileX, lastEndTileY);
                }
                // Fill a pooled request and hand it to the queue via the message system.
                MyPathFinderRequest pfRequest = requestPool.obtain();
                pfRequest.startNode = startNode;
                pfRequest.endNode = endNode;
                pfRequest.heuristic = heuristic;
                pfRequest.responseMessageCode = PF_RESPONSE;
                MessageManager.getInstance().dispatchMessage(this, PF_REQUEST, pfRequest);

// worldMap.startNode = startNode;
// long startTime = nanoTime();
// pathFinder.searchNodePath(startNode, endNode, heuristic, path);
// if (pathFinder.metrics != null) {
// float elapsed = (TimeUtils.nanoTime() - startTime) / 1000000f;
// System.out.println("----------------- Indexed A* Path Finder Metrics -----------------");
// System.out.println("Visited nodes................... = " + pathFinder.metrics.visitedNodes);
// System.out.println("Open list additions............. = " + pathFinder.metrics.openListAdditions);
// System.out.println("Open list peak.................. = " + pathFinder.metrics.openListPeak);
// System.out.println("Path finding elapsed time (ms).. = " + elapsed);
// }
// if (smooth) {
// startTime = nanoTime();
// pathSmoother.smoothPath(path);
// if (pathFinder.metrics != null) {
// float elapsed = (TimeUtils.nanoTime() - startTime) / 1000000f;
// System.out.println("Path smoothing elapsed time (ms) = " + elapsed);
// }
// }
            }
        }
    }

    /** An {@link InputProcessor} that allows you to define a path to find.
     *
     * @author davebaol */
    static class TiledAStarInputProcessor extends InputAdapter {
        InterruptibleFlatTiledAStarTest test;

        public TiledAStarInputProcessor (InterruptibleFlatTiledAStarTest test) {
            this.test = test;
        }

        @Override
        public boolean keyTyped (char character) {
            // Keyboard shortcuts mirror the detail-window controls.
            switch (character) {
            case 'm':
            case 'M':
                test.checkMetrics.toggle();
                break;
            case 'd':
            case 'D':
                test.checkDiagonal.toggle();
                break;
            case 's':
            case 'S':
                test.checkSmooth.toggle();
                break;
            case '-':
                test.sliderMillisAvailablePerFrame.setValue(test.sliderMillisAvailablePerFrame.getValue()
                    - test.sliderMillisAvailablePerFrame.getStepSize());
                break;
            case '+':
                test.sliderMillisAvailablePerFrame.setValue(test.sliderMillisAvailablePerFrame.getValue()
                    + test.sliderMillisAvailablePerFrame.getStepSize());
                break;
            }
            return true;
        }

        @Override
        public boolean touchUp (int screenX, int screenY, int pointer, int button) {
            // Clicking a floor tile moves the path start there.
            test.getCamera().unproject(test.tmpUnprojection.set(screenX, screenY, 0));
            int tileX = (int)(test.tmpUnprojection.x / width);
            int tileY = (int)(test.tmpUnprojection.y / width);
            FlatTiledNode startNode = test.worldMap.getNode(tileX, tileY);
            if (startNode.type == TiledNode.TILE_FLOOR) {
                test.startTileX = tileX;
                test.startTileY = tileY;
                test.updatePath(true);
            }
            return true;
        }

        @Override
        public boolean mouseMoved (int screenX, int screenY) {
            // Moving the mouse retargets the path end (lazily, in updatePath).
            test.lastScreenX = screenX;
            test.lastScreenY = screenY;
            test.updatePath(false);
            return true;
        }
    }

    /** A pooled path finder request that optionally smooths the found path,
     * interruptibly, before the response is dispatched. */
    class MyPathFinderRequest extends PathFinderRequest<FlatTiledNode> implements Poolable {
        PathSmootherRequest<FlatTiledNode, Vector2> pathSmootherRequest;
        boolean smoothEnabled;
        boolean smoothFinished;

        public MyPathFinderRequest () {
            this.resultPath = new TiledSmoothableGraphPath<FlatTiledNode>();
            pathSmootherRequest = new PathSmootherRequest<FlatTiledNode, Vector2>();
        }

        @Override
        public boolean initializeSearch (long timeToRun) {
            // Write into the outer test's back buffer and snapshot the smooth flag.
            resultPath = workPath;
            resultPath.clear();
            smoothEnabled = smooth;
            pathSmootherRequest.refresh((TiledSmoothableGraphPath<FlatTiledNode>)resultPath);
            smoothFinished = false;
            worldMap.startNode = startNode;
            return true;
        }

        @Override
        public boolean finalizeSearch (long timeToRun) {
            // Smoothing itself is interruptible: return false to resume next frame.
            if (pathFound && smoothEnabled && !smoothFinished) {
                smoothFinished = pathSmoother.smoothPath(pathSmootherRequest, timeToRun);
                if (!smoothFinished) return false;
            }
            return true;
        }

        @Override
        public void reset () {
            this.startNode = null;
            this.endNode = null;
            this.heuristic = null;
            this.client = null;
        }
    }
}
| |
/*
* Copyright 2017 Open mHealth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.openmhealth.shim.ihealth;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.openmhealth.shim.*;
import org.openmhealth.shim.ihealth.mapper.*;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.security.oauth2.client.OAuth2RestOperations;
import org.springframework.security.oauth2.client.resource.OAuth2ProtectedResourceDetails;
import org.springframework.security.oauth2.client.resource.UserRedirectRequiredException;
import org.springframework.security.oauth2.client.token.AccessTokenRequest;
import org.springframework.security.oauth2.client.token.RequestEnhancer;
import org.springframework.security.oauth2.client.token.grant.code.AuthorizationCodeAccessTokenProvider;
import org.springframework.security.oauth2.client.token.grant.code.AuthorizationCodeResourceDetails;
import org.springframework.security.oauth2.common.AuthenticationScheme;
import org.springframework.security.oauth2.common.DefaultOAuth2AccessToken;
import org.springframework.security.oauth2.common.DefaultOAuth2RefreshToken;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.common.util.SerializationUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.HttpServerErrorException;
import org.springframework.web.client.ResponseExtractor;
import org.springframework.web.util.UriComponentsBuilder;
import java.io.IOException;
import java.net.URI;
import java.time.OffsetDateTime;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static com.google.common.collect.Lists.newArrayList;
import static java.util.Collections.singletonList;
import static org.openmhealth.shim.ihealth.IHealthShim.IHealthDataTypes.*;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Encapsulates parameters specific to the iHealth REST API and processes requests made of shimmer for iHealth data.
*
* @author Chris Schaefbauer
* @author Emerson Farrugia
*/
@Component
public class IHealthShim extends OAuth2Shim {
private static final Logger logger = getLogger(IHealthShim.class);
public static final String SHIM_KEY = "ihealth";
private static final String USER_AUTHORIZATION_URL_SUFFIX = "/OAuthv2/userauthorization/";
private static final String ACCESS_TOKEN_URL_SUFFIX = USER_AUTHORIZATION_URL_SUFFIX;
@Autowired
private IHealthClientSettings clientSettings;
@Override
public String getLabel() {
    // Human-readable name of this shim, shown to end users.
    return "iHealth";
}
@Override
public String getShimKey() {
    // Stable key ("ihealth") identifying this shim in requests and stored tokens.
    return SHIM_KEY;
}
/**
 * Builds the OAuth2 user-authorization URL from the configured API base URL.
 */
@Override
public String getUserAuthorizationUrl() {
    String apiBaseUrl = clientSettings.getApiBaseUrl();
    return apiBaseUrl + USER_AUTHORIZATION_URL_SUFFIX;
}
/**
 * Builds the OAuth2 access-token URL from the configured API base URL.
 * Note: iHealth uses the same endpoint for authorization and token exchange.
 */
@Override
public String getAccessTokenUrl() {
    String apiBaseUrl = clientSettings.getApiBaseUrl();
    return apiBaseUrl + ACCESS_TOKEN_URL_SUFFIX;
}
@Override
protected OAuth2ClientSettings getClientSettings() {
    // iHealth-specific client configuration, injected by Spring.
    return clientSettings;
}
@Override
public AuthorizationCodeAccessTokenProvider getAuthorizationCodeAccessTokenProvider() {
    // Returns an iHealth-specific provider subclass; see
    // IHealthAuthorizationCodeAccessTokenProvider for its customizations.
    return new IHealthAuthorizationCodeAccessTokenProvider();
}
@Override
public ShimDataType[] getShimDataTypes() {
    // Data types this shim can supply.
    return new ShimDataType[] {
            BLOOD_PRESSURE, // TODO the order matters here since the first is used as a trigger request
            BLOOD_GLUCOSE,
            PHYSICAL_ACTIVITY,
            BODY_WEIGHT,
            BODY_MASS_INDEX,
            HEART_RATE,
            STEP_COUNT,
            SLEEP_DURATION,
            OXYGEN_SATURATION
    };
}
/**
 * The iHealth data types supported by this shim, each mapped to the REST
 * endpoint file name(s) that serve it. The single-argument constructor was
 * redundant (fully subsumed by the varargs constructor) and has been removed;
 * the endpoint list is now held in a final field.
 */
public enum IHealthDataTypes implements ShimDataType {

    PHYSICAL_ACTIVITY("sport.json"),
    BLOOD_GLUCOSE("glucose.json"),
    BLOOD_PRESSURE("bp.json"),
    BODY_WEIGHT("weight.json"),
    BODY_MASS_INDEX("weight.json"),
    HEART_RATE("bp.json", "spo2.json"), // heart rate is served by two endpoints
    STEP_COUNT("activity.json"),
    SLEEP_DURATION("sleep.json"),
    OXYGEN_SATURATION("spo2.json");

    // Endpoints backing this measure; iteration order must match the order of
    // the per-endpoint secrets (see getData's sv-value lookup).
    private final List<String> endPoint;

    /**
     * @param endpoints one or more endpoint file names serving this data type
     */
    IHealthDataTypes(String... endpoints) {
        this.endPoint = Lists.newArrayList(endpoints);
    }

    /**
     * @return the endpoint file names that serve this data type
     */
    public List<String> getEndpoints() {
        return endPoint;
    }
}
/**
 * Retrieves data of the requested type from the iHealth API, optionally normalizing it into
 * Open mHealth data points.
 *
 * @param restTemplate the authenticated REST client
 * @param shimDataRequest the request, carrying the data type key, optional date range and
 *        access parameters
 * @return the raw or normalized data wrapped in a {@link ShimDataResponse}
 * @throws ShimException if the data type key is null or unrecognized
 */
@Override
protected ResponseEntity<ShimDataResponse> getData(OAuth2RestOperations restTemplate,
        ShimDataRequest shimDataRequest) throws ShimException {

    final IHealthDataTypes dataType;
    try {
        dataType = valueOf(shimDataRequest.getDataTypeKey().trim().toUpperCase());
    }
    catch (NullPointerException | IllegalArgumentException e) {
        // NOTE(review): the caught exception isn't chained as a cause; consider passing 'e'
        // along if ShimException has a (String, Throwable) constructor
        throw new ShimException("Null or Invalid data type parameter: "
                + shimDataRequest.getDataTypeKey()
                + " in shimDataRequest, cannot retrieve data.");
    }

    // default to roughly one day around now when the caller omits a date range
    OffsetDateTime now = OffsetDateTime.now();
    OffsetDateTime startDate = shimDataRequest.getStartDateTime() == null ?
            now.minusDays(1) : shimDataRequest.getStartDateTime();
    OffsetDateTime endDate = shimDataRequest.getEndDateTime() == null ?
            now.plusDays(1) : shimDataRequest.getEndDateTime();

    /*
      The physical activity endpoint handles start and end datetimes differently than the other
      endpoints. It requires us to include the range until the beginning of the next day.
    */
    if (dataType == PHYSICAL_ACTIVITY) {
        endDate = endDate.plusDays(1);
    }

    // SC and SV values are client-based keys that are unique to each endpoint within a project
    String scValue = clientSettings.getClientSerialNumber();
    List<String> svValues = getEndpointSecrets(dataType);

    List<JsonNode> responseEntities = newArrayList();

    // index into svValues, kept in lockstep with the endpoint iteration below
    int i = 0;

    // We iterate because one of the measures (heart rate) comes from multiple endpoints, so we
    // submit requests to each of these endpoints, map the responses separately and then combine
    for (String endpoint : dataType.getEndpoints()) {

        UriComponentsBuilder uriBuilder =
                UriComponentsBuilder.fromUriString(clientSettings.getApiBaseUrl() + "/");

        // Need to use a dummy userId if we haven't authenticated yet. This is the case where we
        // are using getData to trigger Spring to conduct the OAuth exchange
        String userId = "uk";

        if (shimDataRequest.getAccessParameters() != null) {
            OAuth2AccessToken token =
                    SerializationUtils.deserialize(
                            shimDataRequest.getAccessParameters().getSerializedToken());
            userId = Preconditions.checkNotNull(
                    (String) token.getAdditionalInformation().get("UserID"));
            uriBuilder.queryParam("access_token", token.getValue());
        }

        uriBuilder.path("/user/")
                .path(userId + "/")
                .path(endpoint)
                .queryParam("client_id", restTemplate.getResource().getClientId())
                .queryParam("client_secret", restTemplate.getResource().getClientSecret())
                .queryParam("start_time", startDate.toEpochSecond())
                .queryParam("end_time", endDate.toEpochSecond())
                .queryParam("locale", "default")
                .queryParam("sc", scValue)
                // TODO this is way too brittle, retrieve endpoint secret by endpoint instead of
                // by measure type
                .queryParam("sv", svValues.get(i));

        ResponseEntity<JsonNode> responseEntity;
        try {
            URI url = uriBuilder.build().encode().toUri();
            responseEntity = restTemplate.getForEntity(url, JsonNode.class);
        }
        catch (HttpClientErrorException | HttpServerErrorException e) {
            // TODO figure out how to handle this
            logger.error("A request for iHealth data failed.", e);
            throw e;
        }

        if (shimDataRequest.getNormalize()) {
            IHealthDataPointMapper mapper = getDataPointMapper(dataType, endpoint);
            responseEntities.addAll(mapper.asDataPoints(singletonList(responseEntity.getBody())));
        }
        else {
            responseEntities.add(responseEntity.getBody());
        }

        i++;
    }

    return ResponseEntity.ok().body(
            ShimDataResponse.result(SHIM_KEY, responseEntities));
}

/**
 * Selects the mapper that converts a raw response from an iHealth endpoint into data points.
 *
 * @param dataType the measure being requested
 * @param endpoint the iHealth endpoint the response came from
 * @return the matching mapper
 * @throws UnsupportedOperationException if no mapper exists for the combination
 */
private IHealthDataPointMapper getDataPointMapper(IHealthDataTypes dataType, String endpoint) {

    switch (dataType) {
        case PHYSICAL_ACTIVITY:
            return new IHealthPhysicalActivityDataPointMapper();
        case BLOOD_GLUCOSE:
            return new IHealthBloodGlucoseDataPointMapper();
        case BLOOD_PRESSURE:
            return new IHealthBloodPressureDataPointMapper();
        case BODY_WEIGHT:
            return new IHealthBodyWeightDataPointMapper();
        case BODY_MASS_INDEX:
            return new IHealthBodyMassIndexDataPointMapper();
        case STEP_COUNT:
            return new IHealthStepCountDataPointMapper();
        case SLEEP_DURATION:
            return new IHealthSleepDurationDataPointMapper();
        case HEART_RATE:
            // there are two different mappers for heart rate because the data can come from two
            // endpoints
            if (endpoint.equals("bp.json")) {
                return new IHealthBloodPressureEndpointHeartRateDataPointMapper();
            }
            if (endpoint.equals("spo2.json")) {
                return new IHealthBloodOxygenEndpointHeartRateDataPointMapper();
            }
            // FIX: the original switch fell through into the OXYGEN_SATURATION case for an
            // unrecognized heart rate endpoint; fail fast instead
            throw new UnsupportedOperationException(
                    "No heart rate mapper for endpoint " + endpoint);
        case OXYGEN_SATURATION:
            return new IHealthOxygenSaturationDataPointMapper();
        default:
            throw new UnsupportedOperationException();
    }
}
/**
 * Looks up the per-endpoint "SV" secret(s) for the requested measure, in the same order as the
 * measure's endpoints.
 *
 * @param dataType the measure being requested
 * @return the endpoint secret(s), one per endpoint serving the measure
 * @throws UnsupportedOperationException if the measure is unrecognized
 */
private List<String> getEndpointSecrets(IHealthDataTypes dataType) {

    switch (dataType) {
        case PHYSICAL_ACTIVITY:
            return singletonList(clientSettings.getSportEndpointSecret());
        case BLOOD_GLUCOSE:
            return singletonList(clientSettings.getBloodGlucoseEndpointSecret());
        case BLOOD_PRESSURE:
            return singletonList(clientSettings.getBloodPressureEndpointSecret());
        case BODY_WEIGHT:
        case BODY_MASS_INDEX:
            // body mass index comes from the weight endpoint
            return singletonList(clientSettings.getWeightEndpointSecret());
        case HEART_RATE:
            // heart rate comes from both the blood pressure and the SpO2 endpoints
            return newArrayList(clientSettings.getBloodPressureEndpointSecret(),
                    clientSettings.getSpO2EndpointSecret());
        case STEP_COUNT:
            return singletonList(clientSettings.getActivityEndpointSecret());
        case SLEEP_DURATION:
            return singletonList(clientSettings.getSleepEndpointSecret());
        case OXYGEN_SATURATION:
            return singletonList(clientSettings.getSpO2EndpointSecret());
        default:
            throw new UnsupportedOperationException();
    }
}
@Override
public OAuth2ProtectedResourceDetails getResource() {

    // NOTE(review): presumably iHealth doesn't support HTTP basic authentication on token
    // requests, hence the 'none' scheme — confirm against iHealth's OAuth documentation
    final AuthorizationCodeResourceDetails details =
            (AuthorizationCodeResourceDetails) super.getResource();
    details.setAuthenticationScheme(AuthenticationScheme.none);

    return details;
}
/**
 * Builds the iHealth authorization URL the user is redirected to, embedding our callback URL
 * (which carries the OAuth state key) and the requested scopes.
 *
 * @param exception the redirect exception raised by Spring's OAuth flow
 * @param addlParameters additional parameters (unused)
 * @return the encoded authorization URL
 */
@Override
protected String getAuthorizationUrl(UserRedirectRequiredException exception, Map<String, String> addlParameters) {

    final OAuth2ProtectedResourceDetails resource = getResource();

    String callbackUrl = UriComponentsBuilder.fromUriString(getDefaultRedirectUrl())
            .queryParam("state", exception.getStateKey())
            .build().toString();

    String scopes = Joiner.on(' ').join(resource.getScope());

    return UriComponentsBuilder.fromUriString(exception.getRedirectUri())
            .queryParam("client_id", resource.getClientId())
            .queryParam("response_type", "code")
            // iHealth uses APIName/RequiredAPIName in place of the standard 'scope' parameter
            .queryParam("APIName", scopes)
            .queryParam("RequiredAPIName", scopes)
            .queryParam("redirect_uri", callbackUrl)
            .build().encode().toString();
}
/**
 * An access token provider adapted to iHealth's non-standard OAuth 2.0 implementation: token
 * requests use GET with client credentials as form parameters, and the token response uses a
 * custom JSON shape.
 */
public class IHealthAuthorizationCodeAccessTokenProvider extends AuthorizationCodeAccessTokenProvider {

    public IHealthAuthorizationCodeAccessTokenProvider() {

        // iHealth expects the client credentials, redirect URI and state on the token request
        this.setTokenRequestEnhancer(new RequestEnhancer() {
            @Override
            public void enhance(AccessTokenRequest request,
                    OAuth2ProtectedResourceDetails resource,
                    MultiValueMap<String, String> form, HttpHeaders headers) {

                form.set("client_id", resource.getClientId());
                form.set("client_secret", resource.getClientSecret());
                form.set("redirect_uri", getDefaultRedirectUrl());
                form.set("state", request.getStateKey());
            }
        });
    }

    @Override
    protected HttpMethod getHttpMethod() {
        // iHealth token requests use GET rather than the standard POST
        return HttpMethod.GET;
    }

    @Override
    protected ResponseExtractor<OAuth2AccessToken> getResponseExtractor() {

        return new ResponseExtractor<OAuth2AccessToken>() {
            /**
             * Extracts the access token from iHealth's custom JSON token response.
             *
             * @throws NullPointerException if a required field is missing
             */
            @Override
            public OAuth2AccessToken extractData(ClientHttpResponse response) throws IOException {

                JsonNode node = new ObjectMapper().readTree(response.getBody());

                String token = Preconditions
                        .checkNotNull(node.path("AccessToken").textValue(),
                                "Missing access token: %s", node);
                // FIX: 'node' was previously concatenated onto the message template instead of
                // being passed as the %s format argument, producing a malformed error message
                String refreshToken = Preconditions
                        .checkNotNull(node.path("RefreshToken").textValue(),
                                "Missing refresh token: %s", node);
                String userId =
                        Preconditions.checkNotNull(node.path("UserID").textValue(),
                                "Missing UserID: %s", node);
                // Expires is in seconds; convert to milliseconds
                long expiresIn = node.path("Expires").longValue() * 1000;
                Preconditions.checkArgument(expiresIn > 0, "Missing Expires: %s", node);

                DefaultOAuth2AccessToken accessToken = new DefaultOAuth2AccessToken(token);
                accessToken.setExpiration(new Date(System.currentTimeMillis() + expiresIn));
                accessToken.setRefreshToken(new DefaultOAuth2RefreshToken(refreshToken));
                accessToken.setAdditionalInformation(ImmutableMap.<String, Object>of("UserID", userId));
                return accessToken;
            }
        };
    }
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.journal.raft;
import alluxio.ProcessUtils;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.master.journal.checkpoint.CheckpointInputStream;
import alluxio.master.journal.JournalEntryAssociation;
import alluxio.master.journal.JournalUtils;
import alluxio.master.journal.Journaled;
import alluxio.master.journal.sink.JournalSink;
import alluxio.proto.journal.Journal.JournalEntry;
import alluxio.util.StreamUtils;
import com.google.common.base.Preconditions;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import io.atomix.copycat.server.Commit;
import io.atomix.copycat.server.Snapshottable;
import io.atomix.copycat.server.StateMachine;
import io.atomix.copycat.server.storage.snapshot.SnapshotReader;
import io.atomix.copycat.server.storage.snapshot.SnapshotWriter;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import javax.annotation.concurrent.GuardedBy;
/**
* A state machine for managing all of Alluxio's journaled state. Entries applied to this state
* machine will be forwarded to the appropriate internal master.
*
* The state machine starts by resetting all state, then applying the entries offered by copycat.
* When the master becomes primary, it should wait until the state machine is up to date and no
* other primary master is serving, then call {@link #upgrade}. Once the state machine is upgraded,
* it will ignore all entries appended by copycat because those entries are applied to primary
* master state before being written to copycat.
*/
@ThreadSafe
public class JournalStateMachine extends StateMachine implements Snapshottable {
  private static final Logger LOG = LoggerFactory.getLogger(JournalStateMachine.class);

  /** Master journals, keyed by master name; owned by the caller, not by this state machine. */
  private final Map<String, RaftJournal> mJournals;
  /** When true, applied entries are ignored because a primary master is serving. */
  @GuardedBy("this")
  private boolean mIgnoreApplys = false;
  /** When true, all further modification requests are ignored. */
  @GuardedBy("this")
  private boolean mClosed = false;

  private volatile long mLastAppliedCommitIndex = -1;
  // The last special "primary start" sequence number applied to this state machine. These special
  // sequence numbers are identified by being negative.
  private volatile long mLastPrimaryStartSequenceNumber = 0;
  private volatile long mNextSequenceNumberToRead = 0;
  private volatile boolean mSnapshotting = false;
  // The start time of the most recent snapshot
  private volatile long mLastSnapshotStartTime = 0;

  /** A supplier of journal sinks for this journal. */
  private final Supplier<Set<JournalSink>> mJournalSinks;

  /**
   * @param journals master journals; these journals are still owned by the caller, not by the
   *        journal state machine
   * @param journalSinks a supplier for journal sinks
   */
  public JournalStateMachine(Map<String, RaftJournal> journals,
      Supplier<Set<JournalSink>> journalSinks) {
    mJournals = Collections.unmodifiableMap(journals);
    mJournalSinks = journalSinks;
    resetState();
    LOG.info("Initialized new journal state machine");
  }

  /**
   * Applies a journal entry commit to the state machine.
   *
   * This method is automatically discovered by the Copycat framework.
   *
   * @param commit the commit
   */
  public synchronized void applyJournalEntryCommand(Commit<JournalEntryCommand> commit) {
    JournalEntry entry;
    try {
      entry = JournalEntry.parseFrom(commit.command().getSerializedJournalEntry());
    } catch (Exception e) {
      // An unparseable entry is unrecoverable corruption; kill the process.
      ProcessUtils.fatalError(LOG, e,
          "Encountered invalid journal entry in commit: %s.", commit);
      System.exit(-1);
      throw new IllegalStateException(e); // We should never reach here.
    }
    try {
      applyEntry(entry);
    } finally {
      // Commit indexes must be strictly increasing; record progress and release the commit
      // even if the apply failed.
      Preconditions.checkState(commit.index() > mLastAppliedCommitIndex);
      mLastAppliedCommitIndex = commit.index();
      commit.close();
    }
  }

  /**
   * Applies the journal entry, ignoring empty entries and expanding multi-entries.
   *
   * @param entry the entry to apply
   */
  private void applyEntry(JournalEntry entry) {
    Preconditions.checkState(
        entry.getAllFields().size() <= 1
            || (entry.getAllFields().size() == 2 && entry.hasSequenceNumber()),
        "Raft journal entries should never set multiple fields in addition to sequence "
            + "number, but found %s",
        entry);
    if (entry.getJournalEntriesCount() > 0) {
      // This entry aggregates multiple entries.
      for (JournalEntry e : entry.getJournalEntriesList()) {
        applyEntry(e);
      }
    } else if (entry.getSequenceNumber() < 0) {
      // Negative sequence numbers indicate special entries used to indicate that a new primary is
      // starting to serve.
      mLastPrimaryStartSequenceNumber = entry.getSequenceNumber();
    } else if (entry.toBuilder().clearSequenceNumber().build()
        .equals(JournalEntry.getDefaultInstance())) {
      // Ignore empty entries, they are created during snapshotting.
    } else {
      applySingleEntry(entry);
    }
  }

  @SuppressFBWarnings(value = "VO_VOLATILE_INCREMENT",
      justification = "All writes to mNextSequenceNumberToRead are synchronized")
  private synchronized void applySingleEntry(JournalEntry entry) {
    if (mClosed) {
      return;
    }
    long newSN = entry.getSequenceNumber();
    if (newSN < mNextSequenceNumberToRead) {
      // This can happen due to retried writes. For example, if flushing [3, 4] fails, we will
      // retry, and the log may end up looking like [1, 2, 3, 4, 3, 4] if the original request
      // eventually succeeds. Once we've read the first "4", we must ignore the next two entries.
      LOG.debug("Ignoring duplicate journal entry with SN {} when next SN is {}", newSN,
          mNextSequenceNumberToRead);
      return;
    }
    if (newSN > mNextSequenceNumberToRead) {
      // A gap in sequence numbers means entries were lost; this is unrecoverable.
      ProcessUtils.fatalError(LOG,
          "Unexpected journal entry. The next expected SN is %s, but"
              + " encountered an entry with SN %s. Full journal entry: %s",
          mNextSequenceNumberToRead, newSN, entry);
    }
    mNextSequenceNumberToRead++;
    if (!mIgnoreApplys) {
      applyToMaster(entry);
    }
  }

  /**
   * Routes the entry to the state machine of the master it belongs to and notifies sinks.
   */
  private synchronized void applyToMaster(JournalEntry entry) {
    String masterName;
    try {
      masterName = JournalEntryAssociation.getMasterForEntry(entry);
    } catch (Throwable t) {
      ProcessUtils.fatalError(LOG, t, "Unrecognized journal entry: %s", entry);
      throw new IllegalStateException();
    }
    try {
      Journaled master = mJournals.get(masterName).getStateMachine();
      LOG.trace("Applying entry to master {}: {} ", masterName, entry);
      master.processJournalEntry(entry);
      JournalUtils.sinkAppend(mJournalSinks, entry);
    } catch (Throwable t) {
      JournalUtils.handleJournalReplayFailure(LOG, t,
          "Failed to apply journal entry to master %s. Entry: %s", masterName, entry);
    }
  }

  @Override
  public synchronized void snapshot(SnapshotWriter writer) {
    // Snapshot format is [snapshotId, name1, bytes1, name2, bytes2, ...].
    if (mClosed) {
      return;
    }
    LOG.debug("Calling snapshot");
    Preconditions.checkState(!mSnapshotting, "Cannot call snapshot multiple times concurrently");
    mSnapshotting = true;
    // FIX: reset mSnapshotting in a finally block so that a failed snapshot doesn't leave
    // isSnapshotting() reporting true forever (the original only reset it on success)
    try {
      mLastSnapshotStartTime = System.currentTimeMillis();
      long snapshotId = mNextSequenceNumberToRead - 1;
      try (SnapshotWriterStream sws = new SnapshotWriterStream(writer)) {
        writer.writeLong(snapshotId);
        JournalUtils.writeToCheckpoint(sws, getStateMachines());
      } catch (Throwable t) {
        ProcessUtils.fatalError(LOG, t, "Failed to snapshot");
        throw new RuntimeException(t);
      }
      LOG.info("Completed snapshot up to SN {} in {}ms", snapshotId,
          System.currentTimeMillis() - mLastSnapshotStartTime);
    } finally {
      mSnapshotting = false;
    }
  }

  @Override
  public synchronized void install(SnapshotReader snapshotReader) {
    if (mClosed) {
      return;
    }
    if (mIgnoreApplys) {
      LOG.warn("Unexpected request to install a snapshot on a read-only journal state machine");
      return;
    }
    long snapshotId = 0L;
    try (InputStream srs = new SnapshotReaderStream(snapshotReader)) {
      snapshotId = snapshotReader.readLong();
      JournalUtils.restoreFromCheckpoint(new CheckpointInputStream(srs), getStateMachines());
    } catch (Throwable t) {
      JournalUtils.handleJournalReplayFailure(LOG, t,
          "Failed to install snapshot");
      if (ServerConfiguration.getBoolean(PropertyKey.MASTER_JOURNAL_TOLERATE_CORRUPTION)) {
        return;
      }
    }

    if (snapshotId < mNextSequenceNumberToRead - 1) {
      LOG.warn("Installed snapshot for SN {} but next SN to read is {}", snapshotId,
          mNextSequenceNumberToRead);
    }
    mNextSequenceNumberToRead = snapshotId + 1;
    LOG.info("Successfully installed snapshot up to SN {}", snapshotId);
  }

  /**
   * @return the journaled state machines of all masters, in journal-map order
   */
  private List<Journaled> getStateMachines() {
    return StreamUtils.map(RaftJournal::getStateMachine, mJournals.values());
  }

  /**
   * Resets the state of all master state machines, unless closed or read-only.
   */
  private synchronized void resetState() {
    if (mClosed) {
      return;
    }
    if (mIgnoreApplys) {
      LOG.warn("Unexpected call to resetState() on a read-only journal state machine");
      return;
    }
    for (RaftJournal journal : mJournals.values()) {
      journal.getStateMachine().resetState();
    }
  }

  /**
   * Upgrades the journal state machine to primary mode.
   *
   * @return the last sequence number read while in secondary mode
   */
  public synchronized long upgrade() {
    mIgnoreApplys = true;
    return mNextSequenceNumberToRead - 1;
  }

  /**
   * @return the sequence number of the last entry applied to the state machine
   */
  public long getLastAppliedSequenceNumber() {
    return mNextSequenceNumberToRead - 1;
  }

  /**
   * @return the last primary term start sequence number applied to this state machine
   */
  public long getLastPrimaryStartSequenceNumber() {
    return mLastPrimaryStartSequenceNumber;
  }

  /**
   * @return the start time of the most recent snapshot
   */
  public long getLastSnapshotStartTime() {
    return mLastSnapshotStartTime;
  }

  /**
   * @return whether the state machine is in the process of taking a snapshot
   */
  public boolean isSnapshotting() {
    return mSnapshotting;
  }

  /**
   * Closes the journal state machine, causing all further modification requests to be ignored.
   */
  public synchronized void close() {
    mClosed = true;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.geode.InternalGemFireException;
import org.apache.geode.annotations.VisibleForTesting;
import org.apache.geode.cache.server.ServerLoad;
import org.apache.geode.cache.wan.GatewayReceiver;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.logging.internal.executors.LoggingExecutors;
/**
* A data structure used to hold load information for a locator
*
* @since GemFire 5.7
*
*/
public class LocatorLoadSnapshot {
// System property allowing operators to override the default imbalance threshold.
private static final String LOAD_IMBALANCE_THRESHOLD_PROPERTY_NAME =
"gemfire.locator-load-imbalance-threshold";

public static final float DEFAULT_LOAD_IMBALANCE_THRESHOLD = 10;

// Groups hosted by each known server; doubles as the registry of known servers.
private final Map<ServerLocation, String[]> serverGroupMap = new HashMap<>();

// Connection load per group; the null key holds all servers regardless of group.
// Keyed by (location, member id) per group.
private final Map<String, Map<ServerLocationAndMemberId, LoadHolder>> connectionLoadMap =
new HashMap<>();

// Subscription-queue load per group; keyed by location alone.
private final Map<String, Map<ServerLocation, LoadHolder>> queueLoadMap = new HashMap<>();

// Pending per-client load estimates with their timeout tasks.
private final ConcurrentMap<EstimateMapKey, LoadEstimateTask> estimateMap =
new ConcurrentHashMap<>();

/**
 * when replacing a client's current server we do not move a client from a highly loaded server to
 * a less loaded server until imbalance reaches this threshold. Then we aggressively move clients
 * until balance is achieved.
 */
private float loadImbalanceThreshold;

/**
 * when the loadImbalanceThreshold is hit this variable will be true and it will remain true until
 * balance is achieved.
 */
private boolean rebalancing;

// Single-threaded scheduler used to expire stale load estimates.
private final ScheduledExecutorService estimateTimeoutProcessor =
LoggingExecutors.newScheduledThreadPool(1, "loadEstimateTimeoutProcessor", false);
/**
 * Creates an empty snapshot. The null group entries hold every server regardless of group
 * membership; the imbalance threshold may be overridden via a system property.
 */
public LocatorLoadSnapshot() {
  connectionLoadMap.put(null, new HashMap<>());
  queueLoadMap.put(null, new HashMap<>());
  String overriddenThreshold = System.getProperty(LOAD_IMBALANCE_THRESHOLD_PROPERTY_NAME);
  loadImbalanceThreshold = overriddenThreshold == null
      ? DEFAULT_LOAD_IMBALANCE_THRESHOLD
      : Float.parseFloat(overriddenThreshold);
}
/**
 * Add a new server to the load snapshot, registering both its connection load (tracked per
 * location and member) and its subscription-queue load (tracked per location).
 */
public synchronized void addServer(ServerLocation location, String memberId, String[] groups,
    ServerLoad initialLoad, long loadPollInterval) {
  serverGroupMap.put(location, groups);

  LoadHolder connectionLoad = new LoadHolder(location,
      initialLoad.getConnectionLoad(),
      initialLoad.getLoadPerConnection(),
      loadPollInterval);
  addGroups(connectionLoadMap, groups, connectionLoad, memberId);

  LoadHolder queueLoad = new LoadHolder(location,
      initialLoad.getSubscriptionConnectionLoad(),
      initialLoad.getLoadPerSubscriptionConnection(),
      loadPollInterval);
  addGroups(queueLoadMap, groups, queueLoad);

  updateLoad(location, memberId, initialLoad);
}
/**
 * Remove a server from the load snapshot.
 */
public synchronized void removeServer(ServerLocation location, String memberId) {
  String[] groups = serverGroupMap.remove(location);
  /*
   * Null check for #41522 - we were getting a remove from a BridgeServer that was shutting down
   * and the ServerLocation wasn't in this map. The root cause isn't 100% clear but it might be a
   * race from profile add / remove from different channels.
   */
  if (groups == null) {
    return;
  }
  removeFromMap(connectionLoadMap, groups, location, memberId);
  removeFromMap(queueLoadMap, groups, location);
}
/**
 * Update the load information for a server, with no client estimates to cancel.
 */
public void updateLoad(ServerLocation location, String memberId, ServerLoad newLoad) {
updateLoad(location, memberId, newLoad, null);
}
/**
 * Update the load information for a server that was previously added, cancelling any outstanding
 * load estimates for the given clients.
 */
synchronized void updateLoad(ServerLocation location, String memberId, ServerLoad newLoad,
    List<ClientProxyMembershipID> clientIds) {
  final String[] groups = serverGroupMap.get(location);
  if (groups == null) {
    // the server was asynchronously removed, so don't do anything.
    return;
  }

  if (clientIds != null) {
    for (ClientProxyMembershipID clientId : clientIds) {
      cancelClientEstimate(clientId, location);
    }
  }

  updateMap(connectionLoadMap, location, memberId, newLoad.getConnectionLoad(),
      newLoad.getLoadPerConnection());
  updateMap(queueLoadMap, location, newLoad.getSubscriptionConnectionLoad(),
      newLoad.getLoadPerSubscriptionConnection());
}
/**
 * @param group the group, where "" is treated the same as null (no group)
 * @return whether the connection load of the group's servers is balanced
 */
public synchronized boolean hasBalancedConnections(String group) {
  final String normalizedGroup = "".equals(group) ? null : group;
  return isBalanced(connectionLoadMap.get(normalizedGroup));
}
// Convenience overload: balance check without the imbalance-threshold hysteresis.
private synchronized boolean isBalanced(Map<ServerLocationAndMemberId, LoadHolder> groupServers) {
return isBalanced(groupServers, false);
}
/**
 * Determines whether the given servers' loads are balanced: the spread between the least and
 * most loaded server must be no more than the largest per-connection load. Optionally applies
 * the imbalance-threshold hysteresis via {@link #thresholdCheck}.
 */
private synchronized boolean isBalanced(Map<ServerLocationAndMemberId, LoadHolder> groupServers,
    boolean withThresholdCheck) {
  if (groupServers == null || groupServers.isEmpty()) {
    // nothing to balance
    return true;
  }

  float minLoad = Float.MAX_VALUE;
  float maxLoad = Float.MIN_VALUE;
  float maxLoadPerConnection = Float.MIN_VALUE;

  for (LoadHolder holder : groupServers.values()) {
    float load = holder.getLoad();
    float loadPerConnection = holder.getLoadPerConnection();
    if (load < minLoad) {
      minLoad = load;
    }
    if (load > maxLoad) {
      maxLoad = load;
    }
    if (loadPerConnection > maxLoadPerConnection) {
      maxLoadPerConnection = loadPerConnection;
    }
  }

  boolean balanced = (maxLoad - minLoad) <= maxLoadPerConnection;
  return withThresholdCheck
      ? thresholdCheck(minLoad, maxLoad, maxLoadPerConnection, balanced)
      : balanced;
}
/**
 * In order to keep from ping-ponging clients around the cluster we don't move a client unless
 * imbalance is greater than the loadImbalanceThreshold.
 * <p>
 * When the threshold is reached we report imbalance until proper balance is achieved.
 * </p>
 * <p>
 * This method has the side-effect of setting the <code>rebalancing</code> instance variable
 * which, at the time of this writing, is only used by this method.
 * </p>
 */
private synchronized boolean thresholdCheck(float bestLoad, float worstLoad,
    float largestLoadPerConnection, boolean balanced) {
  if (rebalancing) {
    // keep reporting imbalance until balance is restored
    if (balanced) {
      rebalancing = false;
    }
    return balanced;
  }

  if (balanced) {
    return true;
  }

  // see if we're out of balance enough to trigger rebalancing or whether we should tolerate it
  float imbalance = worstLoad - bestLoad;
  if (imbalance >= (largestLoadPerConnection * loadImbalanceThreshold)) {
    rebalancing = true;
    return false;
  }

  // we're not in balance but are within the threshold
  return true;
}
// Test/diagnostic accessor for the rebalancing hysteresis flag.
synchronized boolean isRebalancing() {
return rebalancing;
}
/**
 * Pick the least loaded server in the given group and account for the new connection on it.
 *
 * @param group the group, or null or "" if the client has no server group.
 * @param excludedServers a list of servers to exclude as choices
 * @return the least loaded server, or null if there are no servers that aren't excluded.
 */
public synchronized ServerLocation getServerForConnection(String group,
    Set<ServerLocation> excludedServers) {
  if ("".equals(group)) {
    group = null;
  }

  Map<ServerLocationAndMemberId, LoadHolder> groupServers = connectionLoadMap.get(group);
  if (groupServers == null || groupServers.isEmpty()) {
    return null;
  }

  // use the generic type rather than the raw List the original declared
  List<LoadHolder> bestLHs = findBestServers(groupServers, excludedServers, 1);
  if (bestLHs.isEmpty()) {
    return null;
  }
  LoadHolder lh = bestLHs.get(0);
  // optimistically count the connection we're about to hand out
  lh.incConnections();
  return lh.getLocation();
}
/**
 * @param group the group, or null or "" if the client has no server group
 * @return the locations of the group's servers, or null if the group is unknown or empty.
 *         The raw return type is retained for caller compatibility; the elements are
 *         {@code ServerLocation}s.
 */
public synchronized ArrayList getServers(String group) {
  if ("".equals(group)) {
    group = null;
  }

  Map<ServerLocationAndMemberId, LoadHolder> groupServers = connectionLoadMap.get(group);
  if (groupServers == null || groupServers.isEmpty()) {
    return null;
  }

  // type the local properly (the original used a raw ArrayList) and presize it
  ArrayList<ServerLocation> result = new ArrayList<>(groupServers.size());
  for (ServerLocationAndMemberId locationAndMemberId : groupServers.keySet()) {
    result.add(locationAndMemberId.getServerLocation());
  }
  return result;
}
// Stops the estimate-timeout scheduler; pending timeout tasks are allowed to complete.
public void shutDown() {
estimateTimeoutProcessor.shutdown();
}
/**
 * Pick the least loaded server in the given group if currentServer is the most loaded server.
 *
 * @param group the group, or null or "" if the client has no server group.
 * @param excludedServers a list of servers to exclude as choices
 * @return currentServer if it is not the most loaded, null if there are no servers that aren't
 *         excluded, otherwise the least loaded server in the group.
 */
public synchronized ServerLocation getReplacementServerForConnection(ServerLocation currentServer,
    String group, Set<ServerLocation> excludedServers) {
  if ("".equals(group)) {
    group = null;
  }

  Map<ServerLocationAndMemberId, LoadHolder> groupServers = connectionLoadMap.get(group);
  if (groupServers == null || groupServers.isEmpty()) {
    return null;
  }

  // if the group is already balanced (within the threshold), keep the current server
  if (isBalanced(groupServers, true)) {
    return currentServer;
  }

  LoadHolder currentHolder = isCurrentServerMostLoaded(currentServer, groupServers);
  if (currentHolder == null) {
    // the current server isn't the most loaded, so there's no reason to move the client
    return currentServer;
  }

  List<LoadHolder> candidates = findBestServers(groupServers, excludedServers, 1);
  if (candidates.isEmpty()) {
    return null;
  }

  // move one connection's worth of load from the current server to the best candidate
  LoadHolder target = candidates.get(0);
  currentHolder.decConnections();
  target.incConnections();
  return target.getLocation();
}
/**
 * Pick the least loaded servers in the given group, without tracking a client id (no load
 * estimates are scheduled).
 *
 * @param group the group, or null or "" if the client has no server group.
 * @param excludedServers a list of servers to exclude as choices
 * @param count how many distinct servers to pick.
 * @return a list containing the best servers. The size of the list will be less than or equal to
 *         count, depending on if there are enough servers available.
 */
public List getServersForQueue(String group, Set<ServerLocation> excludedServers, int count) {
return getServersForQueue(null, group, excludedServers, count);
}
/**
 * Pick the least loaded servers in the given group.
 *
 * @param id the id of the client creating the queue, or null to simply count the connections
 * @param group the group, or null or "" if the client has no server group.
 * @param excludedServers a list of servers to exclude as choices
 * @param count how many distinct servers to pick.
 * @return a list containing the best servers. The size of the list will be less than or equal to
 *         count, depending on if there are enough servers available.
 */
synchronized List<ServerLocation> getServersForQueue(ClientProxyMembershipID id, String group,
    Set<ServerLocation> excludedServers, int count) {
  if ("".equals(group)) {
    group = null;
  }

  Map<ServerLocation, LoadHolder> groupServers = queueLoadMap.get(group);
  if (groupServers == null || groupServers.isEmpty()) {
    return Collections.emptyList();
  }

  List<LoadHolder> candidates = findBestServers(groupServers, excludedServers, count);
  ArrayList<ServerLocation> chosen = new ArrayList<>(candidates.size());

  if (id == null) {
    for (LoadHolder candidate : candidates) {
      candidate.incConnections();
      chosen.add(candidate.getLocation());
    }
    return chosen;
  }

  // schedule a timeout per candidate so a client that never connects doesn't pin the estimate
  ClientProxyMembershipID.Identity actualId = id.getIdentity();
  for (LoadHolder candidate : candidates) {
    EstimateMapKey key = new EstimateMapKey(actualId, candidate.getLocation());
    LoadEstimateTask task = new LoadEstimateTask(key, candidate);
    try {
      final long MIN_TIMEOUT = 60000; // 1 minute
      long timeout = candidate.getLoadPollInterval() * 2;
      if (timeout < MIN_TIMEOUT) {
        timeout = MIN_TIMEOUT;
      }
      task.setFuture(estimateTimeoutProcessor.schedule(task, timeout, TimeUnit.MILLISECONDS));
      addEstimate(key, task);
    } catch (RejectedExecutionException ignored) {
      // the timer has been cancelled, which means we're shutting down
    }
    chosen.add(candidate.getLocation());
  }
  return chosen;
}
/**
 * Test hook to get the current load for all servers. Returns a map of ServerLocation-&gt;Load
 * for each server, built from the default (null) group maps.
 */
public synchronized Map<ServerLocation, ServerLoad> getLoadMap() {
  Map<ServerLocationAndMemberId, LoadHolder> connections = connectionLoadMap.get(null);
  Map<ServerLocation, LoadHolder> queues = queueLoadMap.get(null);
  Map<ServerLocation, ServerLoad> result = new HashMap<>();

  for (Entry<ServerLocationAndMemberId, LoadHolder> entry : connections.entrySet()) {
    ServerLocation location = entry.getKey().getServerLocation();
    LoadHolder connectionLoad = entry.getValue();
    LoadHolder queueLoad = queues.get(location);
    if (queueLoad == null) {
      // the server was asynchronously removed between reads of the two maps
      continue;
    }
    result.put(location,
        new ServerLoad(connectionLoad.getLoad(), connectionLoad.getLoadPerConnection(),
            queueLoad.getLoad(), queueLoad.getLoadPerConnection()));
  }
  return result;
}
/**
 * Registers the holder under each of its groups and, unless the server is a gateway receiver,
 * under the default (null) group as well. Entries are keyed by location only.
 */
@VisibleForTesting
void addGroups(Map<String, Map<ServerLocation, LoadHolder>> map, String[] groups,
    LoadHolder holder) {
  ServerLocation location = holder.getLocation();
  for (String group : groups) {
    map.computeIfAbsent(group, k -> new HashMap<>()).put(location, holder);
  }
  // Special case for GatewayReceiver where we don't put those serverlocation against holder
  boolean isGatewayReceiver =
      groups.length > 0 && groups[0].equals(GatewayReceiver.RECEIVER_GROUP);
  if (!isGatewayReceiver) {
    map.computeIfAbsent(null, k -> new HashMap<>()).put(location, holder);
  }
}
/**
 * Registers the holder under each of its groups and, unless the server is a gateway receiver,
 * under the default (null) group as well. Entries are keyed by (location, member id).
 */
@VisibleForTesting
void addGroups(Map<String, Map<ServerLocationAndMemberId, LoadHolder>> map,
    String[] groups,
    LoadHolder holder, String memberId) {
  ServerLocationAndMemberId key =
      new ServerLocationAndMemberId(holder.getLocation(), memberId);
  for (String group : groups) {
    map.computeIfAbsent(group, k -> new HashMap<>()).put(key, holder);
  }
  // Special case for GatewayReceiver where we don't put those serverlocation against holder
  boolean isGatewayReceiver =
      groups.length > 0 && groups[0].equals(GatewayReceiver.RECEIVER_GROUP);
  if (!isGatewayReceiver) {
    map.computeIfAbsent(null, k -> new HashMap<>()).put(key, holder);
  }
}
/**
 * Removes the given server from every listed group's map (dropping group entries that
 * become empty) and from the default (null) group map.
 */
@VisibleForTesting
void removeFromMap(Map<String, Map<ServerLocation, LoadHolder>> map, String[] groups,
    ServerLocation location) {
  for (String group : groups) {
    Map<ServerLocation, LoadHolder> groupMap = map.get(group);
    if (groupMap != null) {
      groupMap.remove(location);
      if (groupMap.isEmpty()) {
        map.remove(group);
      }
    }
  }
  // The default group map may be absent (e.g. only GatewayReceiver groups were ever
  // registered); guard against an NPE instead of assuming it exists. Also use the
  // parameterized type rather than a raw Map.
  Map<ServerLocation, LoadHolder> defaultGroupMap = map.get(null);
  if (defaultGroupMap != null) {
    defaultGroupMap.remove(location);
  }
}
/**
 * Removes the given server/member pair from every listed group's map (dropping group
 * entries that become empty) and from the default (null) group map.
 */
@VisibleForTesting
void removeFromMap(Map<String, Map<ServerLocationAndMemberId, LoadHolder>> map,
    String[] groups,
    ServerLocation location, String memberId) {
  ServerLocationAndMemberId locationAndMemberId =
      new ServerLocationAndMemberId(location, memberId);
  for (String group : groups) {
    Map<ServerLocationAndMemberId, LoadHolder> groupMap = map.get(group);
    if (groupMap != null) {
      groupMap.remove(locationAndMemberId);
      if (groupMap.isEmpty()) {
        map.remove(group);
      }
    }
  }
  // The default group map may be absent (e.g. only GatewayReceiver groups were ever
  // registered); guard against an NPE instead of assuming it exists. Also use the
  // parameterized type rather than a raw Map.
  Map<ServerLocationAndMemberId, LoadHolder> defaultGroupMap = map.get(null);
  if (defaultGroupMap != null) {
    defaultGroupMap.remove(locationAndMemberId);
  }
}
@VisibleForTesting
void updateMap(Map map, ServerLocation location, float load, float loadPerConnection) {
  // Convenience overload for maps keyed purely by ServerLocation (empty member id).
  updateMap(map, location, "", load, loadPerConnection);
}
/**
 * Updates the recorded load of an already-registered server in the default (null)
 * group map, if present.
 *
 * @param map raw map of group name to per-server LoadHolders; keyed by ServerLocation
 *        when memberId is empty, otherwise by ServerLocationAndMemberId
 * @param memberId the member id, or "" when the map is keyed by location only
 */
@VisibleForTesting
void updateMap(Map map, ServerLocation location, String memberId, float load,
    float loadPerConnection) {
  Map groupMap = (Map) map.get(null);
  if (groupMap == null) {
    // No default-group entries registered yet; nothing to update.
    return;
  }
  LoadHolder holder;
  if (memberId.isEmpty()) {
    holder = (LoadHolder) groupMap.get(location);
  } else {
    ServerLocationAndMemberId locationAndMemberId =
        new ServerLocationAndMemberId(location, memberId);
    holder = (LoadHolder) groupMap.get(locationAndMemberId);
  }
  if (holder != null) {
    holder.setLoad(load, loadPerConnection);
  }
}
/**
 * Finds the least-loaded servers in a group, best first.
 *
 * @param groupServers the servers to consider; keyed by either ServerLocation or
 *        ServerLocationAndMemberId
 * @param excludedServers servers to exclude
 * @param count how many you want. a negative number means all of them in order of best to worst
 * @return a list of best...worst server LoadHolders
 */
@VisibleForTesting
List<LoadHolder> findBestServers(
    Map<?, LoadHolder> groupServers,
    Set<ServerLocation> excludedServers, int count) {
  if (count == 0) {
    return new ArrayList<>();
  }
  // Order by load ascending, breaking ties by server location so the ordering is
  // deterministic and two distinct servers never compare as equal (a TreeSet would
  // silently drop one of them).
  TreeSet<LoadHolder> bestEntries = new TreeSet<>((l1, l2) -> {
    int difference = Float.compare(l1.getLoad(), l2.getLoad());
    if (difference != 0) {
      return difference;
    }
    ServerLocation sl1 = l1.getLocation();
    ServerLocation sl2 = l2.getLocation();
    return sl1.compareTo(sl2);
  });
  boolean retainAll = (count < 0);
  float lastBestLoad = Float.MAX_VALUE;
  for (Map.Entry<?, LoadHolder> loadEntry : groupServers.entrySet()) {
    ServerLocation location;
    Object key = loadEntry.getKey();
    if (key instanceof ServerLocationAndMemberId) {
      location = ((ServerLocationAndMemberId) key).getServerLocation();
    } else if (key instanceof ServerLocation) {
      location = ((ServerLocation) key);
    } else {
      throw new InternalGemFireException(
          "findBestServers method was called with incorrect type parameters.");
    }
    if (excludedServers.contains(location)) {
      continue;
    }
    LoadHolder nextLoadReference = loadEntry.getValue();
    float nextLoad = nextLoadReference.getLoad();
    // Keep this entry when we still need more, when all entries are retained, or when
    // it beats the worst entry retained so far.
    if ((bestEntries.size() < count) || retainAll || (nextLoad < lastBestLoad)) {
      bestEntries.add(nextLoadReference);
      if (!retainAll && (bestEntries.size() > count)) {
        // Evict the currently-worst entry to keep the set at exactly 'count' elements.
        bestEntries.remove(bestEntries.last());
      }
      LoadHolder lastBestHolder = bestEntries.last();
      lastBestLoad = lastBestHolder.getLoad();
    }
  }
  return new ArrayList<>(bestEntries);
}
/**
 * If it is most loaded then return its LoadHolder; otherwise return null.
 * "Most loaded" means no other server in the group carries a strictly higher load.
 */
@VisibleForTesting
LoadHolder isCurrentServerMostLoaded(ServerLocation currentServer,
    Map<ServerLocationAndMemberId, LoadHolder> groupServers) {
  // Locate the holder whose key refers to currentServer, if any.
  LoadHolder currentHolder = null;
  for (Map.Entry<ServerLocationAndMemberId, LoadHolder> entry : groupServers.entrySet()) {
    if (currentServer.equals(entry.getKey().getServerLocation())) {
      currentHolder = entry.getValue();
      break;
    }
  }
  if (currentHolder == null) {
    return null;
  }
  final float currentLoad = currentHolder.getLoad();
  for (Map.Entry<ServerLocationAndMemberId, LoadHolder> entry : groupServers.entrySet()) {
    if (currentServer.equals(entry.getKey().getServerLocation())) {
      continue;
    }
    if (entry.getValue().getLoad() > currentLoad) {
      // Found a server carrying a higher load than currentServer.
      return null;
    }
  }
  return currentHolder;
}
/** Cancels any queued load-estimate task for the given client/server pair. */
private void cancelClientEstimate(ClientProxyMembershipID id, ServerLocation location) {
  if (id == null) {
    return;
  }
  removeAndCancelEstimate(new EstimateMapKey(id.getIdentity(), location));
}
/**
 * Maps the given key to the task in the estimate map, cancelling whatever task was
 * previously mapped to that key.
 */
private void addEstimate(EstimateMapKey key, LoadEstimateTask task) {
  LoadEstimateTask previous = estimateMap.put(key, task);
  if (previous != null) {
    previous.cancel();
  }
}
/**
 * Remove the task from the estimate map at the given key.
 *
 * @return true if task was removed; false if it was not the task mapped to key
 */
private boolean removeIfPresentEstimate(EstimateMapKey key, LoadEstimateTask task) {
  // no need to cancel task; it already fired
  return estimateMap.remove(key, task);
}
/**
 * Remove and cancel any task estimate mapped to the given key.
 */
private void removeAndCancelEstimate(EstimateMapKey key) {
  LoadEstimateTask previous = estimateMap.remove(key);
  if (previous != null) {
    previous.cancel();
  }
}
/**
 * Key for the estimateMap: identifies the client/server pair that the pending resource
 * (e.g. a queue) would connect.
 */
private static class EstimateMapKey {
  private final ClientProxyMembershipID.Identity clientId;
  private final ServerLocation serverId;

  EstimateMapKey(ClientProxyMembershipID.Identity clientId, ServerLocation serverId) {
    this.clientId = clientId;
    this.serverId = serverId;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof EstimateMapKey)) {
      return false;
    }
    EstimateMapKey other = (EstimateMapKey) obj;
    return clientId.equals(other.clientId) && serverId.equals(other.serverId);
  }

  @Override
  public int hashCode() {
    return clientId.hashCode() ^ serverId.hashCode();
  }
}
/**
 * Scheduled task that ages out one load estimate: the constructor adds an estimated
 * connection to the LoadHolder, and running (or cancelling) the task removes it again.
 */
private class LoadEstimateTask implements Runnable {
  private final EstimateMapKey key;
  private final LoadHolder lh;
  // Set exactly once via setFuture() before cancel() can ever be called.
  private ScheduledFuture future;

  LoadEstimateTask(EstimateMapKey key, LoadHolder lh) {
    this.key = key;
    this.lh = lh;
    lh.addEstimate();
  }

  @Override
  public void run() {
    // Only decrement if this task is still the one mapped to the key; otherwise a newer
    // task has replaced it and owns the estimate now.
    if (removeIfPresentEstimate(key, this)) {
      decEstimate();
    }
  }

  public void setFuture(ScheduledFuture future) {
    // Note this is always called once and only once
    // and always before cancel can be called.
    this.future = future;
  }

  public void cancel() {
    future.cancel(false);
    decEstimate();
  }

  private void decEstimate() {
    // Load mutations are done under the snapshot's monitor (its public accessors are
    // synchronized), so take it here too.
    synchronized (LocatorLoadSnapshot.this) {
      lh.removeEstimate();
    }
  }
}
/**
 * Mutable record of one server's advertised load. Not synchronized internally; callers
 * appear to coordinate via the enclosing LocatorLoadSnapshot's monitor.
 */
@VisibleForTesting
static class LoadHolder {
  private final ServerLocation location;
  private final long loadPollInterval;
  private float load;
  private float loadPerConnection;
  // Connections we expect to be created soon but that the server has not yet
  // reflected in its reported load.
  private int estimateCount;

  LoadHolder(ServerLocation location, float load, float loadPerConnection,
      long loadPollInterval) {
    this.location = location;
    this.load = load;
    this.loadPerConnection = loadPerConnection;
    this.loadPollInterval = loadPollInterval;
  }

  void setLoad(float load, float loadPerConnection) {
    // Record the new per-connection cost first, then fold the outstanding estimates
    // into the freshly reported load using that new cost.
    this.loadPerConnection = loadPerConnection;
    this.load = load + (estimateCount * loadPerConnection);
  }

  void incConnections() {
    load = load + loadPerConnection;
  }

  void decConnections() {
    load = load - loadPerConnection;
  }

  void addEstimate() {
    estimateCount = estimateCount + 1;
    incConnections();
  }

  void removeEstimate() {
    estimateCount = estimateCount - 1;
    decConnections();
  }

  public float getLoad() {
    return load;
  }

  public float getLoadPerConnection() {
    return loadPerConnection;
  }

  public ServerLocation getLocation() {
    return location;
  }

  public long getLoadPollInterval() {
    return loadPollInterval;
  }

  @Override
  public String toString() {
    String estimates = (estimateCount != 0) ? (", estimates=" + estimateCount) : "";
    return "LoadHolder[" + getLoad() + ", " + getLocation() + ", loadPollInterval="
        + getLoadPollInterval() + estimates + ", " + loadPerConnection + "]";
  }
}
}
| |
/*
* Copyright 2013 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.protocols.channels;
import org.bitcoinj.core.*;
import org.bitcoinj.protocols.channels.PaymentChannelCloseException.CloseReason;
import org.bitcoinj.utils.Threading;
import org.bitcoinj.wallet.SendRequest;
import org.bitcoinj.wallet.Wallet;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import com.google.protobuf.ByteString;
import net.jcip.annotations.GuardedBy;
import org.bitcoin.paymentchannel.Protos;
import org.slf4j.LoggerFactory;
import org.spongycastle.crypto.params.KeyParameter;
import javax.annotation.Nullable;
import java.util.concurrent.locks.ReentrantLock;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
* <p>A class which handles most of the complexity of creating a payment channel connection by providing a
* simple in/out interface which is provided with protobufs from the server and which generates protobufs which should
* be sent to the server.</p>
*
* <p>Does all required verification of server messages and properly stores state objects in the wallet-attached
* {@link StoredPaymentChannelClientStates} so that they are automatically closed when necessary and refund
* transactions are not lost if the application crashes before it unlocks.</p>
*
* <p>Though this interface is largely designed with stateful protocols (eg simple TCP connections) in mind, it is also
* possible to use it with stateless protocols (eg sending protobufs when required over HTTP headers). In this case, the
* "connection" translates roughly into the server-client relationship. See the javadocs for specific functions for more
* details.</p>
*/
public class PaymentChannelClient implements IPaymentChannelClient {
private static final org.slf4j.Logger log = LoggerFactory.getLogger(PaymentChannelClient.class);
protected final ReentrantLock lock = Threading.lock("channelclient");
protected final ClientChannelProperties clientChannelProperties;
// Used to track the negotiated version number
@GuardedBy("lock") private int majorVersion;
@GuardedBy("lock") private final ClientConnection conn;
// Used to keep track of whether or not the "socket" ie connection is open and we can generate messages
@VisibleForTesting @GuardedBy("lock") boolean connectionOpen = false;
// The state object used to step through initialization and pay the server
@GuardedBy("lock") private PaymentChannelClientState state;
// The step we are at in initialization, this is partially duplicated in the state object
private enum InitStep {
    // connectionOpen() has not been called yet.
    WAITING_FOR_CONNECTION_OPEN,
    // CLIENT_VERSION sent; waiting for the server's SERVER_VERSION reply.
    WAITING_FOR_VERSION_NEGOTIATION,
    // Version agreed; waiting for INITIATE (or CHANNEL_OPEN when resuming a stored channel).
    WAITING_FOR_INITIATE,
    // v1 only: PROVIDE_REFUND sent; waiting for the server's RETURN_REFUND signature.
    WAITING_FOR_REFUND_RETURN,
    // PROVIDE_CONTRACT sent; waiting for the server's CHANNEL_OPEN confirmation.
    WAITING_FOR_CHANNEL_OPEN,
    // Channel is usable; payments can now be made.
    CHANNEL_OPEN,
    // CLOSE sent via settle(); waiting for the server's CLOSE reply.
    WAITING_FOR_CHANNEL_CLOSE,
    // Terminal state; the channel can no longer be used.
    CHANNEL_CLOSED,
}
@GuardedBy("lock") private InitStep step = InitStep.WAITING_FOR_CONNECTION_OPEN;
/**
 * Selects which protocol major version(s) the client requests and will accept from the
 * server during version negotiation.
 */
public enum VersionSelector {
    VERSION_1,
    VERSION_2_ALLOW_1,
    VERSION_2;

    /** Major protocol version to request from the server. */
    public int getRequestedMajorVersion() {
        return this == VERSION_1 ? 1 : 2;
    }

    /** Minor protocol version to request from the server (always 0 for now). */
    public int getRequestedMinorVersion() {
        return 0;
    }

    /** Returns true if a server offering the given major/minor version is acceptable. */
    public boolean isServerVersionAccepted(int major, int minor) {
        switch (this) {
            case VERSION_1:
                return major == 1;
            case VERSION_2_ALLOW_1:
                return major == 1 || major == 2;
            case VERSION_2:
                return major == 2;
            default:
                return false;
        }
    }
}
private final VersionSelector versionSelector;
// Will either hold the StoredClientChannel of this channel or null after connectionOpen
private StoredClientChannel storedChannel;
// An arbitrary hash which identifies this channel (specified by the API user)
private final Sha256Hash serverId;
// The wallet associated with this channel
private final Wallet wallet;
// Information used during channel initialization to send to the server or check what the server sends to us
private final ECKey myKey;
private final Coin maxValue;
private Coin missing;
// key to decrypt myKey, if it is encrypted, during setup.
private KeyParameter userKeySetup;
private final long timeWindow;
@GuardedBy("lock") private long minPayment;
@GuardedBy("lock") SettableFuture<PaymentIncrementAck> increasePaymentFuture;
@GuardedBy("lock") Coin lastPaymentActualAmount;
/**
* <p>The default maximum amount of time for which we will accept the server locking up our funds for the multisig
* contract.</p>
*
* <p>24 hours less a minute is the default as it is expected that clients limit risk exposure by limiting channel size instead of
* limiting lock time when dealing with potentially malicious servers.</p>
*/
public static final long DEFAULT_TIME_WINDOW = 24*60*60-60;
/**
* Constructs a new channel manager which waits for {@link PaymentChannelClient#connectionOpen()} before acting.
* A default time window of {@link #DEFAULT_TIME_WINDOW} will be used.
*
* @param wallet The wallet which will be paid from, and where completed transactions will be committed.
* Must already have a {@link StoredPaymentChannelClientStates} object in its extensions set.
* @param myKey A freshly generated keypair used for the multisig contract and refund output.
* @param maxValue The maximum value the server is allowed to request that we lock into this channel until the
* refund transaction unlocks. Note that if there is a previously open channel, the refund
* transaction used in this channel may be larger than maxValue. Thus, maxValue is not a method for
* limiting the amount payable through this channel.
* @param serverId An arbitrary hash representing this channel. This must uniquely identify the server. If an
* existing stored channel exists in the wallet's {@link StoredPaymentChannelClientStates}, then an
* attempt will be made to resume that channel.
* @param conn A callback listener which represents the connection to the server (forwards messages we generate to
* the server)
*/
public PaymentChannelClient(Wallet wallet, ECKey myKey, Coin maxValue, Sha256Hash serverId, ClientConnection conn) {
    // Delegate with no user key: myKey is assumed to be usable without decryption.
    this(wallet, myKey, maxValue, serverId, null, conn);
}
/**
* Constructs a new channel manager which waits for {@link PaymentChannelClient#connectionOpen()} before acting.
*
* @param wallet The wallet which will be paid from, and where completed transactions will be committed.
* Must already have a {@link StoredPaymentChannelClientStates} object in its extensions set.
* @param myKey A freshly generated keypair used for the multisig contract and refund output.
* @param maxValue The maximum value the server is allowed to request that we lock into this channel until the
* refund transaction unlocks. Note that if there is a previously open channel, the refund
* transaction used in this channel may be larger than maxValue. Thus, maxValue is not a method for
* limiting the amount payable through this channel.
* @param serverId An arbitrary hash representing this channel. This must uniquely identify the server. If an
* existing stored channel exists in the wallet's {@link StoredPaymentChannelClientStates}, then an
* attempt will be made to resume that channel.
* @param userKeySetup Key derived from a user password, used to decrypt myKey, if it is encrypted, during setup.
* @param conn A callback listener which represents the connection to the server (forwards messages we generate to
* the server)
*/
public PaymentChannelClient(Wallet wallet, ECKey myKey, Coin maxValue, Sha256Hash serverId,
                            @Nullable KeyParameter userKeySetup, ClientConnection conn) {
    // Delegate using the default channel properties.
    this(wallet, myKey, maxValue, serverId, userKeySetup, defaultChannelProperties, conn);
}
/**
* Constructs a new channel manager which waits for {@link PaymentChannelClient#connectionOpen()} before acting.
*
* @param wallet The wallet which will be paid from, and where completed transactions will be committed.
* Must already have a {@link StoredPaymentChannelClientStates} object in its extensions set.
* @param myKey A freshly generated keypair used for the multisig contract and refund output.
* @param maxValue The maximum value the server is allowed to request that we lock into this channel until the
* refund transaction unlocks. Note that if there is a previously open channel, the refund
* transaction used in this channel may be larger than maxValue. Thus, maxValue is not a method for
* limiting the amount payable through this channel.
* @param serverId An arbitrary hash representing this channel. This must uniquely identify the server. If an
* existing stored channel exists in the wallet's {@link StoredPaymentChannelClientStates}, then an
* attempt will be made to resume that channel.
* @param userKeySetup Key derived from a user password, used to decrypt myKey, if it is encrypted, during setup.
* @param clientChannelProperties Modify the channel's properties. You may extend {@link DefaultClientChannelProperties}
* @param conn A callback listener which represents the connection to the server (forwards messages we generate to
* the server)
*/
public PaymentChannelClient(Wallet wallet, ECKey myKey, Coin maxValue, Sha256Hash serverId,
                            @Nullable KeyParameter userKeySetup, @Nullable ClientChannelProperties clientChannelProperties,
                            ClientConnection conn) {
    this.wallet = checkNotNull(wallet);
    this.myKey = checkNotNull(myKey);
    this.maxValue = checkNotNull(maxValue);
    this.serverId = checkNotNull(serverId);
    this.conn = checkNotNull(conn);
    this.userKeySetup = userKeySetup;
    if (clientChannelProperties == null) {
        this.clientChannelProperties = defaultChannelProperties;
    } else {
        this.clientChannelProperties = clientChannelProperties;
    }
    // Read the properties through the field, not the parameter: the parameter is
    // @Nullable, and dereferencing it here used to throw NPE whenever the default
    // properties were being substituted for a null argument.
    this.timeWindow = this.clientChannelProperties.timeWindow();
    checkState(timeWindow >= 0);
    this.versionSelector = this.clientChannelProperties.versionSelector();
}
/**
* <p>Returns the amount of satoshis missing when a server requests too much value.</p>
*
* <p>When InsufficientMoneyException is thrown due to the server requesting too much value, an instance of
* PaymentChannelClient needs access to how many satoshis are missing.</p>
*/
public Coin getMissing() {
    // Set by receiveInitiate() when the server's minimum channel size or minimum
    // payment exceeds what we offered; null until such a failure occurs.
    return missing;
}
/**
 * Handles the server's INITIATE message: validates the proposed terms (expiry time,
 * minimum channel size, minimum payment), builds and initiates the version-appropriate
 * client state, then sends PROVIDE_REFUND (v1) or PROVIDE_CONTRACT (v2).
 *
 * @param initiate the server's proposed channel terms
 * @param contractValue the value we are willing to lock into the channel
 * @param errorBuilder populated with an error code when a non-null CloseReason is returned
 * @return null on success, otherwise the reason the channel must be closed
 */
@Nullable
@GuardedBy("lock")
private CloseReason receiveInitiate(Protos.Initiate initiate, Coin contractValue, Protos.Error.Builder errorBuilder)
        throws VerificationException, InsufficientMoneyException, ECKey.KeyIsEncryptedException {
    log.info("Got INITIATE message:\n{}", initiate.toString());
    // We need the decryption key now because an initial payment is signed below.
    if (wallet.isEncrypted() && this.userKeySetup == null)
        throw new ECKey.KeyIsEncryptedException();
    final long expireTime = initiate.getExpireTimeSecs();
    checkState( expireTime >= 0 && initiate.getMinAcceptedChannelSize() >= 0);
    if (! conn.acceptExpireTime(expireTime)) {
        log.error("Server suggested expire time was out of our allowed bounds: {} ({} s)", Utils.dateTimeFormat(expireTime * 1000), expireTime);
        errorBuilder.setCode(Protos.Error.ErrorCode.TIME_WINDOW_UNACCEPTABLE);
        return CloseReason.TIME_WINDOW_UNACCEPTABLE;
    }
    Coin minChannelSize = Coin.valueOf(initiate.getMinAcceptedChannelSize());
    if (contractValue.compareTo(minChannelSize) < 0) {
        log.error("Server requested too much value");
        errorBuilder.setCode(Protos.Error.ErrorCode.CHANNEL_VALUE_TOO_LARGE);
        // Record the satoshi shortfall so the API user can top up and retry (see getMissing()).
        missing = minChannelSize.subtract(contractValue);
        return CloseReason.SERVER_REQUESTED_TOO_MUCH_VALUE;
    }
    // For now we require a hard-coded value. In future this will have to get more complex and dynamic as the fees
    // start to float.
    final long maxMin = clientChannelProperties.acceptableMinPayment().value;
    if (initiate.getMinPayment() > maxMin) {
        log.error("Server requested a min payment of {} but we only accept up to {}", initiate.getMinPayment(), maxMin);
        errorBuilder.setCode(Protos.Error.ErrorCode.MIN_PAYMENT_TOO_LARGE);
        errorBuilder.setExpectedValue(maxMin);
        missing = Coin.valueOf(initiate.getMinPayment() - maxMin);
        return CloseReason.SERVER_REQUESTED_TOO_MUCH_VALUE;
    }
    final byte[] pubKeyBytes = initiate.getMultisigKey().toByteArray();
    if (!ECKey.isPubKeyCanonical(pubKeyBytes))
        throw new VerificationException("Server gave us a non-canonical public key, protocol error.");
    // Build the client-side state machine matching the negotiated protocol version.
    switch (majorVersion) {
        case 1:
            state = new PaymentChannelV1ClientState(wallet, myKey, ECKey.fromPublicOnly(pubKeyBytes), contractValue, expireTime);
            break;
        case 2:
            state = new PaymentChannelV2ClientState(wallet, myKey, ECKey.fromPublicOnly(pubKeyBytes), contractValue, expireTime);
            break;
        default:
            return CloseReason.NO_ACCEPTABLE_VERSION;
    }
    try {
        state.initiate(userKeySetup, clientChannelProperties);
    } catch (ValueOutOfRangeException e) {
        log.error("Value out of range when trying to initiate", e);
        errorBuilder.setCode(Protos.Error.ErrorCode.CHANNEL_VALUE_TOO_LARGE);
        return CloseReason.SERVER_REQUESTED_TOO_MUCH_VALUE;
    }
    minPayment = initiate.getMinPayment();
    switch (majorVersion) {
        case 1:
            // v1: hand the server our incomplete refund transaction for signing before
            // revealing the contract.
            step = InitStep.WAITING_FOR_REFUND_RETURN;
            Protos.ProvideRefund.Builder provideRefundBuilder = Protos.ProvideRefund.newBuilder()
                    .setMultisigKey(ByteString.copyFrom(myKey.getPubKey()))
                    .setTx(ByteString.copyFrom(((PaymentChannelV1ClientState)state).getIncompleteRefundTransaction().unsafeBitcoinSerialize()));
            conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder()
                    .setProvideRefund(provideRefundBuilder)
                    .setType(Protos.TwoWayChannelMessage.MessageType.PROVIDE_REFUND)
                    .build());
            break;
        case 2:
            step = InitStep.WAITING_FOR_CHANNEL_OPEN;
            // Before we can send the server the contract (ie send it to the network), we must ensure that our refund
            // transaction is safely in the wallet - thus we store it (this also keeps it up-to-date when we pay)
            state.storeChannelInWallet(serverId);
            Protos.ProvideContract.Builder provideContractBuilder = Protos.ProvideContract.newBuilder()
                    .setTx(ByteString.copyFrom(state.getContract().unsafeBitcoinSerialize()))
                    .setClientKey(ByteString.copyFrom(myKey.getPubKey()));
            try {
                // Make an initial payment of the dust limit, and put it into the message as well. The size of the
                // server-requested dust limit was already sanity checked by this point.
                PaymentChannelClientState.IncrementedPayment payment = state().incrementPaymentBy(Coin.valueOf(minPayment), userKeySetup);
                Protos.UpdatePayment.Builder initialMsg = provideContractBuilder.getInitialPaymentBuilder();
                initialMsg.setSignature(ByteString.copyFrom(payment.signature.encodeToBitcoin()));
                initialMsg.setClientChangeValue(state.getValueRefunded().value);
            } catch (ValueOutOfRangeException e) {
                throw new IllegalStateException(e); // This cannot happen.
            }
            // Not used any more
            userKeySetup = null;
            final Protos.TwoWayChannelMessage.Builder msg = Protos.TwoWayChannelMessage.newBuilder();
            msg.setProvideContract(provideContractBuilder);
            msg.setType(Protos.TwoWayChannelMessage.MessageType.PROVIDE_CONTRACT);
            conn.sendToServer(msg.build());
            break;
        default:
            return CloseReason.NO_ACCEPTABLE_VERSION;
    }
    return null;
}
/**
 * Handles the v1 RETURN_REFUND message: applies the server's refund signature, stores
 * the channel in the wallet, then sends PROVIDE_CONTRACT including an initial payment
 * of the minimum amount.
 */
@GuardedBy("lock")
private void receiveRefund(Protos.TwoWayChannelMessage refundMsg, @Nullable KeyParameter userKey) throws VerificationException {
    checkState(majorVersion == 1);
    checkState(step == InitStep.WAITING_FOR_REFUND_RETURN && refundMsg.hasReturnRefund());
    log.info("Got RETURN_REFUND message, providing signed contract");
    Protos.ReturnRefund returnedRefund = refundMsg.getReturnRefund();
    // Cast is safe since we've checked the version number
    ((PaymentChannelV1ClientState)state).provideRefundSignature(returnedRefund.getSignature().toByteArray(), userKey);
    step = InitStep.WAITING_FOR_CHANNEL_OPEN;
    // Before we can send the server the contract (ie send it to the network), we must ensure that our refund
    // transaction is safely in the wallet - thus we store it (this also keeps it up-to-date when we pay)
    state.storeChannelInWallet(serverId);
    Protos.ProvideContract.Builder contractMsg = Protos.ProvideContract.newBuilder()
            .setTx(ByteString.copyFrom(state.getContract().unsafeBitcoinSerialize()));
    try {
        // Make an initial payment of the dust limit, and put it into the message as well. The size of the
        // server-requested dust limit was already sanity checked by this point.
        PaymentChannelClientState.IncrementedPayment payment = state().incrementPaymentBy(Coin.valueOf(minPayment), userKey);
        Protos.UpdatePayment.Builder initialMsg = contractMsg.getInitialPaymentBuilder();
        initialMsg.setSignature(ByteString.copyFrom(payment.signature.encodeToBitcoin()));
        initialMsg.setClientChangeValue(state.getValueRefunded().value);
    } catch (ValueOutOfRangeException e) {
        throw new IllegalStateException(e); // This cannot happen.
    }
    final Protos.TwoWayChannelMessage.Builder msg = Protos.TwoWayChannelMessage.newBuilder();
    msg.setProvideContract(contractMsg);
    msg.setType(Protos.TwoWayChannelMessage.MessageType.PROVIDE_CONTRACT);
    conn.sendToServer(msg.build());
}
/**
 * Handles the server's CHANNEL_OPEN message: if a stored channel is being resumed,
 * rebuilds the client state from it, then marks the channel open and notifies the
 * connection callback.
 */
@GuardedBy("lock")
private void receiveChannelOpen() throws VerificationException {
    checkState(step == InitStep.WAITING_FOR_CHANNEL_OPEN || (step == InitStep.WAITING_FOR_INITIATE && storedChannel != null), step);
    log.info("Got CHANNEL_OPEN message, ready to pay");
    final boolean resumedExistingChannel = (step == InitStep.WAITING_FOR_INITIATE);
    if (resumedExistingChannel) {
        // INITIATE was skipped because a previous channel that's still valid was
        // resumed; rebuild the state object from the stored channel instead.
        if (majorVersion == 1) {
            state = new PaymentChannelV1ClientState(storedChannel, wallet);
        } else if (majorVersion == 2) {
            state = new PaymentChannelV2ClientState(storedChannel, wallet);
        } else {
            throw new IllegalStateException("Invalid version number " + majorVersion);
        }
    }
    step = InitStep.CHANNEL_OPEN;
    // channelOpen should disable timeouts, but
    // TODO accommodate high latency between PROVIDE_CONTRACT and here
    conn.channelOpen(!resumedExistingChannel);
}
/**
 * {@inheritDoc}
 *
 * <p>Dispatches a protobuf received from the server according to the current InitStep.
 * On a protocol violation an ERROR message is sent back and the connection destroyed.</p>
 */
@Override
public void receiveMessage(Protos.TwoWayChannelMessage msg) throws InsufficientMoneyException {
    lock.lock();
    try {
        checkState(connectionOpen);
        // If we generate an error, we set errorBuilder and closeReason and break, otherwise we return
        Protos.Error.Builder errorBuilder;
        CloseReason closeReason;
        try {
            switch (msg.getType()) {
                case SERVER_VERSION:
                    checkState(step == InitStep.WAITING_FOR_VERSION_NEGOTIATION && msg.hasServerVersion());
                    // Server might send back a major version lower than our own if they want to fallback to a
                    // lower version. We can't handle that, so we just close the channel.
                    majorVersion = msg.getServerVersion().getMajor();
                    if (!versionSelector.isServerVersionAccepted(majorVersion, msg.getServerVersion().getMinor())) {
                        errorBuilder = Protos.Error.newBuilder()
                                .setCode(Protos.Error.ErrorCode.NO_ACCEPTABLE_VERSION);
                        closeReason = CloseReason.NO_ACCEPTABLE_VERSION;
                        break;
                    }
                    log.info("Got version handshake, awaiting INITIATE or resume CHANNEL_OPEN");
                    step = InitStep.WAITING_FOR_INITIATE;
                    return;
                case INITIATE:
                    checkState(step == InitStep.WAITING_FOR_INITIATE && msg.hasInitiate());
                    Protos.Initiate initiate = msg.getInitiate();
                    errorBuilder = Protos.Error.newBuilder();
                    // receiveInitiate() fills errorBuilder and returns a non-null reason on failure.
                    closeReason = receiveInitiate(initiate, maxValue, errorBuilder);
                    if (closeReason == null)
                        return;
                    log.error("Initiate failed with error: {}", errorBuilder.build().toString());
                    break;
                case RETURN_REFUND:
                    receiveRefund(msg, userKeySetup);
                    // Key not used anymore
                    userKeySetup = null;
                    return;
                case CHANNEL_OPEN:
                    receiveChannelOpen();
                    return;
                case PAYMENT_ACK:
                    receivePaymentAck(msg.getPaymentAck());
                    return;
                case CLOSE:
                    receiveClose(msg);
                    return;
                case ERROR:
                    checkState(msg.hasError());
                    log.error("Server sent ERROR {} with explanation {}", msg.getError().getCode().name(),
                            msg.getError().hasExplanation() ? msg.getError().getExplanation() : "");
                    // Fail any in-flight incrementPayment() future before tearing down.
                    setIncreasePaymentFutureIfNeeded(CloseReason.REMOTE_SENT_ERROR, msg.getError().getCode().name());
                    conn.destroyConnection(CloseReason.REMOTE_SENT_ERROR);
                    return;
                default:
                    log.error("Got unknown message type or type that doesn't apply to clients.");
                    errorBuilder = Protos.Error.newBuilder()
                            .setCode(Protos.Error.ErrorCode.SYNTAX_ERROR);
                    setIncreasePaymentFutureIfNeeded(CloseReason.REMOTE_SENT_INVALID_MESSAGE, "");
                    closeReason = CloseReason.REMOTE_SENT_INVALID_MESSAGE;
                    break;
            }
        } catch (VerificationException e) {
            log.error("Caught verification exception handling message from server", e);
            errorBuilder = Protos.Error.newBuilder()
                    .setCode(Protos.Error.ErrorCode.BAD_TRANSACTION);
            final String message = e.getMessage();
            if (message != null)
                errorBuilder.setExplanation(message);
            closeReason = CloseReason.REMOTE_SENT_INVALID_MESSAGE;
        } catch (IllegalStateException e) {
            log.error("Caught illegal state exception handling message from server", e);
            errorBuilder = Protos.Error.newBuilder()
                    .setCode(Protos.Error.ErrorCode.SYNTAX_ERROR);
            closeReason = CloseReason.REMOTE_SENT_INVALID_MESSAGE;
        }
        // Reaching here means failure: report the error to the server, then destroy the
        // connection with the chosen close reason.
        conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder()
                .setError(errorBuilder)
                .setType(Protos.TwoWayChannelMessage.MessageType.ERROR)
                .build());
        conn.destroyConnection(closeReason);
    } finally {
        lock.unlock();
    }
}
/**
 * If this is an ongoing payment channel increase we need to call setException() on its
 * future so the caller learns the channel is closing.
 *
 * @param reason is the reason for aborting
 * @param message is the detailed message
 */
private void setIncreasePaymentFutureIfNeeded(PaymentChannelCloseException.CloseReason reason, String message) {
    if (increasePaymentFuture == null || increasePaymentFuture.isDone())
        return;
    increasePaymentFuture.setException(new PaymentChannelCloseException(message, reason));
}
/**
 * Handles the server's CLOSE message: if it carries a settlement transaction that
 * spends our contract, hands it to the wallet; then destroys the connection and marks
 * the channel closed.
 */
@GuardedBy("lock")
private void receiveClose(Protos.TwoWayChannelMessage msg) throws VerificationException {
    checkState(lock.isHeldByCurrentThread());
    if (msg.hasSettlement()) {
        Transaction settleTx = wallet.getParams().getDefaultSerializer().makeTransaction(msg.getSettlement().getTx().toByteArray());
        log.info("CLOSE message received with settlement tx {}", settleTx.getHash());
        // TODO: set source
        if (state != null && state().isSettlementTransaction(settleTx)) {
            // The wallet has a listener on it that the state object will use to do the right thing at this
            // point (like watching it for confirmations). The tx has been checked by now for syntactical validity
            // and that it correctly spends the multisig contract.
            wallet.receivePending(settleTx, null);
        }
    } else {
        log.info("CLOSE message received without settlement tx");
    }
    // CLIENT_REQUESTED_CLOSE if we started settlement via settle(), otherwise the
    // server chose to close the channel.
    if (step == InitStep.WAITING_FOR_CHANNEL_CLOSE)
        conn.destroyConnection(CloseReason.CLIENT_REQUESTED_CLOSE);
    else
        conn.destroyConnection(CloseReason.SERVER_REQUESTED_CLOSE);
    step = InitStep.CHANNEL_CLOSED;
}
/**
* <p>Called when the connection terminates. Notifies the {@link StoredClientChannel} object that we can attempt to
* resume this channel in the future and stops generating messages for the server.</p>
*
* <p>For stateless protocols, this translates to a client not using the channel for the immediate future, but
* intending to reopen the channel later. There is likely little reason to use this in a stateless protocol.</p>
*
* <p>Note that this <b>MUST</b> still be called even after either
* {@link org.bitcoinj.protocols.channels.IPaymentChannelClient.ClientConnection#destroyConnection(org.bitcoinj.protocols.channels.PaymentChannelCloseException.CloseReason)} or
* {@link PaymentChannelClient#settle()} is called, to actually handle the connection close logic.</p>
*/
@Override
public void connectionClosed() {
    lock.lock();
    try {
        // Mark the "socket" closed and detach the state object so the stored channel
        // can be resumed later.
        connectionOpen = false;
        if (state != null) {
            state.disconnectFromChannel();
        }
    } finally {
        lock.unlock();
    }
}
/**
* <p>Closes the connection, notifying the server it should settle the channel by broadcasting the most recent
* payment transaction.</p>
*
* <p>Note that this only generates a CLOSE message for the server and calls
* {@link org.bitcoinj.protocols.channels.IPaymentChannelClient.ClientConnection#destroyConnection(org.bitcoinj.protocols.channels.PaymentChannelCloseException.CloseReason)} to settle the connection, it does not
* actually handle connection close logic, and {@link PaymentChannelClient#connectionClosed()} must still be called
* after the connection fully closes.</p>
*
* @throws IllegalStateException If the connection is not currently open (ie the CLOSE message cannot be sent)
*/
@Override
public void settle() throws IllegalStateException {
lock.lock();
try {
checkState(connectionOpen);
step = InitStep.WAITING_FOR_CHANNEL_CLOSE;
log.info("Sending a CLOSE message to the server and waiting for response indicating successful settlement.");
conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder()
.setType(Protos.TwoWayChannelMessage.MessageType.CLOSE)
.build());
} finally {
lock.unlock();
}
}
/**
* <p>Called to indicate the connection has been opened and messages can now be generated for the server.</p>
*
* <p>Attempts to find a channel to resume and generates a CLIENT_VERSION message for the server based on the
* result.</p>
*/
    @Override
    public void connectionOpen() {
        lock.lock();
        try {
            connectionOpen = true;
            // Look for a previously stored channel with this server that we could resume.
            StoredPaymentChannelClientStates channels = (StoredPaymentChannelClientStates) wallet.getExtensions().get(StoredPaymentChannelClientStates.EXTENSION_ID);
            if (channels != null)
                storedChannel = channels.getUsableChannelForServerID(serverId);
            step = InitStep.WAITING_FOR_VERSION_NEGOTIATION;
            // Advertise the protocol version we want and our desired channel lifetime.
            Protos.ClientVersion.Builder versionNegotiationBuilder = Protos.ClientVersion.newBuilder()
                    .setMajor(versionSelector.getRequestedMajorVersion())
                    .setMinor(versionSelector.getRequestedMinorVersion())
                    .setTimeWindowSecs(timeWindow);
            if (storedChannel != null) {
                // Ask the server to resume the stored channel by sending its contract hash.
                versionNegotiationBuilder.setPreviousChannelContractHash(ByteString.copyFrom(storedChannel.contract.getHash().getBytes()));
                log.info("Begun version handshake, attempting to reopen channel with contract hash {}", storedChannel.contract.getHash());
            } else
                log.info("Begun version handshake creating new channel");
            conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder()
                    .setType(Protos.TwoWayChannelMessage.MessageType.CLIENT_VERSION)
                    .setClientVersion(versionNegotiationBuilder)
                    .build());
        } finally {
            lock.unlock();
        }
    }
/**
* <p>Gets the {@link PaymentChannelClientState} object which stores the current state of the connection with the
* server.</p>
*
* <p>Note that if you call any methods which update state directly the server will not be notified and channel
* initialization logic in the connection may fail unexpectedly.</p>
*/
    public PaymentChannelClientState state() {
        // Taken under the client lock so the read is consistent with any
        // in-flight protocol message handling.
        lock.lock();
        try {
            return state;
        } finally {
            lock.unlock();
        }
    }
/**
* Increments the total value which we pay the server. Note that the amount of money sent may not be the same as the
* amount of money actually requested. It can be larger if the amount left over in the channel would be too small to
* be accepted by the Bitcoin network. ValueOutOfRangeException will be thrown, however, if there's not enough money
* left in the channel to make the payment at all. Only one payment can be in-flight at once. You have to ensure
* you wait for the previous increase payment future to complete before incrementing the payment again.
*
* @param size How many satoshis to increment the payment by (note: not the new total).
* @return a future that completes when the server acknowledges receipt and acceptance of the payment.
* @throws ValueOutOfRangeException If the size is negative or would pay more than this channel's total value
* ({@link PaymentChannelClientConnection#state()}.getTotalValue())
* @throws IllegalStateException If the channel has been closed or is not yet open
* (see {@link PaymentChannelClientConnection#getChannelOpenFuture()} for the second)
*/
    public ListenableFuture<PaymentIncrementAck> incrementPayment(Coin size) throws ValueOutOfRangeException, IllegalStateException {
        // Convenience overload: no protocol-extension info bytes and no decryption key.
        return incrementPayment(size, null, null);
    }
/**
* Increments the total value which we pay the server. Note that the amount of money sent may not be the same as the
* amount of money actually requested. It can be larger if the amount left over in the channel would be too small to
* be accepted by the Bitcoin network. ValueOutOfRangeException will be thrown, however, if there's not enough money
* left in the channel to make the payment at all. Only one payment can be in-flight at once. You have to ensure
* you wait for the previous increase payment future to complete before incrementing the payment again.
*
* @param size How many satoshis to increment the payment by (note: not the new total).
* @param info Information about this update, used to extend this protocol.
* @param userKey Key derived from a user password, needed for any signing when the wallet is encrypted.
* The wallet KeyCrypter is assumed.
* @return a future that completes when the server acknowledges receipt and acceptance of the payment.
* @throws ValueOutOfRangeException If the size is negative or would pay more than this channel's total value
* ({@link PaymentChannelClientConnection#state()}.getTotalValue())
* @throws IllegalStateException If the channel has been closed or is not yet open
* (see {@link PaymentChannelClientConnection#getChannelOpenFuture()} for the second)
* @throws ECKey.KeyIsEncryptedException If the keys are encrypted and no AES key has been provided,
*/
@Override
public ListenableFuture<PaymentIncrementAck> incrementPayment(Coin size, @Nullable ByteString info, @Nullable KeyParameter userKey)
throws ValueOutOfRangeException, IllegalStateException, ECKey.KeyIsEncryptedException {
lock.lock();
try {
if (state() == null || !connectionOpen || step != InitStep.CHANNEL_OPEN)
throw new IllegalStateException("Channel is not fully initialized/has already been closed");
if (increasePaymentFuture != null)
throw new IllegalStateException("Already incrementing paying, wait for previous payment to complete.");
if (wallet.isEncrypted() && userKey == null)
throw new ECKey.KeyIsEncryptedException();
PaymentChannelV1ClientState.IncrementedPayment payment = state().incrementPaymentBy(size, userKey);
Protos.UpdatePayment.Builder updatePaymentBuilder = Protos.UpdatePayment.newBuilder()
.setSignature(ByteString.copyFrom(payment.signature.encodeToBitcoin()))
.setClientChangeValue(state.getValueRefunded().value);
if (info != null) updatePaymentBuilder.setInfo(info);
increasePaymentFuture = SettableFuture.create();
increasePaymentFuture.addListener(new Runnable() {
@Override
public void run() {
lock.lock();
increasePaymentFuture = null;
lock.unlock();
}
}, MoreExecutors.directExecutor());
conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder()
.setUpdatePayment(updatePaymentBuilder)
.setType(Protos.TwoWayChannelMessage.MessageType.UPDATE_PAYMENT)
.build());
lastPaymentActualAmount = payment.amount;
return increasePaymentFuture;
} finally {
lock.unlock();
}
}
    /**
     * Handles a PAYMENT_ACK from the server by completing the outstanding
     * payment-increase future with the amount actually paid and any server-supplied
     * info bytes.
     */
    private void receivePaymentAck(Protos.PaymentAck paymentAck) {
        SettableFuture<PaymentIncrementAck> future;
        Coin value;
        lock.lock();
        try {
            // Silently ignore an ack with no payment outstanding (e.g. a duplicate).
            if (increasePaymentFuture == null) return;
            // NOTE(review): this check can never fire — the early return above already
            // guarantees non-null. Presumably a leftover from a version that treated an
            // unexpected ack as a protocol error; confirm the intended behavior.
            checkNotNull(increasePaymentFuture, "Server sent a PAYMENT_ACK with no outstanding payment");
            log.info("Received a PAYMENT_ACK from the server");
            future = increasePaymentFuture;
            value = lastPaymentActualAmount;
        } finally {
            lock.unlock();
        }
        // Ensure the future runs without the client lock held.
        future.set(new PaymentIncrementAck(value, paymentAck.getInfo()));
    }
public static class DefaultClientChannelProperties implements ClientChannelProperties {
@Override
public SendRequest modifyContractSendRequest(SendRequest sendRequest) {
return sendRequest;
}
@Override
public Coin acceptableMinPayment() { return Transaction.REFERENCE_DEFAULT_MIN_TX_FEE; }
@Override
public long timeWindow() {
return DEFAULT_TIME_WINDOW;
}
@Override
public VersionSelector versionSelector() {
return VersionSelector.VERSION_2_ALLOW_1;
}
}
public static DefaultClientChannelProperties defaultChannelProperties = new DefaultClientChannelProperties();
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.lang.reflect.Constructor;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
* This class both tests and demonstrates how to construct compound rowkeys
* from a POJO. The code under test is {@link Struct}.
* {@link SpecializedPojo1Type1} demonstrates how one might create their own
* custom data type extension for an application POJO.
*/
@RunWith(Parameterized.class)
@Category({MiscTests.class, SmallTests.class})
public class TestStruct {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestStruct.class);
  // Parameterized inputs: a generic Struct codec, the equivalent hand-written
  // specialized codec, and the constructor arguments for the POJOs to
  // round-trip through both.
  @Parameterized.Parameter()
  public Struct generic;
  @SuppressWarnings("rawtypes")
  @Parameterized.Parameter(value = 1)
  public DataType specialized;
  @Parameterized.Parameter(value = 2)
  public Object[][] constructorArgs;
  /**
   * Supplies one parameter set per POJO flavor. The argument rows are chosen so
   * that their natural ordering exercises every field of the compound key,
   * including empty strings and empty byte arrays.
   */
  @Parameters
  public static Collection<Object[]> params() {
    Object[][] pojo1Args = {
        new Object[] { "foo", 5, 10.001 },
        new Object[] { "foo", 100, 7.0 },
        new Object[] { "foo", 100, 10.001 },
        new Object[] { "bar", 5, 10.001 },
        new Object[] { "bar", 100, 10.001 },
        new Object[] { "baz", 5, 10.001 },
    };
    Object[][] pojo2Args = {
        new Object[] { new byte[0], Bytes.toBytes("it"), "was", Bytes.toBytes("the") },
        new Object[] { Bytes.toBytes("best"), new byte[0], "of", Bytes.toBytes("times,") },
        new Object[] { Bytes.toBytes("it"), Bytes.toBytes("was"), "", Bytes.toBytes("the") },
        new Object[] { Bytes.toBytes("worst"), Bytes.toBytes("of"), "times,", new byte[0] },
        new Object[] { new byte[0], new byte[0], "", new byte[0] },
    };
    Object[][] params = new Object[][] {
        { SpecializedPojo1Type1.GENERIC, new SpecializedPojo1Type1(), pojo1Args },
        { SpecializedPojo2Type1.GENERIC, new SpecializedPojo2Type1(), pojo2Args },
    };
    return Arrays.asList(params);
  }
  // Byte-array comparator that sorts null before any non-null value; identical
  // references (including two nulls) compare equal.
  static final Comparator<byte[]> NULL_SAFE_BYTES_COMPARATOR = (o1, o2) -> {
    if (o1 == o2) {
      return 0;
    }
    if (null == o1) {
      return -1;
    }
    if (null == o2) {
      return 1;
    }
    return Bytes.compareTo(o1, o2);
  };
  /**
   * A simple object to serialize.
   */
  private static class Pojo1 implements Comparable<Pojo1> {
    final String stringFieldAsc;
    final int intFieldAsc;
    final double doubleFieldAsc;
    // Pre-rendered toString() value; transient because it is derived state.
    final transient String str;
    // Varargs constructor so instances can be built reflectively from the
    // parameterized argument rows: (String, Integer, Double).
    public Pojo1(Object... argv) {
      stringFieldAsc = (String) argv[0];
      intFieldAsc = (Integer) argv[1];
      doubleFieldAsc = (Double) argv[2];
      str = new StringBuilder()
          .append("{ ")
          .append(null == stringFieldAsc ? "" : "\"")
          .append(stringFieldAsc)
          .append(null == stringFieldAsc ? "" : "\"").append(", ")
          .append(intFieldAsc).append(", ")
          .append(doubleFieldAsc)
          .append(" }")
          .toString();
    }
    @Override
    public String toString() {
      return str;
    }
    // Orders by string, then int, then double — all ascending, mirroring the
    // encoded field order of SpecializedPojo1Type1.
    @Override
    public int compareTo(Pojo1 o) {
      int cmp = stringFieldAsc.compareTo(o.stringFieldAsc);
      if (cmp != 0) {
        return cmp;
      }
      cmp = Integer.compare(intFieldAsc, o.intFieldAsc);
      if (cmp != 0) {
        return cmp;
      }
      return Double.compare(doubleFieldAsc, o.doubleFieldAsc);
    }
    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      long temp;
      temp = Double.doubleToLongBits(doubleFieldAsc);
      result = prime * result + (int) (temp ^ (temp >>> 32));
      result = prime * result + intFieldAsc;
      result = prime * result + ((stringFieldAsc == null) ? 0 : stringFieldAsc.hashCode());
      return result;
    }
    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      Pojo1 other = (Pojo1) obj;
      if (Double.doubleToLongBits(doubleFieldAsc) !=
          Double.doubleToLongBits(other.doubleFieldAsc)) {
        return false;
      }
      if (intFieldAsc != other.intFieldAsc) {
        return false;
      }
      if (stringFieldAsc == null) {
        return other.stringFieldAsc == null;
      } else {
        return stringFieldAsc.equals(other.stringFieldAsc);
      }
    }
  }
  /**
   * A simple object to serialize.
   */
  private static class Pojo2 implements Comparable<Pojo2> {
    final byte[] byteField1Asc;
    final byte[] byteField2Dsc;
    final String stringFieldDsc;
    final byte[] byteField3Dsc;
    // Pre-rendered toString() value; transient because it is derived state.
    final transient String str;
    // Varargs constructor tolerant of short argument rows; missing trailing
    // fields default to null.
    public Pojo2(Object... vals) {
      byteField1Asc = vals.length > 0 ? (byte[]) vals[0] : null;
      byteField2Dsc = vals.length > 1 ? (byte[]) vals[1] : null;
      stringFieldDsc = vals.length > 2 ? (String) vals[2] : null;
      byteField3Dsc = vals.length > 3 ? (byte[]) vals[3] : null;
      str = new StringBuilder()
          .append("{ ")
          .append(Bytes.toStringBinary(byteField1Asc)).append(", ")
          .append(Bytes.toStringBinary(byteField2Dsc)).append(", ")
          .append(null == stringFieldDsc ? "" : "\"")
          .append(stringFieldDsc)
          .append(null == stringFieldDsc ? "" : "\"").append(", ")
          .append(Bytes.toStringBinary(byteField3Dsc))
          .append(" }")
          .toString();
    }
    @Override
    public String toString() {
      return str;
    }
    // Field-by-field comparison mirroring the encoded order; the "Dsc" fields
    // negate the underlying comparison to get descending order.
    @Override
    public int compareTo(Pojo2 o) {
      int cmp = NULL_SAFE_BYTES_COMPARATOR.compare(byteField1Asc, o.byteField1Asc);
      if (cmp != 0) {
        return cmp;
      }
      cmp = -NULL_SAFE_BYTES_COMPARATOR.compare(byteField2Dsc, o.byteField2Dsc);
      if (cmp != 0) {
        return cmp;
      }
      // A null string sorts after any non-null value here (descending order of
      // "null sorts first").
      if (null == stringFieldDsc) {
        cmp = 1;
      } else if (null == o.stringFieldDsc) {
        cmp = -1;
      } else if (stringFieldDsc.equals(o.stringFieldDsc)) {
        cmp = 0;
      } else {
        cmp = -stringFieldDsc.compareTo(o.stringFieldDsc);
      }
      if (cmp != 0) {
        return cmp;
      }
      return -NULL_SAFE_BYTES_COMPARATOR.compare(byteField3Dsc, o.byteField3Dsc);
    }
    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = prime * result + Arrays.hashCode(byteField1Asc);
      result = prime * result + Arrays.hashCode(byteField2Dsc);
      result = prime * result + Arrays.hashCode(byteField3Dsc);
      result = prime * result + ((stringFieldDsc == null) ? 0 : stringFieldDsc.hashCode());
      return result;
    }
    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      Pojo2 other = (Pojo2) obj;
      if (!Arrays.equals(byteField1Asc, other.byteField1Asc)) {
        return false;
      }
      if (!Arrays.equals(byteField2Dsc, other.byteField2Dsc)) {
        return false;
      }
      if (!Arrays.equals(byteField3Dsc, other.byteField3Dsc)) {
        return false;
      }
      if (stringFieldDsc == null) {
        return other.stringFieldDsc == null;
      } else {
        return stringFieldDsc.equals(other.stringFieldDsc);
      }
    }
  }
  /**
   * A custom data type implementation specialized for {@link Pojo1}.
   */
  private static class SpecializedPojo1Type1 implements DataType<Pojo1> {
    // Field codecs, in the same order as Pojo1's comparison logic.
    private static final RawStringTerminated stringField = new RawStringTerminated("/");
    private static final RawInteger intField = new RawInteger();
    private static final RawDouble doubleField = new RawDouble();
    /**
     * The {@link Struct} equivalent of this type.
     */
    public static Struct GENERIC = new StructBuilder().add(stringField).add(intField)
        .add(doubleField).toStruct();
    @Override
    public boolean isOrderPreserving() {
      return true;
    }
    // No single sort order applies to the compound value; callers get null.
    @Override
    public Order getOrder() {
      return null;
    }
    @Override
    public boolean isNullable() {
      return false;
    }
    @Override
    public boolean isSkippable() {
      return true;
    }
    @Override
    public int encodedLength(Pojo1 val) {
      return stringField.encodedLength(val.stringFieldAsc) +
          intField.encodedLength(val.intFieldAsc) +
          doubleField.encodedLength(val.doubleFieldAsc);
    }
    @Override
    public Class<Pojo1> encodedClass() {
      return Pojo1.class;
    }
    @Override
    public int skip(PositionedByteRange src) {
      int skipped = stringField.skip(src);
      skipped += intField.skip(src);
      skipped += doubleField.skip(src);
      return skipped;
    }
    @Override
    public Pojo1 decode(PositionedByteRange src) {
      Object[] ret = new Object[3];
      ret[0] = stringField.decode(src);
      ret[1] = intField.decode(src);
      ret[2] = doubleField.decode(src);
      return new Pojo1(ret);
    }
    @Override
    public int encode(PositionedByteRange dst, Pojo1 val) {
      int written = stringField.encode(dst, val.stringFieldAsc);
      written += intField.encode(dst, val.intFieldAsc);
      written += doubleField.encode(dst, val.doubleFieldAsc);
      return written;
    }
  }
  /**
   * A custom data type implementation specialized for {@link Pojo2}.
   */
  private static class SpecializedPojo2Type1 implements DataType<Pojo2> {
    // Field codecs, in the same order as Pojo2's comparison logic; the last
    // field uses an unterminated encoding since nothing follows it.
    private static RawBytesTerminated byteField1 = new RawBytesTerminated("/");
    private static RawBytesTerminated byteField2 =
        new RawBytesTerminated(Order.DESCENDING, "/");
    private static RawStringTerminated stringField =
        new RawStringTerminated(Order.DESCENDING, new byte[] { 0x00 });
    private static RawBytes byteField3 = new RawBytes(Order.DESCENDING);
    /**
     * The {@link Struct} equivalent of this type.
     */
    public static Struct GENERIC = new StructBuilder().add(byteField1).add(byteField2)
        .add(stringField).add(byteField3).toStruct();
    @Override
    public boolean isOrderPreserving() {
      return true;
    }
    // No single sort order applies to the compound value; callers get null.
    @Override
    public Order getOrder() {
      return null;
    }
    @Override
    public boolean isNullable() {
      return false;
    }
    @Override
    public boolean isSkippable() {
      return true;
    }
    @Override
    public int encodedLength(Pojo2 val) {
      return byteField1.encodedLength(val.byteField1Asc) +
          byteField2.encodedLength(val.byteField2Dsc) +
          stringField.encodedLength(val.stringFieldDsc) +
          byteField3.encodedLength(val.byteField3Dsc);
    }
    @Override
    public Class<Pojo2> encodedClass() {
      return Pojo2.class;
    }
    @Override
    public int skip(PositionedByteRange src) {
      int skipped = byteField1.skip(src);
      skipped += byteField2.skip(src);
      skipped += stringField.skip(src);
      skipped += byteField3.skip(src);
      return skipped;
    }
    @Override
    public Pojo2 decode(PositionedByteRange src) {
      Object[] ret = new Object[4];
      ret[0] = byteField1.decode(src);
      ret[1] = byteField2.decode(src);
      ret[2] = stringField.decode(src);
      ret[3] = byteField3.decode(src);
      return new Pojo2(ret);
    }
    @Override
    public int encode(PositionedByteRange dst, Pojo2 val) {
      int written = byteField1.encode(dst, val.byteField1Asc);
      written += byteField2.encode(dst, val.byteField2Dsc);
      written += stringField.encode(dst, val.stringFieldDsc);
      written += byteField3.encode(dst, val.byteField3Dsc);
      return written;
    }
  }
  /**
   * Verifies that the generic and specialized codecs produce identical bytes
   * and that sorting the encoded forms agrees with the natural ordering of the
   * POJOs themselves (i.e. both codecs are order-preserving).
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testOrderPreservation() throws Exception {
    Object[] vals = new Object[constructorArgs.length];
    PositionedByteRange[] encodedGeneric = new PositionedByteRange[constructorArgs.length];
    PositionedByteRange[] encodedSpecialized = new PositionedByteRange[constructorArgs.length];
    // Build POJO instances reflectively and size a destination buffer for each.
    Constructor<?> ctor = specialized.encodedClass().getConstructor(Object[].class);
    for (int i = 0; i < vals.length; i++) {
      vals[i] = ctor.newInstance(new Object[] { constructorArgs[i] });
      encodedGeneric[i] = new SimplePositionedMutableByteRange(
          generic.encodedLength(constructorArgs[i]));
      encodedSpecialized[i] = new SimplePositionedMutableByteRange(
          specialized.encodedLength(vals[i]));
    }
    // populate our arrays: encode with both codecs and check byte-identical output.
    for (int i = 0; i < vals.length; i++) {
      generic.encode(encodedGeneric[i], constructorArgs[i]);
      encodedGeneric[i].setPosition(0);
      specialized.encode(encodedSpecialized[i], vals[i]);
      encodedSpecialized[i].setPosition(0);
      assertArrayEquals(encodedGeneric[i].getBytes(), encodedSpecialized[i].getBytes());
    }
    // Sorting the raw encodings must agree with sorting the POJOs.
    Arrays.sort(vals);
    Arrays.sort(encodedGeneric);
    Arrays.sort(encodedSpecialized);
    for (int i = 0; i < vals.length; i++) {
      assertEquals(
        "Struct encoder does not preserve sort order at position " + i,
        vals[i],
        ctor.newInstance(new Object[] { generic.decode(encodedGeneric[i]) }));
      assertEquals(
        "Specialized encoder does not preserve sort order at position " + i,
        vals[i], specialized.decode(encodedSpecialized[i]));
    }
  }
}
| |
/*
* RDV
* Real-time Data Viewer
* http://rdv.googlecode.com/
*
* Copyright (c) 2005-2007 University at Buffalo
* Copyright (c) 2005-2007 NEES Cyberinfrastructure Center
* Copyright (c) 2008 Palta Software
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* $URL$
* $Revision$
* $Date$
* $Author$
*/
package org.rdv.ui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import javax.swing.JEditorPane;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.rdv.DataViewer;
import org.rdv.data.Channel;
import org.rdv.rbnb.ChannelManager;
import org.rdv.rbnb.MetadataListener;
import org.rdv.rbnb.RBNBController;
import com.jgoodies.uif_lite.panel.SimpleInternalFrame;
import com.rbnb.sapi.ChannelTree;
/**
* @author Jason P. Hanley
*/
public class MetadataPanel extends JPanel implements MetadataListener, ChannelSelectionListener {
  /** serialization version identifier */
  private static final long serialVersionUID = 5913041782729190355L;
  static Log log = LogFactory.getLog(ChannelManager.class.getName());
  /** controller used to query the server name and per-channel metadata */
  private RBNBController rbnb;
  /** latest channel tree received from the server, or null before the first update */
  private ChannelTree ctree;
  /** selected channel name, ROOT_CHANNEL for the server root, or null for no selection */
  private String channel;
  /** number of children of the current selection */
  private int children;
  /**
   * Identity sentinel marking selection of the server root. Deliberately a fresh
   * String instance compared with == (not equals) so it can never collide with a
   * real channel name, even an empty one.
   */
  private final String ROOT_CHANNEL = new String();
  /** read-only HTML pane the metadata summary is rendered into */
  private JEditorPane infoTextArea;

  public MetadataPanel(RBNBController rbnb) {
    this.rbnb = rbnb;
    ctree = null;
    channel = null;
    initPanel();
  }

  /** Builds the component hierarchy: a framed, read-only HTML editor pane. */
  private void initPanel() {
    setBorder(null);
    setLayout(new BorderLayout());
    setMinimumSize(new Dimension(130, 27));
    setPreferredSize(new Dimension(150, 150));
    infoTextArea = new JEditorPane();
    infoTextArea.setBackground(Color.white);
    infoTextArea.setEditable(false);
    infoTextArea.setContentType("text/html");
    infoTextArea.setBorder(new EmptyBorder(5, 5, 5, 5));
    SimpleInternalFrame infoViewFrame = new SimpleInternalFrame(
        DataViewer.getIcon("icons/properties.gif"),
        "Properties",
        null,
        infoTextArea);
    add(infoViewFrame, BorderLayout.CENTER);
  }

  /**
   * Parses an integer metadata value, returning {@code defaultValue} when the
   * value is absent or malformed.
   */
  private static int parseIntMetadata(String value, int defaultValue) {
    if (value == null) {
      return defaultValue;
    }
    try {
      return Integer.parseInt(value);
    } catch (NumberFormatException e) {
      return defaultValue;
    }
  }

  /**
   * Re-renders the panel for the current selection: nothing, the server root, a
   * channel (with mime-type-specific details), or an intermediate tree node.
   */
  private void updatePanel() {
    clearPanel();
    if (channel == null || ctree == null) {
      return;
    } else if (channel == ROOT_CHANNEL) {
      // Server root selected: show the server name and data-source count.
      StringBuilder s = new StringBuilder();
      s.append("<strong>" + rbnb.getServerName() + "</strong><br>");
      s.append("<em>Data Server</em>");
      s.append("<p style=\"font-size: 10px\">" + children + " Data Source");
      if (children == 0 || children > 1) {
        s.append("s");
      }
      s.append("</p>");
      infoTextArea.setText(s.toString());
    } else {
      StringBuilder s = new StringBuilder();
      ChannelTree.Node node = ctree.findNode(channel);
      if (node.getType() == ChannelTree.CHANNEL) {
        // NOTE(review): assumes rbnb.getChannel() is non-null for any channel
        // present in the tree -- confirm against RBNBController.
        Channel channelMetadata = rbnb.getChannel(channel);
        String unit = channelMetadata.getUnit();
        String mime = channelMetadata.getMetadata("mime");
        String description = channelMetadata.getMetadata("description");
        double start = channelMetadata.getStart();
        double duration = channelMetadata.getDuration();
        // -1 means "size unknown" and suppresses the size line below. FIX: the
        // original parsed unconditionally and threw when the metadata was
        // missing or malformed.
        int size = parseIntMetadata(channelMetadata.getMetadata("size"), -1);
        String formula = channelMetadata.getMetadata("formula");
        s.append("<strong>" + channel + "</strong>");
        if (unit != null) {
          s.append(" (" + unit + ")");
        }
        if (description != null) {
          s.append("<br>" + description);
        }
        if (mime != null) {
          s.append("<br>");
          if (mime.equals("application/octet-stream")) {
            s.append("<em>Numeric Data</em>");
            String sampleRate = channelMetadata.getMetadata("samplerate");
            if (sampleRate != null) {
              s.append("<br>" + sampleRate +" Hz");
            }
          } else if (mime.equals("image/jpeg")) {
            s.append("<em>JPEG Images</em>");
            String width = channelMetadata.getMetadata("width");
            String height = channelMetadata.getMetadata("height");
            if (width != null && height != null) {
              s.append("<br>" + width + " x " + height);
              String sampleRate = channelMetadata.getMetadata("framerate");
              if (sampleRate != null) {
                s.append(", " + sampleRate + " fps");
              }
            }
          } else if (mime.equals("video/jpeg")) {
            s.append("<em>JPEG Video</em>");
          } else if (mime.equals("text/plain")) {
            s.append("<em>Text</em>");
          } else if (mime.startsWith("audio/")) {
            s.append("<em>Audio</em>");
          } else {
            s.append("<em>" + mime + "</em>");
          }
        }
        s.append("<p style=\"font-size: 10px\">Begins " + DataViewer.formatDateSmart(start) + "<br>");
        s.append("Lasts " + DataViewer.formatSeconds(duration));
        if (size != -1) {
          s.append("<br>" + DataViewer.formatBytes(size));
        }
        if (mime != null) {
          if (mime.startsWith("audio/")) {
            String encoding = channelMetadata.getMetadata("encoding");
            String channels = channelMetadata.getMetadata("channels");
            String sampleRate = channelMetadata.getMetadata("samplerate");
            String sampleSize = channelMetadata.getMetadata("samplesize");
            // FIX: null-safe comparison. The original dereferenced these
            // metadata values directly and threw NPE when "signed" or
            // "endian" was absent from the channel metadata.
            String signed = "1".equals(channelMetadata.getMetadata("signed")) ? "s" : "u";
            String endian = "1".equals(channelMetadata.getMetadata("endian")) ? "be" : "le";
            if (encoding != null && channels != null && sampleRate != null && sampleSize != null && endian != null) {
              s.append("<br>" + encoding + " (" + Float.parseFloat(sampleRate)/1000 + "kHz");
              s.append("/" + channels + "ch");
              s.append("/" + sampleSize + "b");
              if (Integer.parseInt(sampleSize) > 1) {
                // Endianness only matters for multi-byte samples.
                s.append("/" + endian);
              }
              s.append("/" + signed);
              s.append(")");
            }
          }
        }
        // metadata for local channels
        if (formula != null) {
          if (formula.length() > 50) {
            // Truncate long formulas so the panel stays readable.
            formula = formula.substring(0, 47) + "...";
          }
          s.append("<br><br>" + formula);
          String variablesString = channelMetadata.getMetadata("variables");
          if (variablesString != null) {
            s.append(" where ");
            // Variables are encoded as "name:value" pairs separated by '|'.
            String[] variables = variablesString.split("\\|");
            for (int i=0; i<variables.length; i++) {
              String variable = variables[i];
              String[] v = variable.split(":");
              if (v.length == 2) {
                s.append(v[0]).append('=').append(v[1]);
              } else {
                s.append(variable);
              }
              if (i < variables.length-1) {
                s.append(", ");
              }
            }
          }
        }
        s.append("</p>");
      } else {
        // Non-leaf node: show its type and child count.
        s.append("<strong>" + channel + "</strong><br>");
        if (node.getType() == ChannelTree.SERVER) {
          s.append("<em>Child Server</em>");
        } else if (node.getType() == ChannelTree.SOURCE) {
          s.append("<em>Data Source</em>");
        } else {
          s.append("<em>" + node.getType() + "</em>");
        }
        s.append("<p style=\"font-size: 10px\">" + children + " Children</p>");
      }
      infoTextArea.setText(s.toString());
    }
    infoTextArea.setCaretPosition(0);
  }

  /** Clears the displayed metadata. */
  private void clearPanel() {
    infoTextArea.setText("");
  }

  /** MetadataListener callback: remember the new tree and refresh the display. */
  public void channelTreeUpdated(ChannelTree ctree) {
    this.ctree = ctree;
    updatePanel();
  }

  /** ChannelSelectionListener callback: track the new selection and refresh. */
  public void channelSelected(ChannelSelectionEvent e) {
    if (e.isRoot()) {
      channel = ROOT_CHANNEL;
    } else {
      channel = e.getChannelName();
    }
    children = e.getChildren();
    updatePanel();
  }

  /** ChannelSelectionListener callback: clear selection state and the display. */
  public void channelSelectionCleared() {
    channel = null;
    children = 0;
    clearPanel();
  }
}
| |
package com.defano.wyldcard.window.layouts;
import com.defano.wyldcard.aspect.RunOnDispatch;
import com.defano.wyldcard.fonts.TextStyleSpecifier;
import com.defano.wyldcard.parts.field.FieldModel;
import com.defano.wyldcard.parts.field.FieldStyle;
import com.defano.wyldcard.runtime.context.ExecutionContext;
import com.defano.wyldcard.util.StringUtils;
import com.defano.wyldcard.window.ActionBindable;
import com.defano.wyldcard.window.WyldCardDialog;
import com.defano.hypertalk.ast.model.Owner;
import com.defano.hypertalk.ast.model.Value;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Spacer;
import com.l2fprod.common.swing.JFontChooser;
import javax.swing.*;
import java.awt.*;
@SuppressWarnings("unchecked")
public class FieldPropertyEditor extends WyldCardDialog<FieldModel> implements ActionBindable {
private FieldModel model;
private JPanel fieldEditor;
private JTextField fieldName;
private JLabel idLabelValue;
private JSpinner fieldHeight;
private JSpinner fieldWidth;
private JSpinner fieldTop;
private JSpinner fieldLeft;
private JCheckBox isVisible;
private JCheckBox isWrapText;
private JCheckBox isLockText;
private JButton saveButton;
private JButton editScriptButton;
private JComboBox style;
private JCheckBox showLines;
private JLabel fieldLabel;
private JLabel partLabel;
private JLabel partLabelValue;
private JLabel fieldLabelValue;
private JButton textStyleButton;
private JCheckBox sharedText;
private JCheckBox enabled;
private JCheckBox isWideMargins;
private JCheckBox autoTab;
private JCheckBox autoSelect;
private JCheckBox multipleLines;
private JCheckBox scrolling;
public FieldPropertyEditor() {
editScriptButton.addActionListener(e -> {
dispose();
SwingUtilities.invokeLater(() -> model.editScript(new ExecutionContext()));
});
saveButton.addActionListener(e -> {
updateProperties();
dispose();
});
DefaultComboBoxModel model = new DefaultComboBoxModel();
for (FieldStyle thisStyle : FieldStyle.values()) {
model.addElement(thisStyle.getName());
}
style.setModel(model);
enabled.addActionListener(e -> onEnabledChanged());
autoSelect.addActionListener(e -> onAutoSelectChanged());
}
    /** Returns the button activated by the Enter key: the save button. */
    @Override
    public JButton getDefaultButton() {
        return saveButton;
    }
    /** Returns the root content panel of this dialog. */
    @Override
    public JPanel getWindowPanel() {
        return fieldEditor;
    }
@Override
@RunOnDispatch
public void bindModel(FieldModel data) {
ExecutionContext context = new ExecutionContext();
this.model = data;
long partNumber = model.getPartNumber(context);
long fieldNumber = model.getFieldNumber(context);
long fieldCount = model.getFieldCount(context);
long partCount = model.getPartCount(context);
String layer = model.getOwner().hyperTalkName;
fieldLabel.setText(layer + " Field:");
fieldLabelValue.setText(fieldNumber + " of " + fieldCount);
partLabel.setText(layer + " Part:");
partLabelValue.setText(partNumber + " of " + partCount);
idLabelValue.setText(String.valueOf(model.getId(context)));
fieldName.setText(model.getKnownProperty(context, FieldModel.PROP_NAME).toString());
idLabelValue.setText(model.getKnownProperty(context, FieldModel.PROP_ID).toString());
fieldTop.setValue(model.getKnownProperty(context, FieldModel.PROP_TOP).integerValue());
fieldLeft.setValue(model.getKnownProperty(context, FieldModel.PROP_LEFT).integerValue());
fieldHeight.setValue(model.getKnownProperty(context, FieldModel.PROP_HEIGHT).integerValue());
fieldWidth.setValue(model.getKnownProperty(context, FieldModel.PROP_WIDTH).integerValue());
isLockText.setSelected(model.getKnownProperty(context, FieldModel.PROP_LOCKTEXT).booleanValue());
isVisible.setSelected(model.getKnownProperty(context, FieldModel.PROP_VISIBLE).booleanValue());
isWrapText.setSelected(model.getKnownProperty(context, FieldModel.PROP_DONTWRAP).booleanValue());
showLines.setSelected(model.getKnownProperty(context, FieldModel.PROP_SHOWLINES).booleanValue());
style.setSelectedItem(StringUtils.capitalize(model.getKnownProperty(context, FieldModel.PROP_STYLE).toString()));
enabled.setSelected(model.getKnownProperty(context, FieldModel.PROP_ENABLED).booleanValue());
isWideMargins.setSelected(model.getKnownProperty(context, FieldModel.PROP_WIDEMARGINS).booleanValue());
autoTab.setSelected(model.getKnownProperty(context, FieldModel.PROP_AUTOTAB).booleanValue());
autoSelect.setSelected(model.getKnownProperty(context, FieldModel.PROP_AUTOSELECT).booleanValue());
multipleLines.setSelected(model.getKnownProperty(context, FieldModel.PROP_MULTIPLELINES).booleanValue());
scrolling.setSelected(model.getKnownProperty(context, FieldModel.PROP_SCROLLING).booleanValue());
sharedText.setEnabled(model.getOwner() == Owner.BACKGROUND);
sharedText.setSelected(model.getKnownProperty(context, FieldModel.PROP_SHAREDTEXT).booleanValue());
multipleLines.setEnabled(model.getKnownProperty(context, FieldModel.PROP_AUTOSELECT).booleanValue());
textStyleButton.addActionListener(e -> {
dispose();
Font selection = JFontChooser.showDialog(getWindowPanel(), "Choose Font", model.getTextStyle(context).toFont());
if (selection != null) {
model.setTextStyle(context, TextStyleSpecifier.fromFont(selection));
}
});
onEnabledChanged();
onAutoSelectChanged();
bindActions(a -> updateProperties(),
fieldTop,
fieldLeft,
fieldHeight,
fieldWidth,
isWideMargins,
isWrapText,
showLines,
style,
enabled,
autoSelect,
multipleLines,
scrolling);
}
private void updateProperties() {
ExecutionContext context = new ExecutionContext();
model.setKnownProperty(context, FieldModel.PROP_NAME, new Value(fieldName.getText()));
model.setKnownProperty(context, FieldModel.PROP_TOP, new Value(fieldTop.getValue()));
model.setKnownProperty(context, FieldModel.PROP_LEFT, new Value(fieldLeft.getValue()));
model.setKnownProperty(context, FieldModel.PROP_HEIGHT, new Value(fieldHeight.getValue()));
model.setKnownProperty(context, FieldModel.PROP_WIDTH, new Value(fieldWidth.getValue()));
model.setKnownProperty(context, FieldModel.PROP_LOCKTEXT, new Value(isLockText.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_VISIBLE, new Value(isVisible.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_DONTWRAP, new Value(isWrapText.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_SHOWLINES, new Value(showLines.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_STYLE, new Value(String.valueOf(style.getSelectedItem())));
model.setKnownProperty(context, FieldModel.PROP_SHAREDTEXT, new Value(sharedText.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_ENABLED, new Value(enabled.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_WIDEMARGINS, new Value(isWideMargins.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_AUTOTAB, new Value(autoTab.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_AUTOSELECT, new Value(autoSelect.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_MULTIPLELINES, new Value(multipleLines.isSelected()));
model.setKnownProperty(context, FieldModel.PROP_SCROLLING, new Value(scrolling.isSelected()));
}
private void onEnabledChanged() {
if (!enabled.isSelected()) {
isLockText.setSelected(true);
isLockText.setEnabled(false);
} else {
isLockText.setEnabled(true);
}
}
private void onAutoSelectChanged() {
if (autoSelect.isSelected()) {
isLockText.setSelected(true);
isLockText.setEnabled(false);
isWrapText.setSelected(true);
isWrapText.setEnabled(false);
multipleLines.setEnabled(true);
} else {
isWrapText.setEnabled(true);
multipleLines.setEnabled(false);
isLockText.setEnabled(true);
}
}
{
// GUI initializer generated by IntelliJ IDEA GUI Designer
// >>> IMPORTANT!! <<<
// DO NOT EDIT OR ADD ANY CODE HERE!
$$$setupUI$$$();
}
/**
* Method generated by IntelliJ IDEA GUI Designer
* >>> IMPORTANT!! <<<
* DO NOT edit this method OR call it in your code!
*
* @noinspection ALL
*/
private void $$$setupUI$$$() {
final JPanel panel1 = new JPanel();
panel1.setLayout(new GridLayoutManager(1, 1, new Insets(0, 0, 0, 0), -1, -1));
fieldEditor = new JPanel();
fieldEditor.setLayout(new GridLayoutManager(5, 9, new Insets(10, 10, 10, 10), -1, -1));
fieldEditor.setMaximumSize(new Dimension(587, 257));
panel1.add(fieldEditor, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
final JPanel panel2 = new JPanel();
panel2.setLayout(new GridLayoutManager(4, 5, new Insets(5, 5, 5, 5), -1, -1));
fieldEditor.add(panel2, new GridConstraints(0, 0, 1, 9, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
panel2.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Identification"));
final JLabel label1 = new JLabel();
label1.setHorizontalAlignment(4);
label1.setText("Field Name:");
panel2.add(label1, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, new Dimension(90, -1), null, new Dimension(90, -1), 0, false));
fieldLabel = new JLabel();
fieldLabel.setText("Card Field:");
panel2.add(fieldLabel, new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
partLabel = new JLabel();
partLabel.setText("Card Part:");
panel2.add(partLabel, new GridConstraints(3, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
partLabelValue = new JLabel();
partLabelValue.setText("Label");
panel2.add(partLabelValue, new GridConstraints(3, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
fieldLabelValue = new JLabel();
fieldLabelValue.setText("Label");
panel2.add(fieldLabelValue, new GridConstraints(2, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
fieldName = new JTextField();
panel2.add(fieldName, new GridConstraints(0, 1, 1, 4, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(150, -1), null, 0, false));
final Spacer spacer1 = new Spacer();
panel2.add(spacer1, new GridConstraints(2, 2, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null, null, 0, false));
final JLabel label2 = new JLabel();
label2.setHorizontalAlignment(2);
label2.setInheritsPopupMenu(false);
label2.setText("Field ID:");
panel2.add(label2, new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
idLabelValue = new JLabel();
idLabelValue.setText("Label");
panel2.add(idLabelValue, new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JPanel panel3 = new JPanel();
panel3.setLayout(new GridLayoutManager(7, 3, new Insets(5, 5, 5, 5), -1, -1));
fieldEditor.add(panel3, new GridConstraints(1, 0, 2, 6, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
panel3.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Look and Feel"));
isWrapText = new JCheckBox();
isWrapText.setText("Don't Wrap");
isWrapText.setToolTipText("Do not wrap long lines; scroll horizontally instead.");
panel3.add(isWrapText, new GridConstraints(1, 2, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
isLockText = new JCheckBox();
isLockText.setText("Lock Text");
isLockText.setToolTipText("Make the text of this field uneditable to the user.");
panel3.add(isLockText, new GridConstraints(2, 2, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
style = new JComboBox();
final DefaultComboBoxModel defaultComboBoxModel1 = new DefaultComboBoxModel();
style.setModel(defaultComboBoxModel1);
panel3.add(style, new GridConstraints(0, 1, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JLabel label3 = new JLabel();
label3.setText("Style:");
panel3.add(label3, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
isVisible = new JCheckBox();
isVisible.setText("Visible");
isVisible.setToolTipText("Hide or show this field on the card.");
panel3.add(isVisible, new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
showLines = new JCheckBox();
showLines.setText("Show Lines");
showLines.setToolTipText("Draw dottled rule underneath lines of text.");
panel3.add(showLines, new GridConstraints(2, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
sharedText = new JCheckBox();
sharedText.setText("Shared Text");
sharedText.setToolTipText("Share the text of this field across all cards in this background. (Applies only to background fields.)");
panel3.add(sharedText, new GridConstraints(3, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
enabled = new JCheckBox();
enabled.setText("Enabled");
enabled.setToolTipText("Enable or disable (grey-out) this field.");
panel3.add(enabled, new GridConstraints(3, 2, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
isWideMargins = new JCheckBox();
isWideMargins.setText("Wide Margins");
isWideMargins.setToolTipText("Inset the text 15px from the edges of the field.");
panel3.add(isWideMargins, new GridConstraints(4, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
autoTab = new JCheckBox();
autoTab.setText("Auto Tab");
autoTab.setToolTipText("Transfer focus to the next part when the tab key is pressed.");
panel3.add(autoTab, new GridConstraints(4, 2, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
autoSelect = new JCheckBox();
autoSelect.setText("Auto Select");
autoSelect.setToolTipText("Automatically select the entire line of text that was clicked; makes this a \"list field.\"");
panel3.add(autoSelect, new GridConstraints(5, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
multipleLines = new JCheckBox();
multipleLines.setText("Multiple Lines");
multipleLines.setToolTipText("Applies only to \"Auto Select\"; allows multiple lines to be selected together.");
panel3.add(multipleLines, new GridConstraints(5, 2, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
scrolling = new JCheckBox();
scrolling.setText("Scrolling");
scrolling.setToolTipText("Allow the field to scroll vertically if text exceeds visual bounds.");
panel3.add(scrolling, new GridConstraints(6, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
editScriptButton = new JButton();
editScriptButton.setText("Edit Script...");
fieldEditor.add(editScriptButton, new GridConstraints(4, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
textStyleButton = new JButton();
textStyleButton.setEnabled(true);
textStyleButton.setText("Text Style...");
fieldEditor.add(textStyleButton, new GridConstraints(3, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
saveButton = new JButton();
saveButton.setText("OK");
fieldEditor.add(saveButton, new GridConstraints(4, 8, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final Spacer spacer2 = new Spacer();
fieldEditor.add(spacer2, new GridConstraints(4, 7, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null, null, 0, false));
final Spacer spacer3 = new Spacer();
fieldEditor.add(spacer3, new GridConstraints(4, 1, 1, 3, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null, null, 0, false));
final JPanel panel4 = new JPanel();
panel4.setLayout(new GridLayoutManager(2, 4, new Insets(5, 5, 5, 5), -1, -1));
fieldEditor.add(panel4, new GridConstraints(1, 6, 1, 3, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
panel4.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Location"));
final JLabel label4 = new JLabel();
label4.setText("Height:");
panel4.add(label4, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JLabel label5 = new JLabel();
label5.setText("Width:");
panel4.add(label5, new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
fieldHeight = new JSpinner();
panel4.add(fieldHeight, new GridConstraints(0, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(75, -1), new Dimension(75, -1), 0, false));
fieldWidth = new JSpinner();
panel4.add(fieldWidth, new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(75, -1), new Dimension(75, -1), 0, false));
final JLabel label6 = new JLabel();
label6.setText("Top:");
panel4.add(label6, new GridConstraints(0, 2, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JLabel label7 = new JLabel();
label7.setText("Left:");
panel4.add(label7, new GridConstraints(1, 2, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
fieldTop = new JSpinner();
panel4.add(fieldTop, new GridConstraints(0, 3, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(75, -1), new Dimension(75, -1), 0, false));
fieldLeft = new JSpinner();
panel4.add(fieldLeft, new GridConstraints(1, 3, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(75, -1), new Dimension(75, -1), 0, false));
final Spacer spacer4 = new Spacer();
fieldEditor.add(spacer4, new GridConstraints(2, 6, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_VERTICAL, 1, GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
}
}
| |
package com.github.TKnudsen.ComplexDataObject.model.scoring.functions.Double;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.function.Function;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.github.TKnudsen.ComplexDataObject.data.complexDataObject.ComplexDataContainer;
import com.github.TKnudsen.ComplexDataObject.data.complexDataObject.ComplexDataObject;
import com.github.TKnudsen.ComplexDataObject.model.io.parsers.objects.IObjectParser;
import com.github.TKnudsen.ComplexDataObject.model.scoring.AttributeScoringFunctionChangeEvent;
import com.github.TKnudsen.ComplexDataObject.model.scoring.functions.AttributeScoringFunctions;
import com.github.TKnudsen.ComplexDataObject.model.tools.DataConversion;
import com.github.TKnudsen.ComplexDataObject.model.tools.StatisticsSupport;
import com.github.TKnudsen.ComplexDataObject.model.transformations.normalization.LinearNormalizationFunction;
import com.github.TKnudsen.ComplexDataObject.model.transformations.normalization.NormalizationFunction;
import com.github.TKnudsen.ComplexDataObject.model.transformations.normalization.QuantileNormalizationFunction;
/**
 * Scoring function for Double attributes with a bipolar value domain: values
 * at or above a configurable neutral value are scored by a "positive"
 * normalization function, values below it by a "negative" one whose result is
 * shifted down by one (yielding negative scores). Both halves are anchored at
 * the neutral value so the region around it is closed.
 */
public class DoubleAttributeBipolarScoringFunction extends DoubleAttributeScoringFunction {

    // Per-polarity statistics. The *Raw variants are built from the raw values
    // (see initializeRawValuesStatisticsSupport) and feed quantile
    // normalization; all are excluded from JSON serialization.
    @JsonIgnore
    private StatisticsSupport statisticsSupportPositiveRaw;
    @JsonIgnore
    private StatisticsSupport statisticsSupportPositive;
    @JsonIgnore
    private StatisticsSupport statisticsSupportNegativeRaw;
    @JsonIgnore
    private StatisticsSupport statisticsSupportNegative;

    // Boundary between the positive (>= neutralValue) and negative (<) halves.
    private double neutralValue = 0.0;

    private NormalizationFunction normalizationFunctionPositive;
    private NormalizationFunction normalizationFunctionNegative;
    private QuantileNormalizationFunction quantileNormalizationFunctionPositive;
    private QuantileNormalizationFunction quantileNormalizationFunctionNegative;

    /**
     * for serialization purposes
     */
    @SuppressWarnings("unused")
    private DoubleAttributeBipolarScoringFunction() {
        super();
    }

    /** Convenience constructor with default settings (linear, high-is-good, weight 1.0). */
    public DoubleAttributeBipolarScoringFunction(ComplexDataContainer container, String attribute,
            IObjectParser<Double> parser) {
        this(container, parser, attribute, null, false, true, 1.0, null);
    }

    /** Constructor without an uncertainty function. */
    public DoubleAttributeBipolarScoringFunction(ComplexDataContainer container, IObjectParser<Double> parser,
            String attribute, String abbreviation, boolean quantileBased, boolean highIsGood, double weight) {
        this(container, parser, attribute, abbreviation, quantileBased, highIsGood, weight, null);
    }

    public DoubleAttributeBipolarScoringFunction(ComplexDataContainer container, IObjectParser<Double> parser,
            String attribute, String abbreviation, boolean quantileBased, boolean highIsGood, double weight,
            Function<ComplexDataObject, Double> uncertaintyFunction) {
        super(container, parser, attribute, abbreviation, quantileBased, highIsGood, weight, uncertaintyFunction);
    }

    /**
     * Full constructor, additionally setting the neutral value that separates
     * the positive from the negative half of the value domain.
     */
    public DoubleAttributeBipolarScoringFunction(ComplexDataContainer container, IObjectParser<Double> parser,
            String attribute, String abbreviation, boolean quantileBased, boolean highIsGood, double weight,
            Function<ComplexDataObject, Double> uncertaintyFunction, double neutralValue) {
        super(container, parser, attribute, abbreviation, quantileBased, highIsGood, weight, uncertaintyFunction);
        setNeutralValue(neutralValue);
    }

    /**
     * Computes the outlier pruning interval. When a neutral value is set (not
     * NaN), the std-deviation window is centered at the neutral value rather
     * than at the mean; otherwise the superclass behavior is used.
     */
    protected void initializeStdOutlierTreatment(Collection<Double> doubleValues) {
        if (Double.isNaN(neutralValue))
            super.initializeStdOutlierTreatment(doubleValues);
        else {
            // Drop null/NaN entries before computing statistics.
            Collection<Double> values = new ArrayList<>();
            for (Double d : doubleValues)
                if (d != null && !Double.isNaN(d))
                    values.add(d);
            StatisticsSupport statisticsSupport = new StatisticsSupport(values);
            double mean = neutralValue;// statisticsSupport.getMean();
            double standardDeviation = statisticsSupport.getStandardDeviation();
            double min = statisticsSupport.getMin();
            double max = statisticsSupport.getMax();
            // Fall back to 10 standard deviations when no bound is configured.
            double outlierStd = (this.outlierStd != null && !Double.isNaN(this.outlierStd)) ? this.outlierStd : 10.0;
            double outlierStdTop = (this.outlierStdTop != null && !Double.isNaN(this.outlierStdTop))
                    ? this.outlierStdTop
                    : 10.0;
            // Externally supplied bounds win; otherwise clamp the std window to the data range.
            outlierPruningMinValue = (outlierPruningMinValueExternal != null) ? outlierPruningMinValueExternal
                    : Math.max(min, mean - outlierStd * standardDeviation);
            outlierPruningMaxValue = (outlierPruningMaxValueExternal != null) ? outlierPruningMaxValueExternal
                    : Math.min(max, mean + outlierStdTop * standardDeviation);
        }
    }

    @Override
    /**
     * given double values must not be null or NaN!
     */
    protected void initializeStatisticsSupport(Collection<Double> doubleValues) {
        StatisticsSupport[] initializeStatistics = initializeStatistics(doubleValues);
        statisticsSupportNegative = initializeStatistics[0];
        statisticsSupportPositive = initializeStatistics[1];
    }

    @Override
    protected void initializeRawValuesStatisticsSupport(Collection<Double> doubleValues) {
        StatisticsSupport[] initializeStatistics = initializeStatistics(doubleValues);
        statisticsSupportNegativeRaw = initializeStatistics[0];
        statisticsSupportPositiveRaw = initializeStatistics[1];
    }

    /**
     * Splits the values at the neutral value (values equal to it count as
     * positive) and builds per-polarity statistics. Null/NaN entries are
     * skipped.
     *
     * @param doubleValues
     * @return StatisticsSupport[0] is negative StatisticsSupport[1] is positive
     */
    private final StatisticsSupport[] initializeStatistics(Collection<Double> doubleValues) {
        // build positive and negative collections
        Collection<Double> negative = new ArrayList<>();
        Collection<Double> positive = new ArrayList<>();
        for (Double value : doubleValues)
            if (value != null && !Double.isNaN(value))
                if (value >= neutralValue)
                    positive.add(value);
                else
                    negative.add(value);
        return new StatisticsSupport[] { new StatisticsSupport(negative), new StatisticsSupport(positive) };
    }

    /**
     * (Re-)builds the linear and quantile normalization functions for both
     * halves of the value domain and anchors them at the neutral value.
     * Functions stay null when their half of the domain has no usable data.
     */
    @Override
    protected void initializeNormalizationFunctions() {
        quantileNormalizationFunctionPositive = null;
        normalizationFunctionPositive = null;
        // if the entire value domain is NaN no normalizationFunction can be built
        // NOTE(review): unlike the negative branch below, this branch does not
        // additionally check getCount() > 0 -- TODO confirm this asymmetry is intended.
        if (!Double.isNaN(statisticsSupportPositiveRaw.getMean()))
            if (getQuantileNormalizationRate() > 0)
                quantileNormalizationFunctionPositive = new QuantileNormalizationFunction(statisticsSupportPositiveRaw,
                        true);
        if (!Double.isNaN(statisticsSupportPositive.getMean()))
            normalizationFunctionPositive = new LinearNormalizationFunction(statisticsSupportPositive, true);
        else
            System.err.println(getClass().getSimpleName()
                    + ": positive value range above neutral value did not contain entries for attribute "
                    + getAttribute() + ". check input data or adjust neutral value");
        quantileNormalizationFunctionNegative = null;
        normalizationFunctionNegative = null;
        // if the entire value domain is NaN no normalizationFunction can be built
        if (!Double.isNaN(statisticsSupportNegativeRaw.getMean()) && statisticsSupportNegativeRaw.getCount() > 0)
            if (getQuantileNormalizationRate() > 0)
                quantileNormalizationFunctionNegative = new QuantileNormalizationFunction(statisticsSupportNegativeRaw,
                        true);
        if (!Double.isNaN(statisticsSupportNegative.getMean()) && statisticsSupportNegative.getCount() > 0)
            normalizationFunctionNegative = new LinearNormalizationFunction(statisticsSupportNegative, true);
        else
            System.err.println(getClass().getSimpleName()
                    + ": negative value range below neutral value did not contain entries for attribute "
                    + getAttribute() + ". check input data or adjust neutral value");
        // close the region around 0.0
        if (quantileNormalizationFunctionPositive != null)
            quantileNormalizationFunctionPositive.setGlobalMin(neutralValue);
        if (normalizationFunctionPositive != null)
            normalizationFunctionPositive.setGlobalMin(neutralValue);
        if (quantileNormalizationFunctionNegative != null)
            quantileNormalizationFunctionNegative.setGlobalMax(neutralValue);
        if (normalizationFunctionNegative != null)
            normalizationFunctionNegative.setGlobalMax(neutralValue);
    }

    /**
     * Linear normalization: positive-half values are normalized directly;
     * negative-half values are normalized and shifted down by one. Returns 0.0
     * (with a console warning) when the required normalization function is
     * missing, NaN when no branch applies.
     */
    @Override
    protected double normalizeLinear(double value) {
        try {
            if (Double.isNaN(neutralValue) || value >= neutralValue)
                return normalizationFunctionPositive.apply(value).doubleValue();
            else if (normalizationFunctionNegative != null)
                return normalizationFunctionNegative.apply(value).doubleValue() - 1;
        } catch (NullPointerException e) {
            System.err.println(getClass().getSimpleName() + ".normalizeLinear did not work for value " + value
                    + ", neutral value: " + neutralValue + ": normalization function null, returning 0.0 for attribute"
                    + getAttribute());
            return 0.0;
        }
        return Double.NaN;
    }

    /**
     * Quantile-based normalization, mirroring normalizeLinear.
     * NOTE(review): unlike normalizeLinear there is no NPE guard here; a null
     * quantileNormalizationFunctionPositive would propagate -- TODO confirm.
     */
    @Override
    protected double normalizeQuantiles(double value) {
        if (Double.isNaN(neutralValue) || value >= neutralValue)
            return quantileNormalizationFunctionPositive.apply(value).doubleValue();
        else if (quantileNormalizationFunctionNegative != null)
            return quantileNormalizationFunctionNegative.apply(value).doubleValue() - 1;
        return Double.NaN;
    }

    /** Average score over the container, ignoring missing values. */
    protected double calculateAverageScore() {
        double score = AttributeScoringFunctions.calculateAverageScoreWithoutMissingValues(this, true);
        // clear scoresBuffer as it contains old missing value data now
        scoresBuffer.clear();
        if (Double.isNaN(score))
            System.err.println(this.getClass().getSimpleName() + ": NaN value detected for the average score!");
        return score;
    }

    @Override
    /**
     * this is expensive. is it really needed?
     */
    public StatisticsSupport getStatisticsSupport() {
        // Merge the positive and negative halves back into one statistics object.
        Collection<Double> values = new ArrayList<>(
                DataConversion.doublePrimitivesToList(statisticsSupportPositive.getValues()));
        if (statisticsSupportNegative != null)
            values.addAll(DataConversion.doublePrimitivesToList(statisticsSupportNegative.getValues()));
        return new StatisticsSupport(values);
    }

    /** Identity conversion: scores operate directly on the parsed Double. */
    @Override
    protected Double toDouble(Double t) {
        return t;
    }

    /**
     * Inverts a score for "low is good" attributes: with a bipolar domain
     * (negative half present) the score is mirrored around zero, otherwise
     * around 0.5 as in the unipolar case.
     */
    @Override
    protected double invertScore(double output) {
        if (normalizationFunctionNegative != null)
            return -output;
        else
            return 1 - output;
    }

    public double getNeutralValue() {
        return neutralValue;
    }

    /**
     * Sets the neutral value, invalidates buffered scores, rebuilds the
     * scoring function, and notifies registered listeners.
     */
    public void setNeutralValue(double neutralValue) {
        this.neutralValue = neutralValue;
        this.scoresBuffer = new HashMap<>();
        refreshScoringFunction();
        AttributeScoringFunctionChangeEvent event = new AttributeScoringFunctionChangeEvent(this, getAttribute(), this);
        notifyListeners(event);
    }
}
| |
/*
* Copyright 2021 Hazelcast Inc.
*
* Licensed under the Hazelcast Community License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://hazelcast.com/hazelcast-community-license
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.python;
import com.hazelcast.internal.nio.IOUtil;
import com.hazelcast.jet.JetException;
import com.hazelcast.jet.core.ProcessorSupplier;
import com.hazelcast.jet.impl.util.Util;
import com.hazelcast.logging.ILogger;
import javax.annotation.Nonnull;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.jet.impl.util.IOUtil.copyStream;
import static com.hazelcast.jet.impl.util.IOUtil.readFully;
import static com.hazelcast.jet.impl.util.Util.editPermissionsRecursively;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.nio.file.attribute.PosixFilePermission.GROUP_WRITE;
import static java.nio.file.attribute.PosixFilePermission.OTHERS_WRITE;
import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE;
import static java.nio.file.attribute.PosixFilePermission.OWNER_WRITE;
import static java.util.Arrays.asList;
/**
* The context object used by the "map using Python" pipeline stage. As a
* user you don't have to deal with this class directly. It is used when
* you write {@link PythonTransforms#mapUsingPython
* stage.apply(PythonService.mapUsingPython(pyConfig))}
*/
class PythonServiceContext {
    // Prefix for all scripts this service stages into the runtime base dir,
    // distinguishing them from user-supplied files.
    private static final String JET_TO_PYTHON_PREFIX = "jet_to_python_";
    private static final String MAIN_SHELL_SCRIPT = JET_TO_PYTHON_PREFIX + "main.sh";
    private static final String PARAMS_SCRIPT = JET_TO_PYTHON_PREFIX + "params.sh";
    private static final String INIT_SHELL_SCRIPT = JET_TO_PYTHON_PREFIX + "init.sh";
    private static final String CLEANUP_SHELL_SCRIPT = JET_TO_PYTHON_PREFIX + "cleanup.sh";
    // Optional user-provided hook scripts (no prefix).
    private static final String USER_INIT_SHELL_SCRIPT = "init.sh";
    private static final String USER_CLEANUP_SHELL_SCRIPT = "cleanup.sh";
    private static final String PYTHON_GRPC_SCRIPT = JET_TO_PYTHON_PREFIX + "grpc_server.py";
    // Scripts that must carry the execute permission bit after staging.
    private static final List<String> EXECUTABLE_SCRIPTS = asList(
            INIT_SHELL_SCRIPT, MAIN_SHELL_SCRIPT, CLEANUP_SHELL_SCRIPT);
    private static final List<String> USER_EXECUTABLE_SCRIPTS = asList(
            USER_INIT_SHELL_SCRIPT, USER_CLEANUP_SHELL_SCRIPT);
    // Permission bits stripped when the staged files are made read-only.
    private static final EnumSet<PosixFilePermission> WRITE_PERMISSIONS =
            EnumSet.of(OWNER_WRITE, GROUP_WRITE, OTHERS_WRITE);
    // Serializes the init script across concurrent service instances
    // (the script runs pip, which is not concurrency-safe).
    private static final Object INIT_LOCK = new Object();
    private final ILogger logger;
    // Directory into which the Python runtime and scripts are staged.
    private final Path runtimeBaseDir;
PythonServiceContext(ProcessorSupplier.Context context, PythonServiceConfig cfg) {
logger = context.hazelcastInstance().getLoggingService()
.getLogger(getClass().getPackage().getName());
checkIfPythonIsAvailable();
try {
long start = System.nanoTime();
runtimeBaseDir = recreateRuntimeBaseDir(context, cfg);
setupBaseDir(cfg);
synchronized (INIT_LOCK) {
// synchronized: the script will run pip which is not concurrency-safe
Process initProcess = new ProcessBuilder("/bin/sh", "-c", "./" + INIT_SHELL_SCRIPT)
.directory(runtimeBaseDir.toFile())
.redirectErrorStream(true)
.start();
Thread stdoutLoggingThread = logStdOut(logger, initProcess, "python-init");
initProcess.waitFor();
if (initProcess.exitValue() != 0) {
try {
performCleanup();
} catch (Exception e) {
logger.warning("Cleanup failed with exception", e);
}
throw new Exception(
"Initialization script finished with non-zero exit code: " + initProcess.exitValue()
);
}
stdoutLoggingThread.join();
}
makeFilesReadOnly(runtimeBaseDir);
context.logger().info(String.format("Initialization script took %,d ms",
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start)));
} catch (Exception e) {
throw new JetException("PythonService initialization failed: " + e, e);
}
}
private void checkIfPythonIsAvailable() {
try {
Process process = new ProcessBuilder("python3", "--version").redirectErrorStream(true).start();
process.waitFor();
try (InputStream inputStream = process.getInputStream()) {
String output = new String(readFully(inputStream), UTF_8);
if (process.exitValue() != 0) {
logger.severe("python3 version check returned non-zero exit value, output: " + output);
throw new IllegalStateException("python3 is not available");
}
if (!output.startsWith("Python 3")) {
logger.severe("python3 version check returned unknown version, output: " + output);
throw new IllegalStateException("python3 is not available");
}
}
} catch (Exception e) {
throw new IllegalStateException("python3 is not available", e);
}
}
private void makeFilesReadOnly(@Nonnull Path basePath) throws IOException {
List<String> filesNotMarked = editPermissionsRecursively(
basePath, perms -> perms.removeAll(WRITE_PERMISSIONS));
if (!filesNotMarked.isEmpty()) {
logger.info("Couldn't 'chmod -w' these files: " + filesNotMarked);
}
}
private static void makeExecutable(@Nonnull Path path) throws IOException {
Util.editPermissions(path, perms -> perms.add(OWNER_EXECUTE));
}
Path recreateRuntimeBaseDir(ProcessorSupplier.Context context, PythonServiceConfig cfg) {
File baseDir = cfg.baseDir();
if (baseDir != null) {
return context.recreateAttachedDirectory(baseDir.toString()).toPath();
}
File handlerFile = cfg.handlerFile();
if (handlerFile != null) {
return context.recreateAttachedFile(handlerFile.toString()).toPath().getParent();
}
throw new IllegalArgumentException("PythonServiceConfig has neither baseDir nor handlerFile set");
}
void destroy() {
try {
performCleanup();
} finally {
IOUtil.delete(runtimeBaseDir);
}
}
ILogger logger() {
return logger;
}
Path runtimeBaseDir() {
return runtimeBaseDir;
}
private void setupBaseDir(PythonServiceConfig cfg) throws IOException {
createParamsScript(runtimeBaseDir.resolve(PARAMS_SCRIPT),
"HANDLER_MODULE", cfg.handlerModule(),
"HANDLER_FUNCTION", cfg.handlerFunction()
);
for (String fname : asList(
JET_TO_PYTHON_PREFIX + "pb2.py",
JET_TO_PYTHON_PREFIX + "pb2_grpc.py",
INIT_SHELL_SCRIPT,
MAIN_SHELL_SCRIPT,
CLEANUP_SHELL_SCRIPT,
PYTHON_GRPC_SCRIPT)
) {
Path destPath = runtimeBaseDir.resolve(fname);
try (InputStream in = Objects.requireNonNull(
PythonServiceContext.class.getClassLoader().getResourceAsStream(fname), fname);
OutputStream out = Files.newOutputStream(destPath)
) {
copyStream(in, out);
}
if (EXECUTABLE_SCRIPTS.contains(fname)) {
makeExecutable(destPath);
}
for (String userScript : USER_EXECUTABLE_SCRIPTS) {
Path scriptPath = runtimeBaseDir.resolve(userScript);
if (Files.exists(scriptPath)) {
makeExecutable(scriptPath);
}
}
}
}
private void performCleanup() {
try {
List<String> filesNotMarked = editPermissionsRecursively(runtimeBaseDir, perms -> perms.add(OWNER_WRITE));
if (!filesNotMarked.isEmpty()) {
logger.info("Couldn't 'chmod u+w' these files: " + filesNotMarked);
}
Path cleanupScriptPath = runtimeBaseDir.resolve(USER_CLEANUP_SHELL_SCRIPT);
if (Files.exists(cleanupScriptPath)) {
Process cleanupProcess = new ProcessBuilder("/bin/sh", "-c", "./" + CLEANUP_SHELL_SCRIPT)
.directory(runtimeBaseDir.toFile())
.redirectErrorStream(true)
.start();
logStdOut(logger, cleanupProcess, "python-cleanup-" + cleanupProcess);
cleanupProcess.waitFor();
if (cleanupProcess.exitValue() != 0) {
logger.warning("Cleanup script finished with non-zero exit code: " + cleanupProcess.exitValue());
}
}
} catch (Exception e) {
throw new JetException("PythonService cleanup failed: " + e, e);
}
}
static Thread logStdOut(ILogger logger, Process process, String taskName) {
Thread thread = new Thread(() -> {
try (BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream(), UTF_8))) {
for (String line; (line = in.readLine()) != null; ) {
logger.fine(line);
}
} catch (IOException e) {
logger.severe("Reading init script output failed", e);
}
}, taskName + "-logger_" + processPid(process));
thread.start();
return thread;
}
static String processPid(Process process) {
try {
// Process.pid() is @since 9
return Process.class.getMethod("pid").invoke(process).toString();
} catch (Exception e) {
return process.toString().replaceFirst("^.*pid=(\\d+).*$", "$1");
}
}
private static void createParamsScript(@Nonnull Path paramsFile, String... namesAndVals) throws IOException {
try (PrintWriter out = new PrintWriter(Files.newBufferedWriter(paramsFile))) {
String jetToPython = JET_TO_PYTHON_PREFIX.toUpperCase(Locale.ROOT);
for (int i = 0; i < namesAndVals.length; i += 2) {
String name = namesAndVals[i];
String value = namesAndVals[i + 1];
if (value != null && !value.isEmpty()) {
out.println(jetToPython + name + "='" + value + '\'');
}
}
}
}
}
| |
package edu.ucdenver.ccp.datasource.fileparsers.ncbi.gene;
import org.apache.log4j.Logger;
import edu.ucdenver.ccp.common.file.reader.Line;
import edu.ucdenver.ccp.datasource.fileparsers.CcpExtensionOntology;
import edu.ucdenver.ccp.datasource.fileparsers.License;
import edu.ucdenver.ccp.datasource.fileparsers.Record;
import edu.ucdenver.ccp.datasource.fileparsers.RecordField;
import edu.ucdenver.ccp.datasource.fileparsers.SingleLineFileRecord;
import edu.ucdenver.ccp.datasource.identifiers.DataSource;
import edu.ucdenver.ccp.datasource.identifiers.impl.bio.GiNumberID;
import edu.ucdenver.ccp.datasource.identifiers.impl.bio.NcbiGeneId;
import edu.ucdenver.ccp.datasource.identifiers.impl.bio.NcbiTaxonomyID;
import edu.ucdenver.ccp.datasource.identifiers.impl.bio.RefSeqID;
/*
* #%L
* Colorado Computational Pharmacology's common module
* %%
* Copyright (C) 2012 - 2015 Regents of the University of Colorado
* %%
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
import lombok.Data;
/**
 * This class represents data contained in the EntrezGene gene2accession file.
 *
 * @author Bill Baumgartner
 *
 */
@Record(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD, dataSource = DataSource.NCBI_GENE, comment = "", license = License.NCBI, citation = "The NCBI handbook [Internet]. Bethesda (MD): National Library of Medicine (US), National Center for Biotechnology Information; 2002 Oct. Chapter 19 Gene: A Directory of Genes. Available from http://www.ncbi.nlm.nih.gov/books/NBK21091", label = "gene2refseq record")
@Data
public class NcbiGene2RefseqFileData extends SingleLineFileRecord {
	public static final String RECORD_NAME_PREFIX = "ENTREZ_GENE2ACCESSION_RECORD_";

	// Created once instead of on every call to parseGene2AccessionLine.
	private static final Logger logger = Logger.getLogger(NcbiGene2RefseqFileData.class);

	/*
	 * #Format: tax_id GeneID status RNA_nucleotide_accession.version RNA_nucleotide_gi
	 * protein_accession.version protein_gi genomic_nucleotide_accession.version
	 * genomic_nucleotide_gi start_position_on_the_genomic_accession
	 * end_position_on_the_genomic_accession orientation assembly (tab is used as a separator, pound
	 * sign - start of a comment)
	 */
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___TAXON_IDENTIFIER_FIELD_VALUE)
	private final NcbiTaxonomyID taxonID;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___GENE_IDENTIFIER_FIELD_VALUE)
	private final NcbiGeneId geneID;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___STATUS_FIELD_VALUE)
	private final String status;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___RNA_NUCLEOTIDE_ACCESSION_DOT_VERSION_IDENTIFIER_FIELD_VALUE)
	private final RefSeqID RNA_nucleotide_accession_dot_version;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___RNA_NUCLEOTIDE_GENEINFO_IDENTIFIER_FIELD_VALUE)
	private final GiNumberID RNA_nucleotide_gi;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___PROTEIN_ACCESSION_DOT_VERSION_IDENTIFIER_FIELD_VALUE)
	private final RefSeqID protein_accession_dot_version;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___PROTEIN_GENEINFO_IDENTIFIER_FIELD_VALUE)
	private final GiNumberID protein_gi;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___GENOMIC_NUCLEOTIDE_ACCESSION_DOT_VERSION_IDENTIFIER_FIELD_VALUE)
	private final RefSeqID genomic_nucleotide_accession_dot_version;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___GENOMIC_NUCLEOTIDE_GENEINFO_IDENTIFIER_FIELD_VALUE)
	private final GiNumberID genomic_nucleotide_gi;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___START_POSITION_ON_THE_GENOMIC_ACCESSION_FIELD_VALUE)
	private final Integer start_position_on_the_genomic_accession;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___END_POSITION_ON_THE_GENOMIC_ACCESSION_FIELD_VALUE)
	private final Integer end_position_on_the_genomic_accession;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___ORIENTATION_FIELD_VALUE)
	private final char orientation;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___ASSEMBLY_FIELD_VALUE)
	private final String assembly;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___MATURE_PEPTIDE_ACCESSION_DOT_VERSTION_IDENTIFIER_FIELD_VALUE)
	private final RefSeqID mature_peptide_accession_dot_version;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___MATURE_PEPTIDE_GENEINFO_IDENTIFIER_FIELD_VALUE)
	private final GiNumberID mature_peptide_gi;
	@RecordField(ontClass = CcpExtensionOntology.NCBI_GENE_2_REFSEQ_RECORD___SYMBOL_FIELD_VALUE)
	private final String symbol;

	/**
	 * All-field constructor; any column that had the NCBI "missing" marker "-"
	 * is passed as null by {@link #parseGene2AccessionLine(Line)}.
	 *
	 * @param byteOffset byte offset of the line in the source file
	 * @param lineNumber line number in the source file
	 */
	public NcbiGene2RefseqFileData(NcbiTaxonomyID taxonID, NcbiGeneId geneID, String status,
			RefSeqID rNANucleotideAccessionDotVersion, GiNumberID rNANucleotideGi, RefSeqID proteinAccessionDotVersion,
			GiNumberID proteinGi, RefSeqID genomicNucleotideAccessionDotVersion, GiNumberID genomicNucleotideGi,
			Integer startPositionOnTheGenomicAccession, Integer endPositionOnTheGenomicAccession, char orientation,
			String assembly, RefSeqID mature_peptide_accession_dot_version, GiNumberID mature_peptide_gi,
			String symbol, long byteOffset, long lineNumber) {
		super(byteOffset, lineNumber);
		this.taxonID = taxonID;
		this.geneID = geneID;
		this.status = status;
		RNA_nucleotide_accession_dot_version = rNANucleotideAccessionDotVersion;
		RNA_nucleotide_gi = rNANucleotideGi;
		protein_accession_dot_version = proteinAccessionDotVersion;
		protein_gi = proteinGi;
		genomic_nucleotide_accession_dot_version = genomicNucleotideAccessionDotVersion;
		genomic_nucleotide_gi = genomicNucleotideGi;
		start_position_on_the_genomic_accession = startPositionOnTheGenomicAccession;
		end_position_on_the_genomic_accession = endPositionOnTheGenomicAccession;
		this.orientation = orientation;
		this.assembly = assembly;
		this.mature_peptide_accession_dot_version = mature_peptide_accession_dot_version;
		this.mature_peptide_gi = mature_peptide_gi;
		this.symbol = symbol;
	}

	/**
	 * Parse a line from the EntrezGene gene2accession file.
	 *
	 * @param line a tab-delimited line; comment lines start with '#'
	 * @return the parsed record, or null for comment lines and for malformed
	 *         lines (wrong token count, bad orientation column), which are
	 *         logged as errors
	 */
	public static NcbiGene2RefseqFileData parseGene2AccessionLine(Line line) {
		if (line.getText().startsWith("#")) {
			// comment/header line
			return null;
		}
		String[] toks = line.getText().split("\\t", -1);
		if (toks.length != 16) {
			logger.error("Unexpected number of tokens (" + toks.length + ") on line:"
					+ line.getText().replaceAll("\\t", " [TAB] "));
			return null;
		}
		NcbiTaxonomyID taxonID = new NcbiTaxonomyID(toks[0]);
		NcbiGeneId geneID = new NcbiGeneId(toks[1]);
		// "-" is NCBI's marker for a missing value in every column below
		String status = toks[2];
		if (status.equals("-")) {
			status = null;
		}
		// accession columns are only taken when a status is present
		RefSeqID RNA_nucleotide_accession_dot_version = null;
		if (!toks[3].equals("-") && status != null) {
			RNA_nucleotide_accession_dot_version = new RefSeqID(toks[3]);
		}
		String intStr = toks[4];
		GiNumberID RNA_nucleotide_gi = null;
		if (!intStr.equals("-")) {
			RNA_nucleotide_gi = new GiNumberID(intStr);
		}
		RefSeqID protein_accession_dot_version = null;
		if (!toks[5].equals("-") && status != null) {
			protein_accession_dot_version = new RefSeqID(toks[5]);
		}
		intStr = toks[6];
		GiNumberID protein_gi = null;
		if (!intStr.equals("-")) {
			protein_gi = new GiNumberID(intStr);
		}
		RefSeqID genomic_nucleotide_accession_dot_version = null;
		if (!toks[7].equals("-") && status != null) {
			genomic_nucleotide_accession_dot_version = new RefSeqID(toks[7]);
		}
		intStr = toks[8];
		GiNumberID genomic_nucleotide_gi = null;
		if (!intStr.equals("-")) {
			genomic_nucleotide_gi = new GiNumberID(intStr);
		}
		intStr = toks[9];
		Integer start_position_on_the_genomic_accession;
		if (intStr.equals("-")) {
			start_position_on_the_genomic_accession = null;
		} else {
			// Integer.valueOf replaces the deprecated new Integer(String)
			start_position_on_the_genomic_accession = Integer.valueOf(intStr);
		}
		intStr = toks[10];
		Integer end_position_on_the_genomic_accession;
		if (intStr.equals("-")) {
			end_position_on_the_genomic_accession = null;
		} else {
			end_position_on_the_genomic_accession = Integer.valueOf(intStr);
		}
		// must be exactly one character; the previous "> 1" check let an empty
		// token fall through to charAt(0) and throw StringIndexOutOfBoundsException
		String orientationTok = toks[11].trim();
		if (orientationTok.length() != 1) {
			logger.error("Expected a single character for the orientation on line: " + line);
			return null;
		}
		char orientation = orientationTok.charAt(0);
		String assembly = toks[12];
		if (assembly.equals("-")) {
			assembly = null;
		}
		RefSeqID mature_peptide_accession_dot_version = null;
		if (!toks[13].equals("-") && status != null) {
			mature_peptide_accession_dot_version = new RefSeqID(toks[13]);
		}
		intStr = toks[14];
		GiNumberID mature_peptide_gi = null;
		if (!intStr.equals("-")) {
			mature_peptide_gi = new GiNumberID(intStr);
		}
		String symbol = null;
		if (!toks[15].equals("-")) {
			symbol = toks[15];
		}
		return new NcbiGene2RefseqFileData(taxonID, geneID, status, RNA_nucleotide_accession_dot_version,
				RNA_nucleotide_gi, protein_accession_dot_version, protein_gi,
				genomic_nucleotide_accession_dot_version, genomic_nucleotide_gi,
				start_position_on_the_genomic_accession, end_position_on_the_genomic_accession, orientation,
				assembly, mature_peptide_accession_dot_version, mature_peptide_gi, symbol, line.getByteOffset(),
				line.getLineNumber());
	}
}
| |
/*
* oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.gluu.oxtrust.ws.rs.scim2;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.wordnik.swagger.annotations.*;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.gluu.oxtrust.ldap.service.IPersonService;
import org.gluu.oxtrust.ldap.service.PersonService;
import org.gluu.oxtrust.model.GluuCustomPerson;
import org.gluu.oxtrust.model.scim.ScimPerson;
import org.gluu.oxtrust.model.scim.ScimPersonPatch;
import org.gluu.oxtrust.model.scim.ScimPersonSearch;
import org.gluu.oxtrust.model.scim2.ListResponse;
import org.gluu.oxtrust.model.scim2.User;
import org.gluu.oxtrust.service.UmaAuthenticationService;
import org.gluu.oxtrust.util.CopyUtils;
import org.gluu.oxtrust.util.CopyUtils2;
import org.gluu.oxtrust.util.OxTrustConstants;
import org.gluu.oxtrust.util.Utils;
import org.gluu.oxtrust.ws.rs.scim.BaseScimWebService;
import org.gluu.oxtrust.ws.rs.scim.PATCH;
import org.gluu.site.ldap.persistence.exception.EntryPersistenceException;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.log.Log;
/**
 * scim2UserEndpoint Implementation
 *
 * @author Rahat Ali Date: 05.08.2015
 */
@Name("scim2UserEndpoint")
@Path("/scim/v2/Users")
@Api(value = "/scim/v2/Users", description = "SCIM2 User Endpoint (https://tools.ietf.org/html/draft-ietf-scim-api-19#section-3.4.1)", authorizations = { @Authorization(value = "Authorization", type = "oauth2") })
@Produces({ "application/json", "application/xml" })
public class UserWebService extends BaseScimWebService {

	@Logger
	private Log log;

	@In
	private IPersonService personService;

	@In
	private UmaAuthenticationService umaAuthenticationService;

	/**
	 * Lists all users from LDAP as a SCIM2 ListResponse. Note: the filter/sort
	 * query parameters are accepted but not currently applied.
	 */
	@GET
	@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@ApiOperation(value = "List Users", notes = "Returns a list of users (https://tools.ietf.org/html/draft-ietf-scim-api-19#section-3.4)", response = ListResponse.class)
	public Response listUsers(@HeaderParam("Authorization") String authorization,
			@QueryParam(OxTrustConstants.QUERY_PARAMETER_FILTER) final String filterString,
			@QueryParam(OxTrustConstants.QUERY_PARAMETER_SORT_BY) final String sortBy,
			@QueryParam(OxTrustConstants.QUERY_PARAMETER_SORT_ORDER) final String sortOrder) throws Exception {
		personService = PersonService.instance();
		Response authorizationResponse = processAuthorization(authorization);
		if (authorizationResponse != null) {
			return authorizationResponse;
		}
		try {
			log.info(" getting a list of all users from LDAP ");
			List<GluuCustomPerson> personList = personService.findAllPersons(null);
			ListResponse personsListResponse = new ListResponse();
			if (personList != null) {
				log.info(" LDAP person list is not empty ");
				for (GluuCustomPerson gluuPerson : personList) {
					log.info(" copying person from GluuPerson to ScimPerson ");
					User person = CopyUtils2.copy(gluuPerson, null);
					log.info(" adding ScimPerson to the AllPersonList ");
					log.info(" person to be added userid : " + person.getUserName());
					personsListResponse.getResources().add(person);
					log.info(" person added? : " + personsListResponse.getResources().contains(person));
				}
			}
			List<String> schema = new ArrayList<String>();
			schema.add("urn:ietf:params:scim:api:messages:2.0:ListResponse");
			log.info("setting schema");
			personsListResponse.setSchemas(schema);
			personsListResponse.setTotalResults(personsListResponse.getResources().size());
			URI location = new URI("/v2/Users/");
			return Response.ok(personsListResponse).location(location).build();
		} catch (Exception ex) {
			log.error("Exception: ", ex);
			return getErrorResponse("Unexpected processing error, please check the input parameters", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
		}
	}

	/** Returns a single user by inum, or 404 if not found. */
	@Path("{uid}")
	@GET
	@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@ApiOperation(value = "Find User by id", notes = "Returns a Users on the basis of provided uid as path param (https://tools.ietf.org/html/draft-ietf-scim-api-19#section-3.4.2.1)", response = User.class)
	public Response getUserByUid(@HeaderParam("Authorization") String authorization, @PathParam("uid") String uid) throws Exception {
		personService = PersonService.instance();
		Response authorizationResponse = processAuthorization(authorization);
		if (authorizationResponse != null) {
			return authorizationResponse;
		}
		try {
			GluuCustomPerson gluuPerson = personService.getPersonByInum(uid);
			if (gluuPerson == null) {
				// sets HTTP status code 404 Not Found
				return getErrorResponse("Resource " + uid + " not found", Response.Status.NOT_FOUND.getStatusCode());
			}
			User person = CopyUtils2.copy(gluuPerson, null);
			URI location = new URI("/v2/Users/" + uid);
			return Response.ok(person).location(location).build();
		} catch (EntryPersistenceException ex) {
			log.error("Exception: ", ex);
			return getErrorResponse("Resource " + uid + " not found", Response.Status.NOT_FOUND.getStatusCode());
		} catch (Exception ex) {
			// log via the injected logger only; the previous System.out.println
			// bypassed the logging framework
			log.error("Exception: ", ex);
			return getErrorResponse("Unexpected processing error, please check the input parameters", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
		}
	}

	/** Creates a new user; responds 201 with the created representation. */
	@POST
	@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@ApiOperation(value = "Create User", notes = "Create User (https://tools.ietf.org/html/draft-ietf-scim-api-19#section-3.3)", response = User.class)
	public Response createUser(@HeaderParam("Authorization") String authorization, @ApiParam(value = "User", required = true) User person)
			throws WebApplicationException, JsonGenerationException, JsonMappingException, IOException, Exception {
		personService = PersonService.instance();
		Response authorizationResponse = processAuthorization(authorization);
		if (authorizationResponse != null) {
			return authorizationResponse;
		}
		// Return HTTP response with status code 201 Created
		log.debug(" copying gluuperson ");
		GluuCustomPerson gluuPerson = CopyUtils2.copy(person, null, false);
		if (gluuPerson == null) {
			return getErrorResponse("Failed to create user", Response.Status.BAD_REQUEST.getStatusCode());
		}
		try {
			log.debug(" generating inum ");
			String inum = personService.generateInumForNewPerson(); // inumService.generateInums(Configuration.INUM_TYPE_PEOPLE_SLUG);
			// //personService.generateInumForNewPerson();
			log.debug(" getting DN ");
			String dn = personService.getDnForPerson(inum);
			log.debug(" getting iname ");
			String iname = personService.generateInameForNewPerson(person.getUserName());
			log.debug(" setting dn ");
			gluuPerson.setDn(dn);
			log.debug(" setting inum ");
			gluuPerson.setInum(inum);
			log.debug(" setting iname ");
			gluuPerson.setIname(iname);
			log.debug(" setting commonName ");
			gluuPerson.setCommonName(gluuPerson.getGivenName() + " " + gluuPerson.getSurname());
			// guard: getMemberOf()/getGroups() may be null on a freshly copied person
			if (gluuPerson.getMemberOf() != null) {
				log.info("gluuPerson.getMemberOf().size() : " + gluuPerson.getMemberOf().size());
			}
			if (person.getGroups() != null && person.getGroups().size() > 0) {
				log.info(" jumping to groupMemebersAdder ");
				log.info("gluuPerson.getDn() : " + gluuPerson.getDn());
				Utils.groupMemebersAdder(gluuPerson, gluuPerson.getDn());
			}
			log.debug("adding new GluuPerson");
			personService.addPerson(gluuPerson);
			User newPerson = CopyUtils2.copy(gluuPerson, null);
			String uri = "/v2/Users/" + newPerson.getId();
			return Response.created(URI.create(uri)).entity(newPerson).build();
		} catch (Exception ex) {
			log.error("Failed to add user", ex);
			return getErrorResponse("Unexpected processing error, please check the input parameters", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
		}
	}

	/** Replaces the user identified by uid; 404 if it does not exist. */
	@Path("{uid}")
	@PUT
	@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@ApiOperation(value = "Update User", notes = "Update User (https://tools.ietf.org/html/draft-ietf-scim-api-19#section-3.3)", response = User.class)
	public Response updateUser(@HeaderParam("Authorization") String authorization, @PathParam("uid") String uid,
			@ApiParam(value = "User", required = true) User person) throws Exception {
		personService = PersonService.instance();
		Response authorizationResponse = processAuthorization(authorization);
		if (authorizationResponse != null) {
			return authorizationResponse;
		}
		try {
			GluuCustomPerson gluuPerson = personService.getPersonByInum(uid);
			if (gluuPerson == null) {
				return getErrorResponse("Resource " + uid + " not found", Response.Status.NOT_FOUND.getStatusCode());
			}
			GluuCustomPerson newGluuPerson = CopyUtils2.copy(person, gluuPerson, true);
			// guard: getGroups() may be null on the incoming representation
			if (person.getGroups() != null && person.getGroups().size() > 0) {
				Utils.groupMemebersAdder(newGluuPerson, personService.getDnForPerson(uid));
			}
			personService.updatePerson(newGluuPerson);
			log.debug(" person updated ");
			User newPerson = CopyUtils2.copy(newGluuPerson, null);
			URI location = new URI("/Users/" + uid);
			return Response.ok(newPerson).location(location).build();
		} catch (EntryPersistenceException ex) {
			return getErrorResponse("Resource " + uid + " not found", Response.Status.NOT_FOUND.getStatusCode());
		} catch (Exception ex) {
			// log.error already records the stack trace; the previous extra
			// ex.printStackTrace() wrote to stderr outside the logging framework
			log.error("Exception: ", ex);
			return getErrorResponse("Unexpected processing error, please check the input parameters", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
		}
	}

	/** Deletes the user identified by uid, removing group memberships first. */
	@Path("{uid}")
	@DELETE
	@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@ApiOperation(value = "Delete User", notes = "Delete User (https://tools.ietf.org/html/draft-ietf-scim-api-19#section-3.3)")
	public Response deleteUser(@HeaderParam("Authorization") String authorization, @PathParam("uid") String uid) throws Exception {
		personService = PersonService.instance();
		Response authorizationResponse = processAuthorization(authorization);
		if (authorizationResponse != null) {
			return authorizationResponse;
		}
		try {
			GluuCustomPerson person = personService.getPersonByInum(uid);
			if (person == null) {
				return getErrorResponse("Resource " + uid + " not found", Response.Status.NOT_FOUND.getStatusCode());
			} else {
				// null-check BEFORE dereferencing: the size() log statement
				// previously ran ahead of the null check and could NPE
				if (person.getMemberOf() != null && person.getMemberOf().size() > 0) {
					log.info("person.getMemberOf().size() : " + person.getMemberOf().size());
					String dn = personService.getDnForPerson(uid);
					log.info("DN : " + dn);
					Utils.deleteUserFromGroup(person, dn);
				}
				personService.removePerson(person);
			}
			return Response.ok().build();
		} catch (EntryPersistenceException ex) {
			log.error("Exception: ", ex);
			return getErrorResponse("Resource " + uid + " not found", Response.Status.NOT_FOUND.getStatusCode());
		} catch (Exception ex) {
			log.error("Exception: ", ex);
			return getErrorResponse("Unexpected processing error, please check the input parameters", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
		}
	}

	/** PATCH support is not implemented yet; only authorization is checked. */
	@Path("{uid}")
	@PATCH
	@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	public Response updateUserPatch(@HeaderParam("Authorization") String authorization, @PathParam("uid") String uid, ScimPersonPatch person) throws Exception {
		Response authorizationResponse = processAuthorization(authorization);
		if (authorizationResponse != null) {
			return authorizationResponse;
		}
		// TODO: implement SCIM PATCH semantics; currently returns no entity
		return null;
	}

	/** Finds a single user by an arbitrary attribute/value pair. */
	@Path("/Search")
	@POST
	@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
	public Response personSearch(@HeaderParam("Authorization") String authorization, ScimPersonSearch searchPattern) throws Exception {
		personService = PersonService.instance();
		Response authorizationResponse = processAuthorization(authorization);
		if (authorizationResponse != null) {
			return authorizationResponse;
		}
		try {
			GluuCustomPerson gluuPerson = personService.getPersonByAttribute(searchPattern.getAttribute(), searchPattern.getValue());
			if (gluuPerson == null) {
				// sets HTTP status code 404 Not Found
				return getErrorResponse("No result found for search pattern '" + searchPattern.getAttribute() + " = " + searchPattern.getValue()
						+ "' please try again or use another pattern.", Response.Status.NOT_FOUND.getStatusCode());
			}
			// NOTE(review): this endpoint returns the SCIM v1 ScimPerson shape
			// (CopyUtils) unlike the rest of this v2 service -- confirm intended
			ScimPerson person = CopyUtils.copy(gluuPerson, null);
			URI location = new URI("/Users/" + gluuPerson.getInum());
			return Response.ok(person).location(location).build();
		} catch (EntryPersistenceException ex) {
			log.error("Exception: ", ex);
			return getErrorResponse("Resource not found", Response.Status.NOT_FOUND.getStatusCode());
		} catch (Exception ex) {
			log.error("Exception: ", ex);
			return getErrorResponse("Unexpected processing error, please check the input parameters", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
		}
	}
}
| |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.psi.util;
import com.intellij.openapi.util.Condition;
import com.intellij.psi.*;
import com.intellij.util.Processor;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
public final class InheritanceUtil {
  private InheritanceUtil() { }

  /**
   * @param aClass a class to check.
   * @param baseClass supposed base class.
   * @param checkDeep true to check deeper than aClass.super (see {@linkplain PsiClass#isInheritor(PsiClass, boolean)}).
   * @return true if aClass is the baseClass or baseClass inheritor
   */
  public static boolean isInheritorOrSelf(@Nullable PsiClass aClass, @Nullable PsiClass baseClass, boolean checkDeep) {
    if (aClass == null || baseClass == null) return false;
    PsiManager manager = aClass.getManager();
    return manager.areElementsEquivalent(baseClass, aClass) || aClass.isInheritor(baseClass, checkDeep);
  }

  /**
   * Feeds every superclass and superinterface of {@code aClass} (and optionally the class itself)
   * to {@code superProcessor}, stopping as soon as the processor returns {@code false}.
   *
   * @param aClass         the class whose supers are traversed; {@code null} yields {@code true}
   * @param includeSelf    whether {@code aClass} itself is processed first
   * @param superProcessor receives each super; return {@code false} to stop the traversal
   * @return {@code false} if the processor stopped the traversal, {@code true} otherwise
   */
  public static boolean processSupers(@Nullable PsiClass aClass, boolean includeSelf, @NotNull Processor<? super PsiClass> superProcessor) {
    if (aClass == null) return true;
    if (includeSelf && !superProcessor.process(aClass)) return false;
    return processSupers(aClass, superProcessor, new HashSet<>());
  }

  private static boolean processSupers(@NotNull PsiClass aClass, @NotNull Processor<? super PsiClass> superProcessor, @NotNull Set<? super PsiClass> visited) {
    // The visited set guards against revisiting shared supers and against cycles in broken code.
    if (!visited.add(aClass)) return true;
    for (final PsiClass intf : aClass.getInterfaces()) {
      if (!superProcessor.process(intf) || !processSupers(intf, superProcessor, visited)) return false;
    }
    final PsiClass superClass = aClass.getSuperClass();
    if (superClass != null) {
      if (!superProcessor.process(superClass) || !processSupers(superClass, superProcessor, visited)) return false;
    }
    return true;
  }

  @Contract("null, _ -> false")
  public static boolean isInheritor(@Nullable PsiType type, @NotNull @NonNls final String baseClassName) {
    if (type instanceof PsiClassType) {
      PsiUtil.ensureValidType(type);
      return isInheritor(((PsiClassType)type).resolve(), baseClassName);
    }
    if (type instanceof PsiIntersectionType) {
      // An intersection type inherits the base when any of its conjuncts does.
      for (PsiType conjunct : ((PsiIntersectionType)type).getConjuncts()) {
        if (isInheritor(conjunct, baseClassName)) return true;
      }
    }
    return false;
  }

  @Contract("null, _ -> false")
  public static boolean isInheritor(@Nullable PsiClass psiClass, @NotNull @NonNls String baseClassName) {
    return isInheritor(psiClass, false, baseClassName);
  }

  @Contract("null, _, _ -> false")
  public static boolean isInheritor(@Nullable PsiClass psiClass, final boolean strict, @NotNull @NonNls String baseClassName) {
    if (psiClass == null) {
      return false;
    }
    final PsiClass base = JavaPsiFacade.getInstance(psiClass.getProject()).findClass(baseClassName, psiClass.getResolveScope());
    if (base == null) {
      return false;
    }
    // strict: the class itself does not count as its own inheritor
    return strict ? psiClass.isInheritor(base, true) : isInheritorOrSelf(psiClass, base, true);
  }

  /**
   * Gets all superclasses. Classes are added to result in DFS order.
   *
   * @param aClass            the class whose super hierarchy is collected
   * @param results           receives the resolved superclasses and superinterfaces
   * @param includeNonProject whether classes outside the project content should be included
   */
  public static void getSuperClasses(@NotNull PsiClass aClass, @NotNull Set<? super PsiClass> results, boolean includeNonProject) {
    getSuperClassesOfList(aClass.getSuperTypes(), results, includeNonProject, new HashSet<>(), aClass.getManager());
  }

  public static LinkedHashSet<PsiClass> getSuperClasses(@NotNull PsiClass aClass) {
    LinkedHashSet<PsiClass> result = new LinkedHashSet<>();
    getSuperClasses(aClass, result, true);
    return result;
  }

  private static void getSuperClassesOfList(PsiClassType @NotNull [] types,
                                            @NotNull Set<? super PsiClass> results,
                                            boolean includeNonProject,
                                            @NotNull Set<? super PsiClass> visited,
                                            @NotNull PsiManager manager) {
    for (PsiClassType type : types) {
      PsiClass resolved = type.resolve();
      if (resolved != null && visited.add(resolved)) {
        if (includeNonProject || manager.isInProject(resolved)) {
          results.add(resolved);
        }
        getSuperClassesOfList(resolved.getSuperTypes(), results, includeNonProject, visited, manager);
      }
    }
  }

  public static boolean hasEnclosingInstanceInScope(@NotNull PsiClass aClass,
                                                    PsiElement scope,
                                                    boolean isSuperClassAccepted,
                                                    boolean isTypeParamsAccepted) {
    return hasEnclosingInstanceInScope(aClass, scope, psiClass -> isSuperClassAccepted, isTypeParamsAccepted);
  }

  public static boolean hasEnclosingInstanceInScope(@NotNull PsiClass aClass,
                                                    PsiElement scope,
                                                    @NotNull Condition<? super PsiClass> isSuperClassAccepted,
                                                    boolean isTypeParamsAccepted) {
    return findEnclosingInstanceInScope(aClass, scope, isSuperClassAccepted, isTypeParamsAccepted) != null;
  }

  @Nullable
  public static PsiClass findEnclosingInstanceInScope(@NotNull PsiClass aClass,
                                                      PsiElement scope,
                                                      @NotNull Condition<? super PsiClass> isSuperClassAccepted,
                                                      boolean isTypeParamsAccepted) {
    PsiManager manager = aClass.getManager();
    PsiElement place = scope;
    // Walk up the PSI tree from the scope until the file level is reached.
    while (place != null && !(place instanceof PsiFile)) {
      if (place instanceof PsiClass) {
        if (isSuperClassAccepted.value((PsiClass)place)) {
          if (isInheritorOrSelf((PsiClass)place, aClass, true)) return (PsiClass)place;
        }
        else {
          if (manager.areElementsEquivalent(place, aClass)) return aClass;
        }
        if (isTypeParamsAccepted && place instanceof PsiTypeParameter) {
          return (PsiClass)place;
        }
      }
      if (place instanceof PsiModifierListOwner) {
        final PsiModifierList modifierList = ((PsiModifierListOwner)place).getModifierList();
        if (modifierList != null && modifierList.hasModifierProperty(PsiModifier.STATIC)) {
          // A static context cuts off access to any enclosing instance.
          return null;
        }
      }
      place = place.getParent();
    }
    return null;
  }

  /**
   * Feeds every supertype of {@code type} (and optionally the type itself) to {@code processor},
   * stopping as soon as the processor returns {@code false}.
   *
   * @return {@code false} if the processor stopped the traversal, {@code true} otherwise
   */
  public static boolean processSuperTypes(@NotNull PsiType type, boolean includeSelf, @NotNull Processor<? super PsiType> processor) {
    if (includeSelf && !processor.process(type)) return false;
    return processSuperTypes(type, processor, new HashSet<>());
  }

  private static boolean processSuperTypes(PsiType type, Processor<? super PsiType> processor, Set<? super PsiType> visited) {
    if (!visited.add(type)) return true;
    for (PsiType superType : type.getSuperTypes()) {
      if (!processor.process(superType)) return false;
      // FIX: propagate the processor's stop request from deeper recursion levels.
      // Previously the recursive result was discarded, so a processor returning
      // false below the first level did not terminate the traversal and the
      // method incorrectly reported true (contrast with processSupers above).
      if (!processSuperTypes(superType, processor, visited)) return false;
    }
    return true;
  }

  @Nullable
  private static PsiClass getCircularClass(@NotNull PsiClass aClass, @NotNull Collection<? super PsiClass> usedClasses) {
    if (usedClasses.contains(aClass)) {
      return aClass;
    }
    try {
      usedClasses.add(aClass);
      PsiClassType[] superTypes = aClass.getSuperTypes();
      for (PsiClassType superType : superTypes) {
        PsiClass circularClass = getCircularClassInner(superType.resolve(), usedClasses);
        if (circularClass != null) return circularClass;
        // Annotations on a supertype can themselves reference classes participating in a cycle.
        for (PsiAnnotation annotation : superType.getAnnotations()) {
          circularClass = getCircularClassInner(annotation.resolveAnnotationType(), usedClasses);
          if (circularClass != null) return circularClass;
        }
      }
    }
    finally {
      // aClass is only "in use" while its own super hierarchy is being walked.
      usedClasses.remove(aClass);
    }
    return null;
  }

  @Nullable
  private static PsiClass getCircularClassInner(@Nullable PsiElement superType,
                                                @NotNull Collection<? super PsiClass> usedClasses) {
    while (superType instanceof PsiClass) {
      if (!CommonClassNames.JAVA_LANG_OBJECT.equals(((PsiClass)superType).getQualifiedName())) {
        PsiClass circularClass = getCircularClass((PsiClass)superType, usedClasses);
        if (circularClass != null) return circularClass;
      }
      // check class qualifier
      superType = superType.getParent();
    }
    return null;
  }

  /**
   * Detects a circular inheritance
   * @param aClass a class to check
   * @return a class which is a part of the inheritance loop; null if no circular inheritance was detected
   */
  @Nullable
  public static PsiClass getCircularClass(@NotNull PsiClass aClass) {
    return getCircularClass(aClass, new HashSet<>());
  }
}
| |
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.server;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EventObject;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.logging.Logger;
import com.vaadin.event.EventRouter;
import com.vaadin.event.MethodEventSource;
import com.vaadin.shared.communication.ClientRpc;
import com.vaadin.shared.communication.ServerRpc;
import com.vaadin.shared.communication.SharedState;
import com.vaadin.shared.ui.ComponentStateUtil;
import com.vaadin.ui.Component;
import com.vaadin.ui.Component.Event;
import com.vaadin.ui.HasComponents;
import com.vaadin.ui.LegacyComponent;
import com.vaadin.ui.UI;
import elemental.json.JsonObject;
/**
* An abstract base class for ClientConnector implementations. This class
* provides all the basic functionality required for connectors.
*
* @author Vaadin Ltd
* @since 7.0.0
*/
public abstract class AbstractClientConnector implements ClientConnector,
MethodEventSource {
    /**
     * A map from client to server RPC interface class name to the RPC call
     * manager that handles incoming RPC calls for that interface.
     */
    private Map<String, ServerRpcManager<?>> rpcManagerMap = new HashMap<String, ServerRpcManager<?>>();
    /**
     * A map from server to client RPC interface class to the RPC proxy that
     * sends outgoing RPC calls for that interface.
     */
    private Map<Class<?>, ClientRpc> rpcProxyMap = new HashMap<Class<?>, ClientRpc>();
    /**
     * Shared state object to be communicated from the server to the client when
     * modified.
     */
    private SharedState sharedState;
    // Concrete type of the shared state; resolved lazily via reflection and
    // cached by getStateType().
    private Class<? extends SharedState> stateType;
    /**
     * Pending RPC method invocations to be sent.
     */
    private ArrayList<ClientMethodInvocation> pendingInvocations = new ArrayList<ClientMethodInvocation>();
    // Assigned lazily in getConnectorId(); requires an attached session.
    private String connectorId;
    // Extensions attached to this connector; exposed read-only through getExtensions().
    private ArrayList<Extension> extensions = new ArrayList<Extension>();
    /**
     * The EventRouter used for the event model.
     */
    private EventRouter eventRouter = null;
    // Connector-specific error handler; null until set via setErrorHandler().
    private ErrorHandler errorHandler = null;
@Override
public void addAttachListener(AttachListener listener) {
addListener(AttachEvent.ATTACH_EVENT_IDENTIFIER, AttachEvent.class,
listener, AttachListener.attachMethod);
}
@Override
public void removeAttachListener(AttachListener listener) {
removeListener(AttachEvent.ATTACH_EVENT_IDENTIFIER, AttachEvent.class,
listener);
}
@Override
public void addDetachListener(DetachListener listener) {
addListener(DetachEvent.DETACH_EVENT_IDENTIFIER, DetachEvent.class,
listener, DetachListener.detachMethod);
}
@Override
public void removeDetachListener(DetachListener listener) {
removeListener(DetachEvent.DETACH_EVENT_IDENTIFIER, DetachEvent.class,
listener);
}
/**
* @deprecated As of 7.0, use {@link #markAsDirty()} instead. Note that you
* typically do not need to call {@link #markAsDirty()} as
* {@link #getState()} will mark the connector dirty and the
* framework will then check what, if anything, needs to be sent
* to the client. {@link LegacyComponent}s which rely on paint
* might still need to call this or {@link #markAsDirty()} .
*/
@Deprecated
@Override
public void requestRepaint() {
markAsDirty();
}
/* Documentation copied from interface */
@Override
public void markAsDirty() {
assert getSession() == null || getSession().hasLock() : buildLockAssertMessage("markAsDirty()");
UI uI = getUI();
if (uI != null) {
uI.getConnectorTracker().markDirty(this);
}
}
private String buildLockAssertMessage(String method) {
if (VaadinService.isOtherSessionLocked(getSession())) {
return "The session of this connecor is not locked, but there is another session that is locked. "
+ "This might be caused by accidentally using a connector that belongs to another session.";
} else {
return "Session must be locked when " + method + " is called";
}
}
/**
* Registers an RPC interface implementation for this component.
*
* A component can listen to multiple RPC interfaces, and subclasses can
* register additional implementations.
*
* @since 7.0
*
* @param implementation
* RPC interface implementation
* @param rpcInterfaceType
* RPC interface class for which the implementation should be
* registered
*/
protected <T extends ServerRpc> void registerRpc(T implementation,
Class<T> rpcInterfaceType) {
rpcManagerMap.put(rpcInterfaceType.getName(), new ServerRpcManager<T>(
implementation, rpcInterfaceType));
}
/**
* Registers an RPC interface implementation for this component.
*
* A component can listen to multiple RPC interfaces, and subclasses can
* register additional implementations.
*
* @since 7.0
*
* @param implementation
* RPC interface implementation. Also used to deduce the type.
*/
protected <T extends ServerRpc> void registerRpc(T implementation) {
// Search upwards until an interface is found. It must be found as T
// extends ServerRpc
Class<?> cls = implementation.getClass();
Class<ServerRpc> serverRpcClass = getServerRpcInterface(cls);
while (cls != null && serverRpcClass == null) {
cls = cls.getSuperclass();
serverRpcClass = getServerRpcInterface(cls);
}
if (serverRpcClass == null) {
throw new RuntimeException(
"No interface T extends ServerRpc found in the class hierarchy.");
}
registerRpc(implementation, serverRpcClass);
}
@SuppressWarnings("unchecked")
private Class<ServerRpc> getServerRpcInterface(Class<?> implementationClass) {
Class<ServerRpc> serverRpcClass = null;
if (implementationClass != null) {
for (Class<?> candidateInterface : implementationClass
.getInterfaces()) {
if (ServerRpc.class.isAssignableFrom(candidateInterface)) {
if (serverRpcClass != null) {
throw new RuntimeException(
"Use registerRpc(T implementation, Class<T> rpcInterfaceType) if the Rpc implementation implements more than one interface");
}
serverRpcClass = (Class<ServerRpc>) candidateInterface;
}
}
}
return serverRpcClass;
}
/**
* Returns the shared state for this connector. The shared state object is
* shared between the server connector and the client connector. Changes are
* only communicated from the server to the client and not in the other
* direction.
* <p>
* As a side effect, marks the connector dirty so any changes done to the
* state will be sent to the client. Use {@code getState(false)} to avoid
* marking the connector as dirty.
* </p>
*
* @return The shared state for this connector. Never null.
*/
protected SharedState getState() {
return getState(true);
}
/**
* Returns the shared state for this connector.
*
* @param markAsDirty
* true if the connector should automatically be marked dirty,
* false otherwise
*
* @return The shared state for this connector. Never null.
* @see #getState()
*/
protected SharedState getState(boolean markAsDirty) {
assert getSession() == null || getSession().hasLock() : buildLockAssertMessage("getState()");
if (null == sharedState) {
sharedState = createState();
}
if (markAsDirty) {
UI ui = getUI();
if (ui != null && !ui.getConnectorTracker().isDirty(this)
&& !ui.getConnectorTracker().isWritingResponse()) {
ui.getConnectorTracker().markDirty(this);
}
}
return sharedState;
}
@Override
public JsonObject encodeState() {
return LegacyCommunicationManager.encodeState(this, getState(false));
}
/**
* Creates the shared state bean to be used in server to client
* communication.
* <p>
* By default a state object of the defined return type of
* {@link #getState()} is created. Subclasses can override this method and
* return a new instance of the correct state class but this should rarely
* be necessary.
* </p>
* <p>
* No configuration of the values of the state should be performed in
* {@link #createState()}.
*
* @since 7.0
*
* @return new shared state object
*/
protected SharedState createState() {
try {
return getStateType().newInstance();
} catch (Exception e) {
throw new RuntimeException(
"Error creating state of type " + getStateType().getName()
+ " for " + getClass().getName(), e);
}
}
@Override
public Class<? extends SharedState> getStateType() {
// Lazy load because finding type can be expensive because of the
// exceptions flying around
if (stateType == null) {
stateType = findStateType();
}
return stateType;
}
    /**
     * Resolves the concrete {@link SharedState} subclass used by this
     * connector by reflecting on the return type of the most specific
     * non-synthetic getState() declaration in the class hierarchy.
     *
     * @return the state class declared by getState()
     * @throws RuntimeException if no suitable getState() declaration is found
     */
    private Class<? extends SharedState> findStateType() {
        try {
            Class<?> class1 = getClass();
            while (class1 != null) {
                try {
                    Method m = class1.getDeclaredMethod("getState",
                            (Class[]) null);
                    Class<?> type = m.getReturnType();
                    // Synthetic bridge methods report the erased return type,
                    // so only a non-synthetic declaration is trusted.
                    if (!m.isSynthetic()) {
                        return type.asSubclass(SharedState.class);
                    }
                } catch (NoSuchMethodException nsme) {
                    // Intentionally ignored: this class level has no
                    // declaration, fall through to the superclass.
                }
                // Try in superclass instead
                class1 = class1.getSuperclass();
            }
            throw new NoSuchMethodException(getClass().getCanonicalName()
                    + ".getState()");
        } catch (Exception e) {
            throw new RuntimeException("Error finding state type for "
                    + getClass().getName(), e);
        }
    }
/**
* Returns an RPC proxy for a given server to client RPC interface for this
* component.
*
* TODO more javadoc, subclasses, ...
*
* @param rpcInterface
* RPC interface type
*
* @since 7.0
*/
protected <T extends ClientRpc> T getRpcProxy(final Class<T> rpcInterface) {
// create, initialize and return a dynamic proxy for RPC
try {
if (!rpcProxyMap.containsKey(rpcInterface)) {
Class<?> proxyClass = Proxy.getProxyClass(
rpcInterface.getClassLoader(), rpcInterface);
Constructor<?> constructor = proxyClass
.getConstructor(InvocationHandler.class);
T rpcProxy = rpcInterface.cast(constructor
.newInstance(new RpcInvocationHandler(rpcInterface)));
// cache the proxy
rpcProxyMap.put(rpcInterface, rpcProxy);
}
return (T) rpcProxyMap.get(rpcInterface);
} catch (Exception e) {
// TODO exception handling?
throw new RuntimeException(e);
}
}
private class RpcInvocationHandler implements InvocationHandler,
Serializable {
private String rpcInterfaceName;
public RpcInvocationHandler(Class<?> rpcInterface) {
rpcInterfaceName = rpcInterface.getName().replaceAll("\\$", ".");
}
@Override
public Object invoke(Object proxy, Method method, Object[] args)
throws Throwable {
if (method.getDeclaringClass() == Object.class) {
// Don't add Object methods such as toString and hashCode as
// invocations
return method.invoke(this, args);
}
addMethodInvocationToQueue(rpcInterfaceName, method, args);
return null;
}
}
/**
* For internal use: adds a method invocation to the pending RPC call queue.
*
* @param interfaceName
* RPC interface name
* @param method
* RPC method
* @param parameters
* RPC all parameters
*
* @since 7.0
*/
protected void addMethodInvocationToQueue(String interfaceName,
Method method, Object[] parameters) {
// add to queue
pendingInvocations.add(new ClientMethodInvocation(this, interfaceName,
method, parameters));
// TODO no need to do full repaint if only RPC calls
requestRepaint();
}
@Override
public ServerRpcManager<?> getRpcManager(String rpcInterfaceName) {
return rpcManagerMap.get(rpcInterfaceName);
}
@Override
public List<ClientMethodInvocation> retrievePendingRpcCalls() {
if (pendingInvocations.isEmpty()) {
return Collections.emptyList();
} else {
List<ClientMethodInvocation> result = pendingInvocations;
pendingInvocations = new ArrayList<ClientMethodInvocation>();
return Collections.unmodifiableList(result);
}
}
@Override
public String getConnectorId() {
if (connectorId == null) {
if (getSession() == null) {
throw new RuntimeException(
"Component must be attached to a session when getConnectorId() is called for the first time");
}
connectorId = getSession().createConnectorId(this);
}
return connectorId;
}
/**
* Finds the {@link VaadinSession} to which this connector belongs. If the
* connector has not been attached, <code>null</code> is returned.
*
* @return The connector's session, or <code>null</code> if not attached
*/
protected VaadinSession getSession() {
UI uI = getUI();
if (uI == null) {
return null;
} else {
return uI.getSession();
}
}
/**
* Finds a UI ancestor of this connector. <code>null</code> is returned if
* no UI ancestor is found (typically because the connector is not attached
* to a proper hierarchy).
*
* @return the UI ancestor of this connector, or <code>null</code> if none
* is found.
*/
@Override
public UI getUI() {
ClientConnector connector = this;
while (connector != null) {
if (connector instanceof UI) {
return (UI) connector;
}
connector = connector.getParent();
}
return null;
}
private static Logger getLogger() {
return Logger.getLogger(AbstractClientConnector.class.getName());
}
/**
* @deprecated As of 7.0, use {@link #markAsDirtyRecursive()} instead
*/
@Override
@Deprecated
public void requestRepaintAll() {
markAsDirtyRecursive();
}
@Override
public void markAsDirtyRecursive() {
markAsDirty();
for (ClientConnector connector : getAllChildrenIterable(this)) {
connector.markAsDirtyRecursive();
}
}
/**
* Get an Iterable for iterating over all child connectors, including both
* extensions and child components.
*
* @param connector
* the connector to get children for
* @return an Iterable giving all child connectors.
*/
public static Iterable<? extends ClientConnector> getAllChildrenIterable(
final ClientConnector connector) {
Collection<Extension> extensions = connector.getExtensions();
boolean hasComponents = connector instanceof HasComponents;
boolean hasExtensions = extensions.size() > 0;
if (!hasComponents && !hasExtensions) {
// If has neither component nor extensions, return immutable empty
// list as iterable.
return Collections.emptyList();
}
if (hasComponents && !hasExtensions) {
// only components
return (HasComponents) connector;
}
if (!hasComponents && hasExtensions) {
// only extensions
return extensions;
}
// combine the iterators of extensions and components to a new iterable.
final Iterator<Component> componentsIterator = ((HasComponents) connector)
.iterator();
final Iterator<Extension> extensionsIterator = extensions.iterator();
Iterable<? extends ClientConnector> combinedIterable = new Iterable<ClientConnector>() {
@Override
public Iterator<ClientConnector> iterator() {
return new Iterator<ClientConnector>() {
@Override
public boolean hasNext() {
return componentsIterator.hasNext()
|| extensionsIterator.hasNext();
}
@Override
public ClientConnector next() {
if (componentsIterator.hasNext()) {
return componentsIterator.next();
}
if (extensionsIterator.hasNext()) {
return extensionsIterator.next();
}
throw new NoSuchElementException();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
return combinedIterable;
}
@Override
public Collection<Extension> getExtensions() {
return Collections.unmodifiableCollection(extensions);
}
/**
* Add an extension to this connector. This method is protected to allow
* extensions to select which targets they can extend.
*
* @param extension
* the extension to add
*/
protected void addExtension(Extension extension) {
ClientConnector previousParent = extension.getParent();
if (equals(previousParent)) {
// Nothing to do, already attached
return;
} else if (previousParent != null) {
throw new IllegalStateException(
"Moving an extension from one parent to another is not supported");
}
extensions.add(extension);
extension.setParent(this);
markAsDirty();
}
@Override
public void removeExtension(Extension extension) {
extension.setParent(null);
extensions.remove(extension);
markAsDirty();
}
/*
* (non-Javadoc)
*
* @see com.vaadin.server.ClientConnector#isAttached()
*/
@Override
public boolean isAttached() {
return getSession() != null;
}
@Override
public void attach() {
markAsDirty();
getUI().getConnectorTracker().registerConnector(this);
fireEvent(new AttachEvent(this));
for (ClientConnector connector : getAllChildrenIterable(this)) {
connector.attach();
}
}
/**
* {@inheritDoc}
*
* <p>
* The {@link #getSession()} and {@link #getUI()} methods might return
* <code>null</code> after this method is called.
* </p>
*/
@Override
public void detach() {
for (ClientConnector connector : getAllChildrenIterable(this)) {
connector.detach();
}
fireEvent(new DetachEvent(this));
getUI().getConnectorTracker().unregisterConnector(this);
}
@Override
public boolean isConnectorEnabled() {
if (getParent() == null) {
// No parent -> the component cannot receive updates from the client
return false;
} else {
return getParent().isConnectorEnabled();
}
}
    /**
     * Hook invoked before a response is written to the client. The base
     * implementation does nothing; subclasses may override.
     */
    @Override
    public void beforeClientResponse(boolean initial) {
        // Do nothing by default
    }
@Override
public boolean handleConnectorRequest(VaadinRequest request,
VaadinResponse response, String path) throws IOException {
DownloadStream stream = null;
String[] parts = path.split("/", 2);
String key = parts[0];
VaadinSession session = getSession();
session.lock();
try {
ConnectorResource resource = (ConnectorResource) getResource(key);
if (resource == null) {
return false;
}
stream = resource.getStream();
} finally {
session.unlock();
}
stream.writeResponse(request, response);
return true;
}
/**
* Gets a resource defined using {@link #setResource(String, Resource)} with
* the corresponding key.
*
* @param key
* the string identifier of the resource
* @return a resource, or <code>null</code> if there's no resource
* associated with the given key
*
* @see #setResource(String, Resource)
*/
protected Resource getResource(String key) {
return ResourceReference
.getResource(getState(false).resources.get(key));
}
/**
* Registers a resource with this connector using the given key. This will
* make the URL for retrieving the resource available to the client-side
* connector using
* {@link com.vaadin.terminal.gwt.client.ui.AbstractConnector#getResourceUrl(String)}
* with the same key.
*
* @param key
* the string key to associate the resource with
* @param resource
* the resource to set, or <code>null</code> to clear a previous
* association.
*/
protected void setResource(String key, Resource resource) {
ResourceReference resourceReference = ResourceReference.create(
resource, this, key);
if (resourceReference == null) {
getState().resources.remove(key);
} else {
getState().resources.put(key, resourceReference);
}
}
/* Listener code starts. Should be refactored. */
/**
* <p>
* Registers a new listener with the specified activation method to listen
* events generated by this component. If the activation method does not
* have any arguments the event object will not be passed to it when it's
* called.
* </p>
*
* <p>
* This method additionally informs the event-api to route events with the
* given eventIdentifier to the components handleEvent function call.
* </p>
*
* <p>
* For more information on the inheritable event mechanism see the
* {@link com.vaadin.event com.vaadin.event package documentation}.
* </p>
*
* @param eventIdentifier
* the identifier of the event to listen for
* @param eventType
* the type of the listened event. Events of this type or its
* subclasses activate the listener.
* @param target
* the object instance who owns the activation method.
* @param method
* the activation method.
*
* @since 6.2
*/
protected void addListener(String eventIdentifier, Class<?> eventType,
Object target, Method method) {
if (eventRouter == null) {
eventRouter = new EventRouter();
}
boolean needRepaint = !eventRouter.hasListeners(eventType);
eventRouter.addListener(eventType, target, method);
if (needRepaint) {
ComponentStateUtil.addRegisteredEventListener(getState(),
eventIdentifier);
}
}
/**
* Checks if the given {@link Event} type is listened for this component.
*
* @param eventType
* the event type to be checked
* @return true if a listener is registered for the given event type
*/
protected boolean hasListeners(Class<?> eventType) {
return eventRouter != null && eventRouter.hasListeners(eventType);
}
/**
* Removes all registered listeners matching the given parameters. Since
* this method receives the event type and the listener object as
* parameters, it will unregister all <code>object</code>'s methods that are
* registered to listen to events of type <code>eventType</code> generated
* by this component.
*
* <p>
* This method additionally informs the event-api to stop routing events
* with the given eventIdentifier to the components handleEvent function
* call.
* </p>
*
* <p>
* For more information on the inheritable event mechanism see the
* {@link com.vaadin.event com.vaadin.event package documentation}.
* </p>
*
* @param eventIdentifier
* the identifier of the event to stop listening for
* @param eventType
* the exact event type the <code>object</code> listens to.
* @param target
* the target object that has registered to listen to events of
* type <code>eventType</code> with one or more methods.
*
* @since 6.2
*/
protected void removeListener(String eventIdentifier, Class<?> eventType,
Object target) {
if (eventRouter != null) {
eventRouter.removeListener(eventType, target);
if (!eventRouter.hasListeners(eventType)) {
ComponentStateUtil.removeRegisteredEventListener(getState(),
eventIdentifier);
}
}
}
/**
* <p>
* Registers a new listener with the specified activation method to listen
* events generated by this component. If the activation method does not
* have any arguments the event object will not be passed to it when it's
* called.
* </p>
*
* <p>
* For more information on the inheritable event mechanism see the
* {@link com.vaadin.event com.vaadin.event package documentation}.
* </p>
*
* @param eventType
* the type of the listened event. Events of this type or its
* subclasses activate the listener.
* @param target
* the object instance who owns the activation method.
* @param method
* the activation method.
*
*/
@Override
public void addListener(Class<?> eventType, Object target, Method method) {
if (eventRouter == null) {
eventRouter = new EventRouter();
}
eventRouter.addListener(eventType, target, method);
}
/**
* <p>
* Convenience method for registering a new listener with the specified
* activation method to listen events generated by this component. If the
* activation method does not have any arguments the event object will not
* be passed to it when it's called.
* </p>
*
* <p>
* This version of <code>addListener</code> gets the name of the activation
* method as a parameter. The actual method is reflected from
* <code>object</code>, and unless exactly one match is found,
* <code>java.lang.IllegalArgumentException</code> is thrown.
* </p>
*
* <p>
* For more information on the inheritable event mechanism see the
* {@link com.vaadin.event com.vaadin.event package documentation}.
* </p>
*
* <p>
* Note: Using this method is discouraged because it cannot be checked
* during compilation. Use {@link #addListener(Class, Object, Method)} or
* {@link #addListener(com.vaadin.ui.Component.Listener)} instead.
* </p>
*
* @param eventType
* the type of the listened event. Events of this type or its
* subclasses activate the listener.
* @param target
* the object instance who owns the activation method.
* @param methodName
* the name of the activation method.
* @deprecated As of 7.0. This method should be avoided. Use
* {@link #addListener(Class, Object, Method)} or
* {@link #addListener(String, Class, Object, Method)} instead.
*/
@Override
@Deprecated
public void addListener(Class<?> eventType, Object target, String methodName) {
if (eventRouter == null) {
eventRouter = new EventRouter();
}
eventRouter.addListener(eventType, target, methodName);
}
/**
* Removes all registered listeners matching the given parameters. Since
* this method receives the event type and the listener object as
* parameters, it will unregister all <code>object</code>'s methods that are
* registered to listen to events of type <code>eventType</code> generated
* by this component.
*
* <p>
* For more information on the inheritable event mechanism see the
* {@link com.vaadin.event com.vaadin.event package documentation}.
* </p>
*
* @param eventType
* the exact event type the <code>object</code> listens to.
* @param target
* the target object that has registered to listen to events of
* type <code>eventType</code> with one or more methods.
*/
@Override
public void removeListener(Class<?> eventType, Object target) {
if (eventRouter != null) {
eventRouter.removeListener(eventType, target);
}
}
/**
* Removes one registered listener method. The given method owned by the
* given object will no longer be called when the specified events are
* generated by this component.
*
* <p>
* For more information on the inheritable event mechanism see the
* {@link com.vaadin.event com.vaadin.event package documentation}.
* </p>
*
* @param eventType
* the exact event type the <code>object</code> listens to.
* @param target
* target object that has registered to listen to events of type
* <code>eventType</code> with one or more methods.
* @param method
* the method owned by <code>target</code> that's registered to
* listen to events of type <code>eventType</code>.
*/
@Override
public void removeListener(Class<?> eventType, Object target, Method method) {
if (eventRouter != null) {
eventRouter.removeListener(eventType, target, method);
}
}
/**
 * <p>
 * Removes one registered listener method. The given method owned by the
 * given object will no longer be called when the specified events are
 * generated by this component.
 * </p>
 *
 * <p>
 * This version of <code>removeListener</code> gets the name of the
 * activation method as a parameter. The actual method is reflected from
 * <code>target</code>, and unless exactly one match is found,
 * <code>java.lang.IllegalArgumentException</code> is thrown.
 * </p>
 *
 * <p>
 * For more information on the inheritable event mechanism see the
 * {@link com.vaadin.event com.vaadin.event package documentation}.
 * </p>
 *
 * @param eventType
 *            the exact event type the <code>object</code> listens to.
 * @param target
 *            the target object that has registered to listen to events of
 *            type <code>eventType</code> with one or more methods.
 * @param methodName
 *            the name of the method owned by <code>target</code> that's
 *            registered to listen to events of type <code>eventType</code>.
 * @deprecated As of 7.0. This method should be avoided. Use
 *             {@link #removeListener(Class, Object, Method)} instead.
 */
@Deprecated
@Override
public void removeListener(Class<?> eventType, Object target,
        String methodName) {
    // Nothing registered yet; nothing to remove.
    if (eventRouter == null) {
        return;
    }
    eventRouter.removeListener(eventType, target, methodName);
}
/**
 * Returns all listeners that are registered for the given event type or one
 * of its subclasses.
 *
 * @param eventType
 *            The type of event to return listeners for.
 * @return A collection with all registered listeners. Empty if no listeners
 *         are found.
 */
public Collection<?> getListeners(Class<?> eventType) {
    if (eventRouter == null) {
        // Use the type-safe factory instead of the raw-typed
        // Collections.EMPTY_LIST field to avoid an unchecked raw-type use.
        return Collections.emptyList();
    }
    return eventRouter.getListeners(eventType);
}
/**
 * Sends the event to all listeners.
 *
 * @param event
 *            the Event to be sent to all listeners.
 */
protected void fireEvent(EventObject event) {
    // With no router there are no listeners, so the event is dropped.
    if (eventRouter == null) {
        return;
    }
    eventRouter.fireEvent(event);
}
/*
 * (non-Javadoc)
 *
 * @see com.vaadin.server.ClientConnector#getErrorHandler()
 */
// Returns the handler set via setErrorHandler(ErrorHandler); may be null if
// none has been assigned.
@Override
public ErrorHandler getErrorHandler() {
    return errorHandler;
}
/*
 * (non-Javadoc)
 *
 * @see com.vaadin.server.ClientConnector#setErrorHandler(com.vaadin.server.
 * ErrorHandler)
 */
// Stores the handler as-is; no null check is performed, so passing null
// effectively clears the handler.
@Override
public void setErrorHandler(ErrorHandler errorHandler) {
    this.errorHandler = errorHandler;
}
/**
 * Compares this connector to another object for equality.
 * <p>
 * This equals method must return true when we're comparing an object to
 * its proxy. This happens a lot with CDI (and possibly Spring) when
 * we're injecting Components. See #14639. Delegating to
 * {@link #isThis(Object)} lets a proxy unwrap itself before the identity
 * comparison.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof AbstractClientConnector)) {
        return false;
    }
    return ((AbstractClientConnector) obj).isThis(this);
}
/**
 * For internal use only, may be changed or removed in future versions.
 * <p>
 * This method must be protected, because otherwise it will not be redefined
 * by the proxy to actually be called on the underlying instance.
 * <p>
 * See #14639
 *
 * @deprecated only defined for framework hacks, do not use.
 */
// Identity check; a proxy overrides this so the comparison happens against
// the proxied target instance rather than the proxy object itself.
@Deprecated
protected boolean isThis(Object that) {
    return this == that;
}
/*
 * (non-Javadoc)
 *
 * @see java.lang.Object#hashCode()
 */
// Identity hash is intentional: equals() above is identity-based (modulo
// proxy unwrapping), so the default Object hashCode stays consistent with it.
@Override
public int hashCode() {
    return super.hashCode();
}
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.firstrun;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.firstrun.ImageCarousel.ImageCarouselPositionChangeListener;
import org.chromium.chrome.browser.profiles.ProfileDownloader;
import org.chromium.chrome.browser.signin.SigninManager;
import org.chromium.chrome.browser.widget.ButtonCompat;
import org.chromium.sync.signin.AccountManagerHelper;
import java.util.List;
/**
 * This view allows the user to select an account to log in to, add an account,
 * cancel account selection, etc. Users of this class should
 * {@link AccountFirstRunView#setListener(Listener)} after the view has been
 * inflated.
 */
public class AccountFirstRunView extends FrameLayout
        implements ImageCarouselPositionChangeListener, ProfileDownloader.Observer {

    /**
     * Callbacks for various account selection events.
     */
    public interface Listener {
        /**
         * The user selected an account.
         * @param accountName The name of the account
         */
        public void onAccountSelectionConfirmed(String accountName);

        /**
         * The user canceled account selection.
         */
        public void onAccountSelectionCanceled();

        /**
         * The user wants to make a new account.
         */
        public void onNewAccount();

        /**
         * The user has been signed in and pressed 'Done' button.
         * @param accountName The name of the account
         */
        public void onSigningInCompleted(String accountName);

        /**
         * The user has signed in and pressed 'Settings' button.
         * @param accountName The name of the account
         */
        public void onSettingsButtonClicked(String accountName);

        /**
         * Failed to set the forced account because it wasn't found.
         * @param forcedAccountName The name of the forced-sign-in account
         */
        public void onFailedToSetForcedAccount(String forcedAccountName);
    }

    /**
     * Keeps the spinner selection, {@link #mAccountName} and the image carousel
     * position in sync as the user changes the selected account.
     */
    private class SpinnerOnItemSelectedListener implements AdapterView.OnItemSelectedListener {
        @Override
        public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
            String accountName = parent.getItemAtPosition(pos).toString();
            if (accountName.equals(mAddAnotherAccount)) {
                // Don't allow "add account" to remain selected. http://crbug.com/421052
                int oldPosition = mArrayAdapter.getPosition(mAccountName);
                if (oldPosition == -1) oldPosition = 0;
                mSpinner.setSelection(oldPosition, false);
                mListener.onNewAccount();
            } else {
                mAccountName = accountName;
                // Only scroll the carousel for user-initiated selections;
                // onPositionChanged() sets mPositionSetProgrammatically before
                // calling setSelection() to avoid a feedback loop between the
                // carousel and the spinner.
                if (!mPositionSetProgrammatically) mImageCarousel.scrollTo(pos, false, false);
                mPositionSetProgrammatically = false;
            }
        }

        @Override
        public void onNothingSelected(AdapterView<?> parent) {
            // Fall back to the first entry when the selection disappears.
            mAccountName = parent.getItemAtPosition(0).toString();
        }
    }

    // Bit masks decoding the experiment group integer returned by
    // SigninManager.getAndroidSigninPromoExperimentGroup(); consumed in
    // configureForAddAccountPromo().
    private static final int EXPERIMENT_TITLE_VARIANT_MASK = 1;
    private static final int EXPERIMENT_SUMMARY_VARIANT_MASK = 2;
    private static final int EXPERIMENT_LAYOUT_VARIANT_MASK = 4;
    private static final int EXPERIMENT_MAX_VALUE = 7;

    private AccountManagerHelper mAccountManagerHelper;
    // Account names currently displayed; refreshed by updateAccounts().
    private List<String> mAccountNames;
    private ArrayAdapter<CharSequence> mArrayAdapter;
    private ImageCarousel mImageCarousel;
    private Button mPositiveButton;
    private Button mNegativeButton;
    private TextView mDescriptionText;
    private Listener mListener;
    private Spinner mSpinner;
    // Non-null when the view is in "no choice, just a confirmation" mode.
    private String mForcedAccountName;
    // The currently selected account name, kept in sync with the spinner.
    private String mAccountName;
    // Label of the synthetic "Add account" spinner entry.
    private String mAddAnotherAccount;
    private ProfileDataCache mProfileData;
    // True once switchToSignedMode() has run; freezes account selection.
    private boolean mSignedIn;
    // True while the spinner selection is being changed from code (carousel
    // callback) rather than by the user; see SpinnerOnItemSelectedListener.
    private boolean mPositionSetProgrammatically;
    private int mDescriptionTextId;
    private boolean mIsChildAccount;
    private boolean mHorizontalModeEnabled = true;

    /** Constructor used when inflating this view from XML. */
    public AccountFirstRunView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    /**
     * Initializes this view with profile images and full names.
     * @param profileData ProfileDataCache that will be used to call to retrieve user account info.
     */
    public void init(ProfileDataCache profileData) {
        setProfileDataCache(profileData);
    }

    /**
     * Sets the profile data cache.
     * @param profileData ProfileDataCache that will be used to call to retrieve user account info.
     */
    public void setProfileDataCache(ProfileDataCache profileData) {
        mProfileData = profileData;
        // Observe the cache so images refresh as profile data downloads complete.
        mProfileData.setObserver(this);
        updateProfileImages();
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();

        mImageCarousel = (ImageCarousel) findViewById(R.id.image_slider);
        mImageCarousel.setListener(this);

        mPositiveButton = (Button) findViewById(R.id.positive_button);
        mNegativeButton = (Button) findViewById(R.id.negative_button);
        mNegativeButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Disable both buttons so cancel cannot be triggered twice.
                setButtonsEnabled(false);
                mListener.onAccountSelectionCanceled();
            }
        });

        // A workaround for Android support library ignoring padding set in XML. b/20307607
        int padding = getResources().getDimensionPixelSize(R.dimen.fre_button_padding);
        ApiCompatibilityUtils.setPaddingRelative(mPositiveButton, padding, 0, padding, 0);
        ApiCompatibilityUtils.setPaddingRelative(mNegativeButton, padding, 0, padding, 0);

        mDescriptionText = (TextView) findViewById(R.id.description);
        mDescriptionTextId = R.string.fre_account_choice_description;

        mAddAnotherAccount = getResources().getString(R.string.fre_add_account);

        mSpinner = (Spinner) findViewById(R.id.google_accounts_spinner);
        mArrayAdapter = new ArrayAdapter<CharSequence>(
                getContext().getApplicationContext(), R.layout.fre_spinner_text);

        updateAccounts();

        mArrayAdapter.setDropDownViewResource(R.layout.fre_spinner_dropdown);
        mSpinner.setAdapter(mArrayAdapter);
        mSpinner.setOnItemSelectedListener(new SpinnerOnItemSelectedListener());
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Accounts may have been added/removed while the view was detached.
        updateAccounts();
    }

    @Override
    public void onWindowVisibilityChanged(int visibility) {
        super.onWindowVisibilityChanged(visibility);
        if (visibility == View.VISIBLE) {
            // Refresh when the view becomes visible again.
            updateAccounts();
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // This assumes that view's layout_width is set to match_parent.
        assert MeasureSpec.getMode(widthMeasureSpec) == MeasureSpec.EXACTLY;
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);

        // Switch between a side-by-side and a stacked layout depending on
        // whether the carousel fits next to the content in landscape.
        LinearLayout content = (LinearLayout) findViewById(R.id.fre_content);
        int paddingStart = 0;
        if (mHorizontalModeEnabled
                && width >= 2 * getResources().getDimension(R.dimen.fre_image_carousel_width)
                && width > height) {
            content.setOrientation(LinearLayout.HORIZONTAL);
            paddingStart = getResources().getDimensionPixelSize(R.dimen.fre_margin);
        } else {
            content.setOrientation(LinearLayout.VERTICAL);
        }
        ApiCompatibilityUtils.setPaddingRelative(content,
                paddingStart,
                content.getPaddingTop(),
                ApiCompatibilityUtils.getPaddingEnd(content),
                content.getPaddingBottom());

        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }

    /**
     * Changes the visuals slightly for when this view appears in the recent tabs page instead of
     * in first run. For example, the title text is changed as well as the button style.
     */
    public void configureForRecentTabsPage() {
        mHorizontalModeEnabled = false;

        setBackgroundResource(R.color.ntp_bg);

        TextView title = (TextView) findViewById(R.id.title);
        title.setText(R.string.sign_in_to_chrome);

        // Remove the border above the button, swap in a new button with a blue material background,
        // and center the button.
        View buttonBarSeparator = findViewById(R.id.button_bar_separator);
        buttonBarSeparator.setVisibility(View.GONE);

        LinearLayout buttonContainer = (LinearLayout) findViewById(R.id.button_bar);
        buttonContainer.setGravity(Gravity.CENTER_HORIZONTAL);

        setPadding(0, 0, 0, getResources().getDimensionPixelOffset(
                R.dimen.sign_in_promo_padding_bottom));

        ButtonCompat positiveButton = new ButtonCompat(getContext(),
                getResources().getColor(R.color.light_active_color));
        positiveButton.setTextColor(Color.WHITE);
        positiveButton.setLayoutParams(new LinearLayout.LayoutParams(
                LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
        buttonContainer.removeView(mPositiveButton);
        buttonContainer.addView(positiveButton);
        mPositiveButton = positiveButton;
    }

    /**
     * Changes the visuals slightly for when this view is shown in a subsequent run after user adds
     * a Google account to the device.
     */
    public void configureForAddAccountPromo() {
        int experimentGroup = SigninManager.getAndroidSigninPromoExperimentGroup();
        assert experimentGroup >= 0 && experimentGroup <= EXPERIMENT_MAX_VALUE;

        TextView title = (TextView) findViewById(R.id.title);
        if ((experimentGroup & EXPERIMENT_TITLE_VARIANT_MASK) != 0) {
            title.setText(R.string.make_chrome_yours);
        }

        mDescriptionTextId = (experimentGroup & EXPERIMENT_SUMMARY_VARIANT_MASK) != 0
                ? R.string.sign_in_to_chrome_summary_variant : R.string.sign_in_to_chrome_summary;

        if ((experimentGroup & EXPERIMENT_LAYOUT_VARIANT_MASK) != 0) {
            // Layout variant: replace the carousel with a static illustration.
            mImageCarousel.setVisibility(GONE);

            ImageView illustrationView = new ImageView(getContext());
            illustrationView.setImageResource(R.drawable.signin_promo_illustration);
            illustrationView.setBackgroundColor(getResources().getColor(
                    R.color.illustration_background_color));

            LinearLayout linearLayout = (LinearLayout) findViewById(R.id.fre_account_linear_layout);
            linearLayout.addView(illustrationView, 0);
        }
    }

    /**
     * Enable or disable UI elements so the user can't select an account, cancel, etc.
     *
     * @param enabled The state to change to.
     */
    public void setButtonsEnabled(boolean enabled) {
        mPositiveButton.setEnabled(enabled);
        mNegativeButton.setEnabled(enabled);
    }

    /**
     * Set the account selection event listener. See {@link Listener}
     *
     * @param listener The listener.
     */
    public void setListener(Listener listener) {
        mListener = listener;
    }

    /**
     * Tell the view whether or not the user can cancel account selection. In
     * wizards, it makes sense to allow the user to skip account selection.
     * However, in other settings-type contexts it does not make sense to allow
     * this.
     *
     * @param canCancel Whether or not account selection can be canceled.
     */
    public void setCanCancel(boolean canCancel) {
        mNegativeButton.setVisibility(canCancel ? View.VISIBLE : View.GONE);
        mPositiveButton.setGravity(
                canCancel ? Gravity.END | Gravity.CENTER_VERTICAL : Gravity.CENTER);
    }

    /**
     * Refreshes the list of available system accounts.
     */
    private void updateAccounts() {
        // After sign-in the UI is frozen on the chosen account; don't repopulate.
        if (mSignedIn) return;

        setButtonsEnabled(true);

        mAccountManagerHelper = AccountManagerHelper.get(getContext().getApplicationContext());
        List<String> oldAccountNames = mAccountNames;
        mAccountNames = mAccountManagerHelper.getGoogleAccountNames();

        int accountToSelect = 0;
        if (mForcedAccountName != null) {
            accountToSelect = mAccountNames.indexOf(mForcedAccountName);
            if (accountToSelect < 0) {
                mListener.onFailedToSetForcedAccount(mForcedAccountName);
                return;
            }
        } else {
            // Prefer a newly added account; otherwise keep the old selection.
            accountToSelect = getIndexOfNewElement(
                    oldAccountNames, mAccountNames, mSpinner.getSelectedItemPosition());
        }

        mArrayAdapter.clear();
        if (!mAccountNames.isEmpty()) {
            mSpinner.setVisibility(View.VISIBLE);
            mArrayAdapter.addAll(mAccountNames);
            mArrayAdapter.add(mAddAnotherAccount);
            mPositiveButton.setText(R.string.choose_account_sign_in);
            mPositiveButton.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    mListener.onAccountSelectionConfirmed(mAccountName);
                }
            });
            mDescriptionText.setText(mDescriptionTextId);
        } else {
            // No accounts on the device: hide the spinner and turn the
            // positive button into an "add account" action.
            mSpinner.setVisibility(View.GONE);
            mArrayAdapter.add(mAddAnotherAccount);
            mPositiveButton.setText(R.string.fre_no_accounts);
            mPositiveButton.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    mListener.onNewAccount();
                }
            });
            mDescriptionText.setText(R.string.fre_no_account_choice_description);
        }

        if (mProfileData != null) mProfileData.update();
        updateProfileImages();

        mSpinner.setSelection(accountToSelect);
        mAccountName = mArrayAdapter.getItem(accountToSelect).toString();
        mImageCarousel.scrollTo(accountToSelect, false, false);
    }

    /**
     * Attempt to select a new element that is in the new list, but not in the old list.
     * If no such element exists and both the new and the old lists are the same then keep
     * the selection. Otherwise select the first element.
     * @param oldList Old list of user accounts.
     * @param newList New list of user accounts.
     * @param oldIndex Index of the selected account in the old list.
     * @return The index of the new element, if it does not exist but lists are the same the
     *         return the old index, otherwise return 0.
     */
    private static int getIndexOfNewElement(
            List<String> oldList, List<String> newList, int oldIndex) {
        if (oldList == null || newList == null) return 0;
        // Same contents (order-insensitive): keep the current selection.
        if (oldList.size() == newList.size() && oldList.containsAll(newList)) return oldIndex;
        // Exactly one account added: select it.
        if (oldList.size() + 1 == newList.size()) {
            for (int i = 0; i < newList.size(); i++) {
                if (!oldList.contains(newList.get(i))) return i;
            }
        }
        return 0;
    }

    @Override
    public void onProfileDownloaded(String accountId, String fullName, String givenName,
            Bitmap bitmap) {
        // Re-read images/names from the cache now that new data has arrived.
        updateProfileImages();
    }

    private void updateProfileImages() {
        if (mProfileData == null) return;

        // NOTE(review): assumes updateAccounts() has already populated
        // mAccountNames (it is set in onFinishInflate() before init() is
        // normally called) — confirm no caller reaches here earlier.
        int count = mAccountNames.size();
        Bitmap[] images;
        if (count == 0) {
            // Show the generic placeholder image when there are no accounts.
            images = new Bitmap[1];
            images[0] = mProfileData.getImage(null);
        } else {
            images = new Bitmap[count];
            for (int i = 0; i < count; ++i) {
                images[i] = mProfileData.getImage(mAccountNames.get(i));
            }
        }

        mImageCarousel.setImages(images);
        updateProfileName();
    }

    private void updateProfileName() {
        if (!mSignedIn) return;

        // Prefer the given name for child accounts, then the full name, then
        // fall back to the raw account name.
        String name = null;
        if (mIsChildAccount) name = mProfileData.getGivenName(mAccountName);
        if (name == null) name = mProfileData.getFullName(mAccountName);
        if (name == null) name = mAccountName;
        String text = String.format(getResources().getString(R.string.fre_hi_name), name);
        ((TextView) findViewById(R.id.title)).setText(text);
    }

    /**
     * Updates the view to show that sign in has completed.
     */
    public void switchToSignedMode() {
        mSignedIn = true;
        updateProfileName();
        // Lock the account choice: the spinner stays visible but inert.
        mSpinner.setEnabled(false);
        ApiCompatibilityUtils.setBackgroundForView(mSpinner, null);
        mPositiveButton.setText(getResources().getText(R.string.fre_done));
        mPositiveButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                mListener.onSigningInCompleted(mAccountName);
            }
        });
        mNegativeButton.setText(getResources().getText(R.string.fre_settings));
        mNegativeButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                mListener.onSettingsButtonClicked(mAccountName);
            }
        });
        setButtonsEnabled(true);
        String text = getResources().getString(R.string.fre_signed_in_description);
        if (mIsChildAccount) {
            text += "\n" + getResources().getString(
                    R.string.fre_signed_in_description_uca_addendum);
        }
        mDescriptionText.setText(text);
        mImageCarousel.setVisibility(VISIBLE);
        mImageCarousel.setSignedInMode();
    }

    /**
     * @param isChildAccount Whether this view is for a child account.
     */
    public void setIsChildAccount(boolean isChildAccount) {
        mIsChildAccount = isChildAccount;
    }

    /**
     * Switches the view to "no choice, just a confirmation" forced-account mode.
     * @param forcedAccountName An account that should be force-selected.
     */
    public void switchToForcedAccountMode(String forcedAccountName) {
        mForcedAccountName = forcedAccountName;
        updateAccounts();
        assert TextUtils.equals(mAccountName, mForcedAccountName);
        switchToSignedMode();
        assert TextUtils.equals(mAccountName, mForcedAccountName);
    }

    /**
     * @return Whether the view is in signed in mode.
     */
    public boolean isSignedIn() {
        return mSignedIn;
    }

    /**
     * @return Whether the view is in "no choice, just a confirmation" forced-account mode.
     */
    public boolean isInForcedAccountMode() {
        return mForcedAccountName != null;
    }

    @Override
    public void onPositionChanged(int i) {
        // The carousel moved: mirror the position into the spinner, flagging
        // the change as programmatic so the spinner listener doesn't scroll
        // the carousel back (see SpinnerOnItemSelectedListener).
        mPositionSetProgrammatically = true;
        mSpinner.setSelection(i);
    }
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.config;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import com.netflix.config.ChainedDynamicProperty.DynamicBooleanPropertyThatSupportsNull;
import com.netflix.config.ChainedDynamicProperty.IntProperty;
import com.netflix.config.ChainedDynamicProperty.StringProperty;
/**
 * Tests for {@link ChainedDynamicProperty}: a chained property should resolve
 * to its own ("override") value when set, otherwise fall back through the
 * chain to the next property, and finally to the hard-coded default.
 * <p>
 * NOTE(review): these tests mutate global configuration state via
 * {@code ConfigurationManager}; the ordered set/clear sequences ARE the test,
 * so statement order must not change.
 */
public class ChainedDynamicPropertyTest {

    @Test
    public void testString() throws Exception {
        // Chain: overrideString -> defaultString -> "default-default".
        DynamicStringProperty pString = DynamicPropertyFactory.getInstance().getStringProperty("defaultString", "default-default");
        ChainedDynamicProperty.StringProperty fString = new ChainedDynamicProperty.StringProperty("overrideString", pString);

        // Nothing set yet: hard-coded default wins.
        assertTrue("default-default".equals(fString.get()));

        // The fallback property takes effect once set.
        ConfigurationManager.getConfigInstance().setProperty("defaultString", "default");
        assertTrue("default".equals(fString.get()));

        // The override beats the fallback while present.
        ConfigurationManager.getConfigInstance().setProperty("overrideString", "override");
        assertTrue("override".equals(fString.get()));

        // Clearing walks back down the chain.
        ConfigurationManager.getConfigInstance().clearProperty("overrideString");
        assertTrue("default".equals(fString.get()));

        ConfigurationManager.getConfigInstance().clearProperty("defaultString");
        assertTrue("default-default".equals(fString.get()));

        assertEquals("default-default", fString.getDefaultValue());
    }

    @Test
    public void testInteger() throws Exception {
        // Chain: overrideInt -> defaultInt -> -1.
        DynamicIntProperty pInt = DynamicPropertyFactory.getInstance().getIntProperty("defaultInt", -1);
        // Explicitly reset defaultInt in case another test left it set.
        ConfigurationManager.getConfigInstance().setProperty("defaultInt", -1);
        ChainedDynamicProperty.IntProperty fInt = new ChainedDynamicProperty.IntProperty("overrideInt", pInt);

        assertTrue(-1 == fInt.get());

        ConfigurationManager.getConfigInstance().setProperty("defaultInt", 10);
        assertTrue(10 == fInt.get());

        ConfigurationManager.getConfigInstance().setProperty("overrideInt", 11);
        assertTrue(11 == fInt.get());

        ConfigurationManager.getConfigInstance().clearProperty("overrideInt");
        assertTrue(10 == fInt.get());

        ConfigurationManager.getConfigInstance().clearProperty("defaultInt");
        assertTrue(-1 == fInt.get());

        assertEquals(Integer.valueOf(-1), fInt.getDefaultValue());
    }

    @Test
    public void testBoolean() throws Exception {
        // NOTE(review): sets an unrelated property; presumably verifies the
        // boolean chain ignores foreign config keys — confirm intent.
        ConfigurationManager.getConfigInstance().setProperty("defaultInt", 1234);

        // Chain: overrideBoolean -> defaultBoolean -> FALSE.
        DynamicBooleanPropertyThatSupportsNull pBoolean = new DynamicBooleanPropertyThatSupportsNull("defaultBoolean", Boolean.FALSE);
        ConfigurationManager.getConfigInstance().setProperty("defaultBoolean", Boolean.TRUE);
        ChainedDynamicProperty.BooleanProperty fBoolean = new ChainedDynamicProperty.BooleanProperty("overrideBoolean", pBoolean);

        assertTrue(fBoolean.get());

        ConfigurationManager.getConfigInstance().setProperty("defaultBoolean", Boolean.FALSE);
        assertFalse(fBoolean.get());

        ConfigurationManager.getConfigInstance().setProperty("overrideBoolean", Boolean.TRUE);
        assertTrue(fBoolean.get());

        ConfigurationManager.getConfigInstance().clearProperty("overrideBoolean");
        assertFalse(fBoolean.get());

        ConfigurationManager.getConfigInstance().clearProperty("defaultBoolean");
        assertFalse(fBoolean.get());

        assertFalse(fBoolean.getDefaultValue());
    }

    @Test
    public void testFloat() throws Exception {
        // Chain: overrideFloat -> defaultFloat -> -1.0f.
        DynamicFloatProperty pFloat = DynamicPropertyFactory.getInstance().getFloatProperty("defaultFloat", -1.0f);
        ChainedDynamicProperty.FloatProperty fFloat = new ChainedDynamicProperty.FloatProperty("overrideFloat", pFloat);

        assertTrue(-1.0f == fFloat.get());

        ConfigurationManager.getConfigInstance().setProperty("defaultFloat", 10.0f);
        assertTrue(10.0f == fFloat.get());

        ConfigurationManager.getConfigInstance().setProperty("overrideFloat", 11.0f);
        assertTrue(11.0f == fFloat.get());

        ConfigurationManager.getConfigInstance().clearProperty("overrideFloat");
        assertTrue(10.0f == fFloat.get());

        ConfigurationManager.getConfigInstance().clearProperty("defaultFloat");
        assertTrue(-1.0f == fFloat.get());

        assertEquals(Float.valueOf(-1.0f), fFloat.getDefaultValue());
    }

    @Test
    public void testChainingString() throws Exception {
        // Three-level chain: node3 -> node2 -> node1 -> "v1". The most
        // specific node that currently has a value wins.
        ConfigurationManager.getConfigInstance().setProperty("node1", "v1");
        ConfigurationManager.getConfigInstance().clearProperty("node2");
        ConfigurationManager.getConfigInstance().clearProperty("node3");
        DynamicStringProperty node1 = DynamicPropertyFactory.getInstance().getStringProperty("node1", "v1");
        StringProperty node2 = new ChainedDynamicProperty.StringProperty("node2", node1);
        ChainedDynamicProperty.StringProperty node3 = new ChainedDynamicProperty.StringProperty("node3", node2);

        assertTrue("" + node3.get(), "v1".equals(node3.get()));

        ConfigurationManager.getConfigInstance().setProperty("node1", "v11");
        assertTrue("v11".equals(node3.get()));

        ConfigurationManager.getConfigInstance().setProperty("node2", "v22");
        assertTrue("v22".equals(node3.get()));

        ConfigurationManager.getConfigInstance().clearProperty("node1");
        assertTrue("v22".equals(node3.get()));

        ConfigurationManager.getConfigInstance().setProperty("node3", "v33");
        assertTrue("v33".equals(node3.get()));

        ConfigurationManager.getConfigInstance().clearProperty("node2");
        assertTrue("v33".equals(node3.get()));

        // A less specific node never overrides a more specific one.
        ConfigurationManager.getConfigInstance().setProperty("node2", "v222");
        assertTrue("v33".equals(node3.get()));

        ConfigurationManager.getConfigInstance().clearProperty("node3");
        assertTrue("v222".equals(node3.get()));

        ConfigurationManager.getConfigInstance().clearProperty("node2");
        assertTrue("v1".equals(node3.get()));

        ConfigurationManager.getConfigInstance().setProperty("node2", "v2222");
        assertTrue("v2222".equals(node3.get()));

        assertEquals("v1", node3.getDefaultValue());
    }

    @Test
    public void testChainingInteger() throws Exception {
        // Same three-level chain as testChainingString, with ints.
        DynamicIntProperty node1 = DynamicPropertyFactory.getInstance().getIntProperty("node1", 1);
        IntProperty node2 = new ChainedDynamicProperty.IntProperty("node2", node1);
        ChainedDynamicProperty.IntProperty node3 = new ChainedDynamicProperty.IntProperty("node3", node2);

        assertTrue("" + node3.get(), 1 == node3.get());

        ConfigurationManager.getConfigInstance().setProperty("node1", 11);
        assertTrue(11 == node3.get());

        ConfigurationManager.getConfigInstance().setProperty("node2", 22);
        assertTrue(22 == node3.get());

        ConfigurationManager.getConfigInstance().clearProperty("node1");
        assertTrue(22 == node3.get());

        ConfigurationManager.getConfigInstance().setProperty("node3", 33);
        assertTrue(33 == node3.get());

        ConfigurationManager.getConfigInstance().clearProperty("node2");
        assertTrue(33 == node3.get());

        ConfigurationManager.getConfigInstance().setProperty("node2", 222);
        assertTrue(33 == node3.get());

        ConfigurationManager.getConfigInstance().clearProperty("node3");
        assertTrue(222 == node3.get());

        ConfigurationManager.getConfigInstance().clearProperty("node2");
        assertTrue(1 == node3.get());

        ConfigurationManager.getConfigInstance().setProperty("node2", 2222);
        assertTrue(2222 == node3.get());
        // NOTE(review): unlike testChainingString, there is no
        // getDefaultValue() assertion here.
    }

    @Test
    public void testAddCallback() throws Exception {
        final DynamicStringProperty node1 = DynamicPropertyFactory.getInstance().getStringProperty("n1", "n1");
        final ChainedDynamicProperty.StringProperty node2 = new ChainedDynamicProperty.StringProperty("n2", node1);

        // Count how many times the callback registered on node2 fires; per
        // these assertions it fires only for changes to node2's OWN key
        // ("n2"), not for changes to the fallback ("n1").
        final AtomicInteger callbackCount = new AtomicInteger(0);
        node2.addCallback(new Runnable() {
            @Override
            public void run() {
                callbackCount.incrementAndGet();
            }
        });

        assertTrue(0 == callbackCount.get());

        assertTrue("n1".equals(node2.get()));
        assertTrue(0 == callbackCount.get());

        // Changing the fallback does not fire the callback.
        ConfigurationManager.getConfigInstance().setProperty("n1", "n11");
        assertTrue("n11".equals(node2.get()));
        assertTrue(0 == callbackCount.get());

        // Changing node2's own key fires it.
        ConfigurationManager.getConfigInstance().setProperty("n2", "n22");
        assertTrue("n22".equals(node2.get()));
        assertTrue(1 == callbackCount.get());

        ConfigurationManager.getConfigInstance().clearProperty("n1");
        assertTrue("n22".equals(node2.get()));
        assertTrue(1 == callbackCount.get());

        ConfigurationManager.getConfigInstance().setProperty("n2", "n222");
        assertTrue("n222".equals(node2.get()));
        assertTrue(2 == callbackCount.get());

        // Clearing node2's own key also counts as a change.
        ConfigurationManager.getConfigInstance().clearProperty("n2");
        assertTrue("n1".equals(node2.get()));
        assertTrue(3 == callbackCount.get());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.Collection;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.IgniteUtils;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.DHT_LOCAL;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.LOCAL;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.NEAR_LOCAL;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.OWNER;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.READY;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.REENTRY;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.REMOVED;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.SINGLE_IMPLICIT;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.TX;
import static org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate.Mask.USED;
/**
* Lock candidate.
*/
public class GridCacheMvccCandidate implements Externalizable,
Comparable<GridCacheMvccCandidate> {
/** Serial version UID (class relies on Externalizable methods, not default serialization). */
private static final long serialVersionUID = 0L;

/** ID generator. Shared by all candidates in this JVM; see {@link #id}. */
private static final AtomicLong IDGEN = new AtomicLong();

/** Locking node ID. */
@GridToStringInclude
private UUID nodeId;

/** Lock version. */
@GridToStringInclude
private GridCacheVersion ver;

/** Maximum wait time. */
@GridToStringInclude
private long timeout;

/** Candidate timestamp (creation time in milliseconds). */
@GridToStringInclude
private long ts;

/** Thread ID. */
@GridToStringInclude
private long threadId;

/** Use flags approach to preserve space. Bits are defined by the Mask enum (LOCAL, TX, ...). */
@GridToStringExclude
private short flags;

/** ID. Locally unique, monotonically increasing (taken from IDGEN). */
private long id;

/** Topology version. */
@SuppressWarnings( {"TransientFieldNotInitialized"})
@GridToStringInclude
private transient volatile AffinityTopologyVersion topVer = AffinityTopologyVersion.NONE;

/** Linked reentry. Head of a singly-linked list of reentry candidates; see reenter()/unenter(). */
private GridCacheMvccCandidate reentry;

/** Previous lock for the thread. */
@GridToStringExclude
private transient volatile GridCacheMvccCandidate prev;

/** Next lock for the thread. */
@GridToStringExclude
private transient volatile GridCacheMvccCandidate next;

/** Parent entry. */
@GridToStringExclude
private transient GridCacheEntryEx parent;

/** Alternate node ID specifying additional node involved in this lock. */
private transient volatile UUID otherNodeId;

/** Other lock version (near version vs dht version). */
private transient GridCacheVersion otherVer;

/** Mapped DHT node IDs. */
@GridToStringInclude
private transient volatile Collection<ClusterNode> mappedDhtNodes;

/** Mapped near node IDs. */
@GridToStringInclude
private transient volatile Collection<ClusterNode> mappedNearNodes;

/** Owned lock version by the moment this candidate was added. */
@GridToStringInclude
private transient volatile GridCacheVersion ownerVer;
/**
 * Empty constructor required by {@link Externalizable}.
 * State is populated later via {@code readExternal}.
 */
public GridCacheMvccCandidate() {
    /* No-op. */
}
/**
 * Creates a fully initialized lock candidate and assigns it a locally
 * unique ID and a creation timestamp.
 *
 * @param parent Parent entry.
 * @param nodeId Requesting node ID.
 * @param otherNodeId Near node ID.
 * @param otherVer Other version.
 * @param threadId Requesting thread ID.
 * @param ver Cache version.
 * @param timeout Maximum wait time.
 * @param loc {@code True} if the lock is local.
 * @param reentry {@code True} if candidate is for reentry.
 * @param tx Transaction flag.
 * @param singleImplicit Single-key-implicit-transaction flag.
 * @param nearLoc Near-local flag.
 * @param dhtLoc DHT local flag.
 */
public GridCacheMvccCandidate(
    GridCacheEntryEx parent,
    UUID nodeId,
    @Nullable UUID otherNodeId,
    @Nullable GridCacheVersion otherVer,
    long threadId,
    GridCacheVersion ver,
    long timeout,
    boolean loc,
    boolean reentry,
    boolean tx,
    boolean singleImplicit,
    boolean nearLoc,
    boolean dhtLoc) {
    assert nodeId != null;
    assert ver != null;
    assert parent != null;

    this.parent = parent;
    this.nodeId = nodeId;
    this.otherNodeId = otherNodeId;
    this.otherVer = otherVer;
    this.threadId = threadId;
    this.ver = ver;
    this.timeout = timeout;

    // Pack the boolean properties into the compact flags bit field.
    mask(LOCAL, loc);
    mask(REENTRY, reentry);
    mask(TX, tx);
    mask(SINGLE_IMPLICIT, singleImplicit);
    mask(NEAR_LOCAL, nearLoc);
    mask(DHT_LOCAL, dhtLoc);

    // Creation time and a JVM-wide unique, monotonically increasing ID.
    ts = U.currentTimeMillis();

    id = IDGEN.incrementAndGet();
}
/**
* Sets mask value.
*
* @param mask Mask.
* @param on Flag.
*/
private void mask(Mask mask, boolean on) {
flags = mask.set(flags, on);
}
    /**
     * @return Raw flag bits (see {@link Mask}).
     */
    public short flags() {
        return flags;
    }

    /**
     * @return Parent cache entry.
     */
    @SuppressWarnings({"unchecked"})
    public <V> GridCacheEntryEx parent() {
        return parent;
    }

    /**
     * @return Topology version for which this lock was acquired.
     */
    public AffinityTopologyVersion topologyVersion() {
        return topVer;
    }

    /**
     * @param topVer Topology version.
     */
    public void topologyVersion(AffinityTopologyVersion topVer) {
        this.topVer = topVer;
    }
    /**
     * Creates a new reentry candidate copying this candidate's state and pushes
     * it onto the head of the linked reentry list.
     *
     * @return New reentry candidate.
     */
    public GridCacheMvccCandidate reenter() {
        GridCacheMvccCandidate old = reentry;

        GridCacheMvccCandidate reentry = new GridCacheMvccCandidate(
            parent,
            nodeId,
            otherNodeId,
            otherVer,
            threadId,
            ver,
            timeout,
            local(),
            /*reentry*/true,
            tx(),
            singleImplicit(),
            nearLocal(),
            dhtLocal());

        reentry.topVer = topVer;

        // Link the previous head (if any) behind the new reentry.
        if (old != null)
            reentry.reentry = old;

        this.reentry = reentry;

        return reentry;
    }

    /**
     * Pops the head of the linked reentry list.
     *
     * @return Removed reentry candidate or {@code null} if there were none.
     */
    @Nullable public GridCacheMvccCandidate unenter() {
        if (reentry != null) {
            GridCacheMvccCandidate old = reentry;

            // Link to next.
            reentry = reentry.reentry;

            return old;
        }

        return null;
    }
    /**
     * @param parent Sets lock's parent entry.
     */
    public void parent(GridCacheEntryEx parent) {
        assert parent != null;

        this.parent = parent;
    }

    /**
     * @return Requesting node ID.
     */
    public UUID nodeId() {
        return nodeId;
    }

    /**
     * @return Near or DHT node ID (the "other" side of this lock), may be {@code null}.
     */
    public UUID otherNodeId() {
        return otherNodeId;
    }

    /**
     * @param otherNodeId Near or DHT node ID.
     */
    public void otherNodeId(UUID otherNodeId) {
        this.otherNodeId = otherNodeId;
    }

    /**
     * @return Mapped DHT nodes.
     */
    public Collection<ClusterNode> mappedDhtNodes() {
        return mappedDhtNodes;
    }

    /**
     * @return Mapped near nodes.
     */
    public Collection<ClusterNode> mappedNearNodes() {
        return mappedNearNodes;
    }

    /**
     * @param mappedDhtNodes Mapped DHT node IDs.
     * @param mappedNearNodes Mapped near node IDs.
     */
    public void mappedNodeIds(Collection<ClusterNode> mappedDhtNodes, Collection<ClusterNode> mappedNearNodes) {
        this.mappedDhtNodes = mappedDhtNodes;
        this.mappedNearNodes = mappedNearNodes;
    }
/**
* @param node Node to remove.
*/
public void removeMappedNode(ClusterNode node) {
if (mappedDhtNodes.contains(node))
mappedDhtNodes = new ArrayList<>(F.view(mappedDhtNodes, F.notEqualTo(node)));
if (mappedNearNodes != null && mappedNearNodes.contains(node))
mappedNearNodes = new ArrayList<>(F.view(mappedNearNodes, F.notEqualTo(node)));
}
    /**
     * @return Other (near vs dht counterpart) version, may be {@code null} if not yet set.
     */
    public GridCacheVersion otherVersion() {
        return otherVer;
    }

    /**
     * Sets mapped version for candidate. For dht local candidates {@code otherVer} is near local candidate version.
     * For near local candidates {@code otherVer} is dht mapped candidate version.
     *
     * @param otherVer Alternative candidate version.
     * @return {@code True} if other version was set (or already equals the argument),
     *      {@code false} if a different other version is already set.
     */
    public boolean otherVersion(GridCacheVersion otherVer) {
        assert otherVer != null;

        if (this.otherVer == null) {
            this.otherVer = otherVer;

            return true;
        }

        // Idempotent: re-setting the same version succeeds.
        return this.otherVer.equals(otherVer);
    }

    /**
     * Sets owned version for proper lock ordering when remote candidate is added.
     *
     * @param ownerVer Version of owned candidate by the moment this candidate was added.
     * @return {@code True} if owned version was set (or already equals the argument), {@code false} otherwise.
     */
    public boolean ownerVersion(GridCacheVersion ownerVer) {
        assert ownerVer != null;

        if (this.ownerVer == null) {
            this.ownerVer = ownerVer;

            return true;
        }

        // Idempotent: re-setting the same version succeeds.
        return this.ownerVer.equals(ownerVer);
    }

    /**
     * @return Version of owned candidate by the time this candidate was added, or {@code null}
     * if there were no owned candidates.
     */
    @Nullable public GridCacheVersion ownerVersion() {
        return ownerVer;
    }
    /**
     * @return ID of the thread that requested this lock.
     * @see Thread#getId()
     */
    public long threadId() {
        return threadId;
    }

    /**
     * @return Lock version.
     */
    public GridCacheVersion version() {
        return ver;
    }

    /**
     * @return Maximum wait time.
     */
    public long timeout() {
        return timeout;
    }

    /**
     * @return Timestamp (ms) at the time of entering pending set.
     */
    public long timestamp() {
        return ts;
    }
    /**
     * @return {@code True} if lock is local.
     */
    public boolean local() {
        return LOCAL.get(flags());
    }

    /**
     * @return {@code True} if transaction flag is set.
     */
    public boolean tx() {
        return TX.get(flags());
    }

    /**
     * @return {@code True} if single-key implicit transaction.
     */
    public boolean singleImplicit() {
        return SINGLE_IMPLICIT.get(flags());
    }

    /**
     * @return Near local flag.
     */
    public boolean nearLocal() {
        return NEAR_LOCAL.get(flags());
    }

    /**
     * @return DHT local flag.
     */
    public boolean dhtLocal() {
        return DHT_LOCAL.get(flags());
    }

    /**
     * @return {@code True} if this candidate is a reentry.
     */
    public boolean reentry() {
        return REENTRY.get(flags());
    }

    /**
     * Sets reentry flag (one-way: there is no way to clear it).
     */
    public void setReentry() {
        mask(REENTRY, true);
    }

    /**
     * @return Ready flag.
     */
    public boolean ready() {
        return READY.get(flags());
    }

    /**
     * Sets ready flag (one-way: there is no way to clear it).
     */
    public void setReady() {
        mask(READY, true);
    }

    /**
     * @return {@code True} if lock was released.
     */
    public boolean used() {
        return USED.get(flags());
    }

    /**
     * Sets used flag (one-way: there is no way to clear it).
     */
    public void setUsed() {
        mask(USED, true);
    }

    /**
     * @return Removed flag.
     */
    public boolean removed() {
        return REMOVED.get(flags());
    }

    /**
     * Sets removed flag (one-way: there is no way to clear it).
     */
    public void setRemoved() {
        mask(REMOVED, true);
    }

    /**
     * @return {@code True} if is or was an owner.
     */
    public boolean owner() {
        return OWNER.get(flags());
    }

    /**
     * Sets owner flag (one-way: there is no way to clear it).
     */
    public void setOwner() {
        mask(OWNER, true);
    }
    /**
     * @return Lock that comes before in the same thread, possibly {@code null}.
     */
    @Nullable public GridCacheMvccCandidate previous() {
        return prev;
    }

    /**
     * @param prev Lock that comes before in the same thread (must have the same thread ID).
     */
    public void previous(GridCacheMvccCandidate prev) {
        assert threadId == prev.threadId : "Invalid threadId [this=" + this + ", prev=" + prev + ']';

        this.prev = prev;
    }

    /**
     * @return Next candidate in this thread, possibly {@code null}.
     */
    public GridCacheMvccCandidate next() {
        return next;
    }

    /**
     * @param next Next candidate in this thread.
     */
    public void next(GridCacheMvccCandidate next) {
        this.next = next;
    }
/**
* @return Key.
*/
public IgniteTxKey key() {
GridCacheEntryEx parent0 = parent;
if (parent0 == null)
throw new IllegalStateException("Parent entry was not initialized for MVCC candidate: " + this);
return parent0.txKey();
}
/**
* Checks if this candidate matches version or thread-nodeId combination.
*
* @param nodeId Node ID to check.
* @param ver Version to check.
* @param threadId Thread ID to check.
* @return {@code True} if matched.
*/
public boolean matches(GridCacheVersion ver, UUID nodeId, long threadId) {
return ver.equals(this.ver) || (nodeId.equals(this.nodeId) && threadId == this.threadId);
}
    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        // Field order must match readExternal() exactly.
        IgniteUtils.writeUuid(out, nodeId);

        CU.writeVersion(out, ver);

        out.writeLong(timeout);
        out.writeLong(threadId);
        out.writeLong(id);
        out.writeShort(flags());
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        nodeId = IgniteUtils.readUuid(in);

        ver = CU.readVersion(in);

        timeout = in.readLong();
        threadId = in.readLong();
        id = in.readLong();

        short flags = in.readShort();

        // Only OWNER, USED and TX flags survive serialization; the remaining
        // flags (LOCAL, READY, REENTRY, ...) are node-local and stay cleared.
        mask(OWNER, OWNER.get(flags));
        mask(USED, USED.get(flags));
        mask(TX, TX.get(flags));

        // Timestamp is node-local: reset to deserialization time.
        ts = U.currentTimeMillis();
    }
    /** {@inheritDoc} */
    @Override public int compareTo(GridCacheMvccCandidate o) {
        if (o == this)
            return 0;

        int c = ver.compareTo(o.ver);

        // Break version ties by key equality, then by the globally unique
        // candidate ID, so that compareTo() stays consistent with equals().
        if (c == 0)
            return key().equals(o.key()) ? 0 : id < o.id ? -1 : 1;

        return c;
    }
/** {@inheritDoc} */
@SuppressWarnings({"unchecked"})
@Override public boolean equals(Object o) {
if (o == null)
return false;
if (o == this)
return true;
GridCacheMvccCandidate other = (GridCacheMvccCandidate)o;
assert key() != null && other.key() != null : "Key is null [this=" + this + ", other=" + o + ']';
return ver.equals(other.ver) && key().equals(other.key());
}
    /** {@inheritDoc} */
    @Override public int hashCode() {
        // Hash on version only: candidates equal by equals() share 'ver',
        // so this is consistent (though weaker) with equals().
        return ver.hashCode();
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        // Snapshot volatile prev/next once so the output is internally consistent.
        GridCacheMvccCandidate prev = previous();
        GridCacheMvccCandidate next = next();

        return S.toString(GridCacheMvccCandidate.class, this,
            "key", parent == null ? null : parent.key(),
            "masks", Mask.toString(flags()),
            "prevVer", (prev == null ? null : prev.version()),
            "nextVer", (next == null ? null : next.version()));
    }
    /**
     * Bit masks for the packed {@code flags} field.
     * NOTE(review): bit 0x20 is skipped between USED and TX — presumably a
     * removed flag; confirm history before reusing that bit.
     */
    @SuppressWarnings({"PackageVisibleInnerClass"})
    enum Mask {
        /** */
        LOCAL(0x01),

        /** */
        OWNER(0x02),

        /** */
        READY(0x04),

        /** */
        REENTRY(0x08),

        /** */
        USED(0x10),

        /** */
        TX(0x40),

        /** */
        SINGLE_IMPLICIT(0x80),

        /** */
        DHT_LOCAL(0x100),

        /** */
        NEAR_LOCAL(0x200),

        /** */
        REMOVED(0x400);

        /** All mask values (cached; values() allocates a new array each call). */
        private static final Mask[] MASKS = values();

        /** Mask bit. */
        private final short bit;

        /**
         * @param bit Mask bit value (a single distinct bit per constant).
         */
        Mask(int bit) {
            this.bit = (short)bit;
        }

        /**
         * @param flags Flags to check.
         * @return {@code True} if this mask's bit is set.
         */
        boolean get(short flags) {
            return (flags & bit) == bit;
        }

        /**
         * @param flags Current flags.
         * @param on Whether to set or clear this mask's bit.
         * @return Updated flags.
         */
        short set(short flags, boolean on) {
            return (short)(on ? flags | bit : flags & ~bit);
        }

        /**
         * @param flags Flags to check.
         * @return {@code 1} if mask is set, {@code 0} otherwise.
         */
        int bit(short flags) {
            return get(flags) ? 1 : 0;
        }

        /**
         * @param flags Flags.
         * @return Human-readable {@code name=bit} listing of all flags,
         *      separated by {@code '|'}.
         */
        static String toString(short flags) {
            SB sb = new SB();

            for (Mask m : MASKS) {
                // Separator before every entry except the first.
                if (m.ordinal() != 0)
                    sb.a('|');

                sb.a(m.name().toLowerCase()).a('=').a(m.bit(flags));
            }

            return sb.toString();
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/talent/v4beta1/application_service.proto
package com.google.cloud.talent.v4beta1;
/**
*
*
* <pre>
* Request for updating a specified application.
* </pre>
*
* Protobuf type {@code google.cloud.talent.v4beta1.UpdateApplicationRequest}
*/
public final class UpdateApplicationRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.talent.v4beta1.UpdateApplicationRequest)
UpdateApplicationRequestOrBuilder {
  // NOTE(review): protoc-generated code — edits here are lost on regeneration;
  // change google/cloud/talent/v4beta1/application_service.proto instead.
  private static final long serialVersionUID = 0L;

  // Use UpdateApplicationRequest.newBuilder() to construct.
  private UpdateApplicationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateApplicationRequest() {}

  // Reflection hook used by the runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateApplicationRequest();
  }

  // Fields present on the wire but unknown to this schema version.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  /**
   * Stream-parsing constructor emitted by this protoc version: reads tagged
   * fields until end of stream, preserving unknown fields and attaching the
   * partially-parsed message to any parse exception.
   */
  private UpdateApplicationRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // End of stream.
            done = true;
            break;
          case 10: // Field 1 (application), wire type 2 (length-delimited).
            {
              com.google.cloud.talent.v4beta1.Application.Builder subBuilder = null;
              if (application_ != null) {
                // Merge into the previously seen value (last-one-wins merge semantics).
                subBuilder = application_.toBuilder();
              }
              application_ =
                  input.readMessage(
                      com.google.cloud.talent.v4beta1.Application.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(application_);
                application_ = subBuilder.buildPartial();
              }
              break;
            }
          case 18: // Field 2 (update_mask), wire type 2 (length-delimited).
            {
              com.google.protobuf.FieldMask.Builder subBuilder = null;
              if (updateMask_ != null) {
                subBuilder = updateMask_.toBuilder();
              }
              updateMask_ =
                  input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(updateMask_);
                updateMask_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              // Unknown tag: preserve it unless it signals end of group/stream.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  // Descriptor plumbing linking this class to its .proto definition.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.talent.v4beta1.ApplicationServiceProto
        .internal_static_google_cloud_talent_v4beta1_UpdateApplicationRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.talent.v4beta1.ApplicationServiceProto
        .internal_static_google_cloud_talent_v4beta1_UpdateApplicationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.talent.v4beta1.UpdateApplicationRequest.class,
            com.google.cloud.talent.v4beta1.UpdateApplicationRequest.Builder.class);
  }

  public static final int APPLICATION_FIELD_NUMBER = 1;

  // null means "field not set" on the wire.
  private com.google.cloud.talent.v4beta1.Application application_;

  /**
   * Required. The application resource to replace the current resource in the system.
   *
   * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return Whether the application field is set.
   */
  @java.lang.Override
  public boolean hasApplication() {
    return application_ != null;
  }

  /**
   * Required. The application resource to replace the current resource in the system.
   * Never returns {@code null}: yields the default instance when unset.
   *
   * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The application.
   */
  @java.lang.Override
  public com.google.cloud.talent.v4beta1.Application getApplication() {
    return application_ == null
        ? com.google.cloud.talent.v4beta1.Application.getDefaultInstance()
        : application_;
  }

  /**
   * Required. The application resource to replace the current resource in the system
   * (read-only message-or-builder view).
   *
   * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.cloud.talent.v4beta1.ApplicationOrBuilder getApplicationOrBuilder() {
    return getApplication();
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;

  // null means "field not set" on the wire.
  private com.google.protobuf.FieldMask updateMask_;

  /**
   * Strongly recommended for the best service experience.
   * If update_mask is provided, only the specified top-level fields of
   * [Application][google.cloud.talent.v4beta1.Application] are updated;
   * otherwise all fields are updated.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return updateMask_ != null;
  }

  /**
   * Strongly recommended for the best service experience.
   * If update_mask is provided, only the specified top-level fields of
   * [Application][google.cloud.talent.v4beta1.Application] are updated;
   * otherwise all fields are updated.
   * Never returns {@code null}: yields the default instance when unset.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   * Read-only message-or-builder view of the update mask.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return getUpdateMask();
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required proto2-style fields, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Unset (null) fields are simply omitted from the wire format.
    if (application_ != null) {
      output.writeMessage(1, getApplication());
    }
    if (updateMask_ != null) {
      output.writeMessage(2, getUpdateMask());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (application_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getApplication());
    }
    if (updateMask_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.talent.v4beta1.UpdateApplicationRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.talent.v4beta1.UpdateApplicationRequest other =
        (com.google.cloud.talent.v4beta1.UpdateApplicationRequest) obj;

    // Field-by-field comparison: presence must match, then values.
    if (hasApplication() != other.hasApplication()) return false;
    if (hasApplication()) {
      if (!getApplication().equals(other.getApplication())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated hash: fold in descriptor and each set field.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasApplication()) {
      hash = (37 * hash) + APPLICATION_FIELD_NUMBER;
      hash = (53 * hash) + getApplication().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points, one per input representation;
  // all delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the given message's fields.
  public static Builder newBuilder(
      com.google.cloud.talent.v4beta1.UpdateApplicationRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge for the (empty) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for updating a specified application.
* </pre>
*
* Protobuf type {@code google.cloud.talent.v4beta1.UpdateApplicationRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.talent.v4beta1.UpdateApplicationRequest)
com.google.cloud.talent.v4beta1.UpdateApplicationRequestOrBuilder {
    // Descriptor plumbing mirroring the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.talent.v4beta1.ApplicationServiceProto
          .internal_static_google_cloud_talent_v4beta1_UpdateApplicationRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.talent.v4beta1.ApplicationServiceProto
          .internal_static_google_cloud_talent_v4beta1_UpdateApplicationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.talent.v4beta1.UpdateApplicationRequest.class,
              com.google.cloud.talent.v4beta1.UpdateApplicationRequest.Builder.class);
    }

    // Construct using com.google.cloud.talent.v4beta1.UpdateApplicationRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No nested builders need eager init for this message.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset both the raw field and its lazily created sub-builder.
      if (applicationBuilder_ == null) {
        application_ = null;
      } else {
        application_ = null;
        applicationBuilder_ = null;
      }
      if (updateMaskBuilder_ == null) {
        updateMask_ = null;
      } else {
        updateMask_ = null;
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.talent.v4beta1.ApplicationServiceProto
          .internal_static_google_cloud_talent_v4beta1_UpdateApplicationRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.talent.v4beta1.UpdateApplicationRequest getDefaultInstanceForType() {
      return com.google.cloud.talent.v4beta1.UpdateApplicationRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.talent.v4beta1.UpdateApplicationRequest build() {
      com.google.cloud.talent.v4beta1.UpdateApplicationRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.talent.v4beta1.UpdateApplicationRequest buildPartial() {
      com.google.cloud.talent.v4beta1.UpdateApplicationRequest result =
          new com.google.cloud.talent.v4beta1.UpdateApplicationRequest(this);
      // Prefer the sub-builder's state when one has been created.
      if (applicationBuilder_ == null) {
        result.application_ = application_;
      } else {
        result.application_ = applicationBuilder_.build();
      }
      if (updateMaskBuilder_ == null) {
        result.updateMask_ = updateMask_;
      } else {
        result.updateMask_ = updateMaskBuilder_.build();
      }
      onBuilt();
      return result;
    }

    // Boilerplate delegations to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the typed merge when possible; fall back to reflection otherwise.
      if (other instanceof com.google.cloud.talent.v4beta1.UpdateApplicationRequest) {
        return mergeFrom((com.google.cloud.talent.v4beta1.UpdateApplicationRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: only set fields of 'other' overwrite/merge into this builder.
    public Builder mergeFrom(com.google.cloud.talent.v4beta1.UpdateApplicationRequest other) {
      if (other == com.google.cloud.talent.v4beta1.UpdateApplicationRequest.getDefaultInstance())
        return this;
      if (other.hasApplication()) {
        mergeApplication(other.getApplication());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required proto2-style fields.
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.talent.v4beta1.UpdateApplicationRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.talent.v4beta1.UpdateApplicationRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed, even on failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Raw value used until a sub-builder is requested; thereafter the
    // SingleFieldBuilderV3 is the source of truth.
    private com.google.cloud.talent.v4beta1.Application application_;

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.talent.v4beta1.Application,
            com.google.cloud.talent.v4beta1.Application.Builder,
            com.google.cloud.talent.v4beta1.ApplicationOrBuilder>
        applicationBuilder_;

    /**
     * Required. The application resource to replace the current resource in the system.
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return Whether the application field is set.
     */
    public boolean hasApplication() {
      return applicationBuilder_ != null || application_ != null;
    }

    /**
     * Required. The application resource to replace the current resource in the system.
     * Never returns {@code null}: yields the default instance when unset.
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The application.
     */
    public com.google.cloud.talent.v4beta1.Application getApplication() {
      if (applicationBuilder_ == null) {
        return application_ == null
            ? com.google.cloud.talent.v4beta1.Application.getDefaultInstance()
            : application_;
      } else {
        return applicationBuilder_.getMessage();
      }
    }

    /**
     * Required. The application resource to replace the current resource in the system.
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setApplication(com.google.cloud.talent.v4beta1.Application value) {
      if (applicationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        application_ = value;
        onChanged();
      } else {
        applicationBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * Required. The application resource to replace the current resource in the system.
     * Sets the field from a builder's current state.
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setApplication(
        com.google.cloud.talent.v4beta1.Application.Builder builderForValue) {
      if (applicationBuilder_ == null) {
        application_ = builderForValue.build();
        onChanged();
      } else {
        applicationBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * Required. The application resource to replace the current resource in the system.
     * Merges {@code value} into any existing application (proto message-merge semantics).
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder mergeApplication(com.google.cloud.talent.v4beta1.Application value) {
      if (applicationBuilder_ == null) {
        if (application_ != null) {
          application_ =
              com.google.cloud.talent.v4beta1.Application.newBuilder(application_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          application_ = value;
        }
        onChanged();
      } else {
        applicationBuilder_.mergeFrom(value);
      }
      return this;
    }

    /**
     * Required. The application resource to replace the current resource in the system.
     * Clears the field (both raw value and sub-builder).
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder clearApplication() {
      if (applicationBuilder_ == null) {
        application_ = null;
        onChanged();
      } else {
        application_ = null;
        applicationBuilder_ = null;
      }
      return this;
    }

    /**
     * Required. The application resource to replace the current resource in the system.
     * Lazily creates and returns the mutable sub-builder for in-place edits.
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.talent.v4beta1.Application.Builder getApplicationBuilder() {
      onChanged();
      return getApplicationFieldBuilder().getBuilder();
    }

    /**
     * Required. The application resource to replace the current resource in the system
     * (read-only message-or-builder view; does not force sub-builder creation).
     *
     * <code>.google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.talent.v4beta1.ApplicationOrBuilder getApplicationOrBuilder() {
      if (applicationBuilder_ != null) {
        return applicationBuilder_.getMessageOrBuilder();
      } else {
        return application_ == null
            ? com.google.cloud.talent.v4beta1.Application.getDefaultInstance()
            : application_;
      }
    }
/**
*
*
* <pre>
* Required. The application resource to replace the current resource in the system.
* </pre>
*
* <code>
* .google.cloud.talent.v4beta1.Application application = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.talent.v4beta1.Application,
com.google.cloud.talent.v4beta1.Application.Builder,
com.google.cloud.talent.v4beta1.ApplicationOrBuilder>
getApplicationFieldBuilder() {
if (applicationBuilder_ == null) {
applicationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.talent.v4beta1.Application,
com.google.cloud.talent.v4beta1.Application.Builder,
com.google.cloud.talent.v4beta1.ApplicationOrBuilder>(
getApplication(), getParentForChildren(), isClean());
application_ = null;
}
return applicationBuilder_;
}
    // Field 2: optional FieldMask restricting which Application fields are updated.
    // Only one of updateMask_ / updateMaskBuilder_ holds the value at a time; the
    // builder takes over once getUpdateMaskFieldBuilder() has been called.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return updateMaskBuilder_ != null || updateMask_ != null;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
onChanged();
} else {
updateMaskBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
onChanged();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (updateMask_ != null) {
updateMask_ =
com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
} else {
updateMask_ = value;
}
onChanged();
} else {
updateMaskBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
if (updateMaskBuilder_ == null) {
updateMask_ = null;
onChanged();
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
* If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
* [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
* A field mask to specify the application fields to be updated. Only
* top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
    // Delegate unknown-field handling to the superclass implementation; these
    // overrides exist only to return this concrete Builder type for chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.talent.v4beta1.UpdateApplicationRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.UpdateApplicationRequest)
  // Shared default instance, created eagerly in a static initializer.
  private static final com.google.cloud.talent.v4beta1.UpdateApplicationRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.talent.v4beta1.UpdateApplicationRequest();
  }
  public static com.google.cloud.talent.v4beta1.UpdateApplicationRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser: delegates to the parsing constructor of the outer message class.
  private static final com.google.protobuf.Parser<UpdateApplicationRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateApplicationRequest>() {
        @java.lang.Override
        public UpdateApplicationRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new UpdateApplicationRequest(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<UpdateApplicationRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateApplicationRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.talent.v4beta1.UpdateApplicationRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rekognition.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Summary information for an Amazon Rekognition Custom Labels dataset. For more information, see
 * <a>ProjectDescription</a>.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DatasetMetadata implements Serializable, Cloneable, StructuredPojo {

    /** The Unix timestamp for the date and time that the dataset was created. */
    private java.util.Date creationTimestamp;
    /** The type of the dataset. See {@link DatasetType}. */
    private String datasetType;
    /** The Amazon Resource Name (ARN) for the dataset. */
    private String datasetArn;
    /** The status for the dataset. See {@link DatasetStatus}. */
    private String status;
    /** The status message for the dataset. */
    private String statusMessage;
    /**
     * The status message code for the dataset operation. If a service error occurs, try the API call again later. If a
     * client error occurs, check the input parameters to the dataset API call that failed. See
     * {@link DatasetStatusMessageCode}.
     */
    private String statusMessageCode;

    /**
     * @param creationTimestamp
     *        The Unix timestamp for the date and time that the dataset was created.
     */
    public void setCreationTimestamp(java.util.Date creationTimestamp) {
        this.creationTimestamp = creationTimestamp;
    }

    /**
     * @return The Unix timestamp for the date and time that the dataset was created.
     */
    public java.util.Date getCreationTimestamp() {
        return this.creationTimestamp;
    }

    /**
     * @param creationTimestamp
     *        The Unix timestamp for the date and time that the dataset was created.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DatasetMetadata withCreationTimestamp(java.util.Date creationTimestamp) {
        setCreationTimestamp(creationTimestamp);
        return this;
    }

    /**
     * @param datasetType
     *        The type of the dataset.
     * @see DatasetType
     */
    public void setDatasetType(String datasetType) {
        this.datasetType = datasetType;
    }

    /**
     * @return The type of the dataset.
     * @see DatasetType
     */
    public String getDatasetType() {
        return this.datasetType;
    }

    /**
     * @param datasetType
     *        The type of the dataset.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetType
     */
    public DatasetMetadata withDatasetType(String datasetType) {
        setDatasetType(datasetType);
        return this;
    }

    /**
     * Enum overload; stores {@code datasetType.toString()}.
     *
     * @param datasetType
     *        The type of the dataset; must not be null.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetType
     */
    public DatasetMetadata withDatasetType(DatasetType datasetType) {
        this.datasetType = datasetType.toString();
        return this;
    }

    /**
     * @param datasetArn
     *        The Amazon Resource Name (ARN) for the dataset.
     */
    public void setDatasetArn(String datasetArn) {
        this.datasetArn = datasetArn;
    }

    /**
     * @return The Amazon Resource Name (ARN) for the dataset.
     */
    public String getDatasetArn() {
        return this.datasetArn;
    }

    /**
     * @param datasetArn
     *        The Amazon Resource Name (ARN) for the dataset.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DatasetMetadata withDatasetArn(String datasetArn) {
        setDatasetArn(datasetArn);
        return this;
    }

    /**
     * @param status
     *        The status for the dataset.
     * @see DatasetStatus
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * @return The status for the dataset.
     * @see DatasetStatus
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * @param status
     *        The status for the dataset.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetStatus
     */
    public DatasetMetadata withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * Enum overload; stores {@code status.toString()}.
     *
     * @param status
     *        The status for the dataset; must not be null.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetStatus
     */
    public DatasetMetadata withStatus(DatasetStatus status) {
        this.status = status.toString();
        return this;
    }

    /**
     * @param statusMessage
     *        The status message for the dataset.
     */
    public void setStatusMessage(String statusMessage) {
        this.statusMessage = statusMessage;
    }

    /**
     * @return The status message for the dataset.
     */
    public String getStatusMessage() {
        return this.statusMessage;
    }

    /**
     * @param statusMessage
     *        The status message for the dataset.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DatasetMetadata withStatusMessage(String statusMessage) {
        setStatusMessage(statusMessage);
        return this;
    }

    /**
     * @param statusMessageCode
     *        The status message code for the dataset operation. If a service error occurs, try the API call again
     *        later. If a client error occurs, check the input parameters to the dataset API call that failed.
     * @see DatasetStatusMessageCode
     */
    public void setStatusMessageCode(String statusMessageCode) {
        this.statusMessageCode = statusMessageCode;
    }

    /**
     * @return The status message code for the dataset operation. If a service error occurs, try the API call again
     *         later. If a client error occurs, check the input parameters to the dataset API call that failed.
     * @see DatasetStatusMessageCode
     */
    public String getStatusMessageCode() {
        return this.statusMessageCode;
    }

    /**
     * @param statusMessageCode
     *        The status message code for the dataset operation. If a service error occurs, try the API call again
     *        later. If a client error occurs, check the input parameters to the dataset API call that failed.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetStatusMessageCode
     */
    public DatasetMetadata withStatusMessageCode(String statusMessageCode) {
        setStatusMessageCode(statusMessageCode);
        return this;
    }

    /**
     * Enum overload; stores {@code statusMessageCode.toString()}.
     *
     * @param statusMessageCode
     *        The status message code for the dataset operation; must not be null.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetStatusMessageCode
     */
    public DatasetMetadata withStatusMessageCode(DatasetStatusMessageCode statusMessageCode) {
        this.statusMessageCode = statusMessageCode.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCreationTimestamp() != null)
            sb.append("CreationTimestamp: ").append(getCreationTimestamp()).append(",");
        if (getDatasetType() != null)
            sb.append("DatasetType: ").append(getDatasetType()).append(",");
        if (getDatasetArn() != null)
            sb.append("DatasetArn: ").append(getDatasetArn()).append(",");
        if (getStatus() != null)
            sb.append("Status: ").append(getStatus()).append(",");
        if (getStatusMessage() != null)
            sb.append("StatusMessage: ").append(getStatusMessage()).append(",");
        if (getStatusMessageCode() != null)
            sb.append("StatusMessageCode: ").append(getStatusMessageCode());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // !(obj instanceof ...) also rejects null, so no separate null check is needed.
        if (!(obj instanceof DatasetMetadata))
            return false;
        DatasetMetadata other = (DatasetMetadata) obj;
        // Objects.equals handles the both-null / one-null / both-non-null cases,
        // replacing the generated "a == null ^ b == null" pattern.
        return java.util.Objects.equals(other.getCreationTimestamp(), this.getCreationTimestamp())
                && java.util.Objects.equals(other.getDatasetType(), this.getDatasetType())
                && java.util.Objects.equals(other.getDatasetArn(), this.getDatasetArn())
                && java.util.Objects.equals(other.getStatus(), this.getStatus())
                && java.util.Objects.equals(other.getStatusMessage(), this.getStatusMessage())
                && java.util.Objects.equals(other.getStatusMessageCode(), this.getStatusMessageCode());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-prime accumulation (null -> 0) that the
        // generated code spelled out by hand, so existing hash values are unchanged.
        return java.util.Objects.hash(getCreationTimestamp(), getDatasetType(), getDatasetArn(), getStatus(),
                getStatusMessage(), getStatusMessageCode());
    }

    @Override
    public DatasetMetadata clone() {
        try {
            return (DatasetMetadata) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.rekognition.model.transform.DatasetMetadataMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.exceptions.*;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.composites.*;
import org.apache.cassandra.db.ColumnFamilyType;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.AlreadyExistsException;
import org.apache.cassandra.io.compress.CompressionParameters;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.transport.Event;
import org.apache.cassandra.utils.ByteBufferUtil;
/** A <code>CREATE TABLE</code> parsed from a CQL query statement. */
public class CreateTableStatement extends SchemaAlteringStatement
{
    // Cell name comparator for the new table.
    public CellNameType comparator;
    // Validator applied to regular (non-PK) cell values.
    private AbstractType<?> defaultValidator;
    // Validator for the partition key.
    private AbstractType<?> keyValidator;
    // Raw names of the partition-key and clustering columns, in declaration order.
    private final List<ByteBuffer> keyAliases = new ArrayList<ByteBuffer>();
    private final List<ByteBuffer> columnAliases = new ArrayList<ByteBuffer>();
    // Name of the compact value column, if any (see applyPropertiesTo()).
    private ByteBuffer valueAlias;
    private boolean isDense;
    // Declared columns with their types; PK/clustering entries are removed as
    // they are consumed during RawStatement.prepare().
    private final Map<ColumnIdentifier, AbstractType> columns = new HashMap<ColumnIdentifier, AbstractType>();
    private final Set<ColumnIdentifier> staticColumns;
    private final CFPropDefs properties;
    private final boolean ifNotExists;
public CreateTableStatement(CFName name, CFPropDefs properties, boolean ifNotExists, Set<ColumnIdentifier> staticColumns)
{
super(name);
this.properties = properties;
this.ifNotExists = ifNotExists;
this.staticColumns = staticColumns;
if (!this.properties.hasProperty(CFPropDefs.KW_COMPRESSION) && CFMetaData.DEFAULT_COMPRESSOR != null)
this.properties.addProperty(CFPropDefs.KW_COMPRESSION,
new HashMap<String, String>()
{{
put(CompressionParameters.SSTABLE_COMPRESSION, CFMetaData.DEFAULT_COMPRESSOR);
}});
}
    // Requires CREATE permission on the target keyspace.
    public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
    {
        state.hasKeyspaceAccess(keyspace(), Permission.CREATE);
    }
    public void validate(ClientState state)
    {
        // No-op here: the statement is validated in announceMigration(), when
        // getCFMetaData() builds and checks the new table metadata.
    }
// Column definitions
private List<ColumnDefinition> getColumns(CFMetaData cfm)
{
List<ColumnDefinition> columnDefs = new ArrayList<>(columns.size());
Integer componentIndex = comparator.isCompound() ? comparator.clusteringPrefixSize() : null;
for (Map.Entry<ColumnIdentifier, AbstractType> col : columns.entrySet())
{
ColumnIdentifier id = col.getKey();
columnDefs.add(staticColumns.contains(id)
? ColumnDefinition.staticDef(cfm, col.getKey().bytes, col.getValue(), componentIndex)
: ColumnDefinition.regularDef(cfm, col.getKey().bytes, col.getValue(), componentIndex));
}
return columnDefs;
}
    /**
     * Announces the new table schema to the cluster.
     *
     * @param isLocalOnly whether the change is applied locally only
     * @return true if the table was created; false if it already existed and IF NOT EXISTS was given
     * @throws RequestValidationException if the parsed parameters fail validation in getCFMetaData()
     */
    public boolean announceMigration(boolean isLocalOnly) throws RequestValidationException
    {
        try
        {
            MigrationManager.announceNewColumnFamily(getCFMetaData(), isLocalOnly);
            return true;
        }
        catch (AlreadyExistsException e)
        {
            // CREATE TABLE IF NOT EXISTS: an existing table is not an error, but
            // no schema change happened, so report false.
            if (ifNotExists)
                return false;
            throw e;
        }
    }
    // Schema-change event pushed to clients when the table is created.
    public Event.SchemaChange changeEvent()
    {
        return new Event.SchemaChange(Event.SchemaChange.Change.CREATED, Event.SchemaChange.Target.TABLE, keyspace(), columnFamily());
    }
/**
* Returns a CFMetaData instance based on the parameters parsed from this
* <code>CREATE</code> statement, or defaults where applicable.
*
* @return a CFMetaData instance corresponding to the values parsed from this statement
* @throws InvalidRequestException on failure to validate parsed parameters
*/
public CFMetaData getCFMetaData() throws RequestValidationException
{
CFMetaData newCFMD;
newCFMD = new CFMetaData(keyspace(),
columnFamily(),
ColumnFamilyType.Standard,
comparator);
applyPropertiesTo(newCFMD);
return newCFMD;
}
    /**
     * Populates the given CFMetaData from this statement: validators, the
     * regular/static column definitions, the PK/clustering/compact-value
     * columns derived from aliases, and finally the table properties.
     */
    public void applyPropertiesTo(CFMetaData cfmd) throws RequestValidationException
    {
        cfmd.defaultValidator(defaultValidator)
            .keyValidator(keyValidator)
            .addAllColumnDefinitions(getColumns(cfmd))
            .isDense(isDense);
        addColumnMetadataFromAliases(cfmd, keyAliases, keyValidator, ColumnDefinition.Kind.PARTITION_KEY);
        addColumnMetadataFromAliases(cfmd, columnAliases, comparator.asAbstractType(), ColumnDefinition.Kind.CLUSTERING_COLUMN);
        // valueAlias is only set for tables with a named compact value column.
        if (valueAlias != null)
            addColumnMetadataFromAliases(cfmd, Collections.singletonList(valueAlias), defaultValidator, ColumnDefinition.Kind.COMPACT_VALUE);
        properties.applyToCFMetadata(cfmd);
    }
    /**
     * Adds a column definition of the given kind for each non-null alias. For a
     * composite comparator, alias i gets the i-th component type; for a simple
     * comparator, at most one alias is expected and it takes the comparator
     * type itself (with no component index).
     */
    private void addColumnMetadataFromAliases(CFMetaData cfm, List<ByteBuffer> aliases, AbstractType<?> comparator, ColumnDefinition.Kind kind)
    {
        if (comparator instanceof CompositeType)
        {
            CompositeType ct = (CompositeType)comparator;
            for (int i = 0; i < aliases.size(); ++i)
                if (aliases.get(i) != null)
                    cfm.addOrReplaceColumnDefinition(new ColumnDefinition(cfm, aliases.get(i), ct.types.get(i), i, kind));
        }
        else
        {
            assert aliases.size() <= 1;
            if (!aliases.isEmpty() && aliases.get(0) != null)
                cfm.addOrReplaceColumnDefinition(new ColumnDefinition(cfm, aliases.get(0), comparator, null, kind));
        }
    }
public static class RawStatement extends CFStatement
{
        // Declared column name -> raw (unprepared) CQL type.
        private final Map<ColumnIdentifier, CQL3Type.Raw> definitions = new HashMap<>();
        public final CFPropDefs properties = new CFPropDefs();
        // PRIMARY KEY clauses as parsed; prepare() requires exactly one entry.
        private final List<List<ColumnIdentifier>> keyAliases = new ArrayList<List<ColumnIdentifier>>();
        private final List<ColumnIdentifier> columnAliases = new ArrayList<ColumnIdentifier>();
        private final Map<ColumnIdentifier, Boolean> definedOrdering = new LinkedHashMap<ColumnIdentifier, Boolean>(); // Insertion ordering is important
        private final Set<ColumnIdentifier> staticColumns = new HashSet<ColumnIdentifier>();
        private boolean useCompactStorage;
        // Multiset so prepare() can detect identifiers defined more than once.
        private final Multiset<ColumnIdentifier> definedNames = HashMultiset.create(1);
        private final boolean ifNotExists;
        public RawStatement(CFName name, boolean ifNotExists)
        {
            super(name);
            this.ifNotExists = ifNotExists;
        }
/**
* Transform this raw statement into a CreateTableStatement.
*/
public ParsedStatement.Prepared prepare() throws RequestValidationException
{
// Column family name
if (!columnFamily().matches("\\w+"))
throw new InvalidRequestException(String.format("\"%s\" is not a valid table name (must be alphanumeric character only: [0-9A-Za-z]+)", columnFamily()));
if (columnFamily().length() > Schema.NAME_LENGTH)
throw new InvalidRequestException(String.format("Table names shouldn't be more than %s characters long (got \"%s\")", Schema.NAME_LENGTH, columnFamily()));
for (Multiset.Entry<ColumnIdentifier> entry : definedNames.entrySet())
if (entry.getCount() > 1)
throw new InvalidRequestException(String.format("Multiple definition of identifier %s", entry.getElement()));
properties.validate();
CreateTableStatement stmt = new CreateTableStatement(cfName, properties, ifNotExists, staticColumns);
Map<ByteBuffer, CollectionType> definedMultiCellCollections = null;
for (Map.Entry<ColumnIdentifier, CQL3Type.Raw> entry : definitions.entrySet())
{
ColumnIdentifier id = entry.getKey();
CQL3Type pt = entry.getValue().prepare(keyspace());
if (pt.isCollection() && ((CollectionType) pt.getType()).isMultiCell())
{
if (definedMultiCellCollections == null)
definedMultiCellCollections = new HashMap<>();
definedMultiCellCollections.put(id.bytes, (CollectionType) pt.getType());
}
stmt.columns.put(id, pt.getType()); // we'll remove what is not a column below
}
if (keyAliases.isEmpty())
throw new InvalidRequestException("No PRIMARY KEY specifed (exactly one required)");
else if (keyAliases.size() > 1)
throw new InvalidRequestException("Multiple PRIMARY KEYs specifed (exactly one required)");
List<ColumnIdentifier> kAliases = keyAliases.get(0);
List<AbstractType<?>> keyTypes = new ArrayList<AbstractType<?>>(kAliases.size());
for (ColumnIdentifier alias : kAliases)
{
stmt.keyAliases.add(alias.bytes);
AbstractType<?> t = getTypeAndRemove(stmt.columns, alias);
if (t instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", alias));
if (staticColumns.contains(alias))
throw new InvalidRequestException(String.format("Static column %s cannot be part of the PRIMARY KEY", alias));
keyTypes.add(t);
}
stmt.keyValidator = keyTypes.size() == 1 ? keyTypes.get(0) : CompositeType.getInstance(keyTypes);
// Dense means that no part of the comparator stores a CQL column name. This means
// COMPACT STORAGE with at least one columnAliases (otherwise it's a thrift "static" CF).
stmt.isDense = useCompactStorage && !columnAliases.isEmpty();
// Handle column aliases
if (columnAliases.isEmpty())
{
if (useCompactStorage)
{
// There should remain some column definition since it is a non-composite "static" CF
if (stmt.columns.isEmpty())
throw new InvalidRequestException("No definition found that is not part of the PRIMARY KEY");
if (definedMultiCellCollections != null)
throw new InvalidRequestException("Non-frozen collection types are not supported with COMPACT STORAGE");
stmt.comparator = new SimpleSparseCellNameType(UTF8Type.instance);
}
else
{
stmt.comparator = definedMultiCellCollections == null
? new CompoundSparseCellNameType(Collections.<AbstractType<?>>emptyList())
: new CompoundSparseCellNameType.WithCollection(Collections.<AbstractType<?>>emptyList(), ColumnToCollectionType.getInstance(definedMultiCellCollections));
}
}
else
{
// If we use compact storage and have only one alias, it is a
// standard "dynamic" CF, otherwise it's a composite
if (useCompactStorage && columnAliases.size() == 1)
{
if (definedMultiCellCollections != null)
throw new InvalidRequestException("Collection types are not supported with COMPACT STORAGE");
ColumnIdentifier alias = columnAliases.get(0);
if (staticColumns.contains(alias))
throw new InvalidRequestException(String.format("Static column %s cannot be part of the PRIMARY KEY", alias));
stmt.columnAliases.add(alias.bytes);
AbstractType<?> at = getTypeAndRemove(stmt.columns, alias);
if (at instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", stmt.columnAliases.get(0)));
stmt.comparator = new SimpleDenseCellNameType(at);
}
else
{
List<AbstractType<?>> types = new ArrayList<AbstractType<?>>(columnAliases.size() + 1);
for (ColumnIdentifier t : columnAliases)
{
stmt.columnAliases.add(t.bytes);
AbstractType<?> type = getTypeAndRemove(stmt.columns, t);
if (type instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", t));
if (staticColumns.contains(t))
throw new InvalidRequestException(String.format("Static column %s cannot be part of the PRIMARY KEY", t));
types.add(type);
}
if (useCompactStorage)
{
if (definedMultiCellCollections != null)
throw new InvalidRequestException("Collection types are not supported with COMPACT STORAGE");
stmt.comparator = new CompoundDenseCellNameType(types);
}
else
{
stmt.comparator = definedMultiCellCollections == null
? new CompoundSparseCellNameType(types)
: new CompoundSparseCellNameType.WithCollection(types, ColumnToCollectionType.getInstance(definedMultiCellCollections));
}
}
}
if (!staticColumns.isEmpty())
{
// Only CQL3 tables can have static columns
if (useCompactStorage)
throw new InvalidRequestException("Static columns are not supported in COMPACT STORAGE tables");
// Static columns only make sense if we have at least one clustering column. Otherwise everything is static anyway
if (columnAliases.isEmpty())
throw new InvalidRequestException("Static columns are only useful (and thus allowed) if the table has at least one clustering column");
}
if (useCompactStorage && !stmt.columnAliases.isEmpty())
{
if (stmt.columns.isEmpty())
{
// The only value we'll insert will be the empty one, so the default validator don't matter
stmt.defaultValidator = BytesType.instance;
// We need to distinguish between
// * I'm upgrading from thrift so the valueAlias is null
// * I've defined my table with only a PK (and the column value will be empty)
// So, we use an empty valueAlias (rather than null) for the second case
stmt.valueAlias = ByteBufferUtil.EMPTY_BYTE_BUFFER;
}
else
{
if (stmt.columns.size() > 1)
throw new InvalidRequestException(String.format("COMPACT STORAGE with composite PRIMARY KEY allows no more than one column not part of the PRIMARY KEY (got: %s)", StringUtils.join(stmt.columns.keySet(), ", ")));
Map.Entry<ColumnIdentifier, AbstractType> lastEntry = stmt.columns.entrySet().iterator().next();
stmt.defaultValidator = lastEntry.getValue();
stmt.valueAlias = lastEntry.getKey().bytes;
stmt.columns.remove(lastEntry.getKey());
}
}
else
{
// For compact, we are in the "static" case, so we need at least one column defined. For non-compact however, having
// just the PK is fine since we have CQL3 row marker.
if (useCompactStorage && stmt.columns.isEmpty())
throw new InvalidRequestException("COMPACT STORAGE with non-composite PRIMARY KEY require one column not part of the PRIMARY KEY, none given");
// There is no way to insert/access a column that is not defined for non-compact storage, so
// the actual validator don't matter much (except that we want to recognize counter CF as limitation apply to them).
stmt.defaultValidator = !stmt.columns.isEmpty() && (stmt.columns.values().iterator().next() instanceof CounterColumnType)
? CounterColumnType.instance
: BytesType.instance;
}
// If we give a clustering order, we must explicitly do so for all aliases and in the order of the PK
if (!definedOrdering.isEmpty())
{
if (definedOrdering.size() > columnAliases.size())
throw new InvalidRequestException("Only clustering key columns can be defined in CLUSTERING ORDER directive");
int i = 0;
for (ColumnIdentifier id : definedOrdering.keySet())
{
ColumnIdentifier c = columnAliases.get(i);
if (!id.equals(c))
{
if (definedOrdering.containsKey(c))
throw new InvalidRequestException(String.format("The order of columns in the CLUSTERING ORDER directive must be the one of the clustering key (%s must appear before %s)", c, id));
else
throw new InvalidRequestException(String.format("Missing CLUSTERING ORDER for column %s", c));
}
++i;
}
}
return new ParsedStatement.Prepared(stmt);
}
private AbstractType<?> getTypeAndRemove(Map<ColumnIdentifier, AbstractType> columns, ColumnIdentifier t) throws InvalidRequestException
{
AbstractType type = columns.get(t);
if (type == null)
throw new InvalidRequestException(String.format("Unknown definition %s referenced in PRIMARY KEY", t));
if (type.isCollection() && type.isMultiCell())
throw new InvalidRequestException(String.format("Invalid collection type for PRIMARY KEY component %s", t));
columns.remove(t);
Boolean isReversed = definedOrdering.get(t);
return isReversed != null && isReversed ? ReversedType.getInstance(type) : type;
}
public void addDefinition(ColumnIdentifier def, CQL3Type.Raw type, boolean isStatic)
{
definedNames.add(def);
definitions.put(def, type);
if (isStatic)
staticColumns.add(def);
}
public void addKeyAliases(List<ColumnIdentifier> aliases)
{
keyAliases.add(aliases);
}
public void addColumnAlias(ColumnIdentifier alias)
{
columnAliases.add(alias);
}
public void setOrdering(ColumnIdentifier alias, boolean reversed)
{
definedOrdering.put(alias, reversed);
}
public void setCompactStorage()
{
useCompactStorage = true;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.junit;
import java.util.Map;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
/**
* A JUnit Rule that embeds an ActiveMQ Artemis ClientProducer bound to a specific address into a test.
*
* This JUnit Rule is designed to simplify using ActiveMQ Artemis clients in unit tests. Adding the rule to a test will startup
* a ClientProducer, which can then be used to feed messages to the bound address on an ActiveMQ Artemis server.
*
* <pre><code>
* public class SimpleTest {
* @Rule
* public ActiveMQProducerResource producer = new ActiveMQProducerResource( "vm://0", "test.queue");
*
* @Test
* public void testSomething() throws Exception {
* // Use the embedded ClientProducer here
* producer.sendMessage( "String Body" );
* }
* }
* </code></pre>
*/
public class ActiveMQProducerResource extends AbstractActiveMQClientResource {

    // Whether created messages are durable; defaults to true (see setUseDurableMessage)
    boolean useDurableMessage = true;
    // Target address the producer is bound to; validated non-null by the public constructors
    SimpleString address = null;
    ClientProducer producer;

    // Protected constructors leave the address unset; subclasses must provide one before use
    protected ActiveMQProducerResource(String url, String username, String password) {
        super(url, username, password);
    }

    protected ActiveMQProducerResource(String url) {
        super(url);
    }

    protected ActiveMQProducerResource(ServerLocator serverLocator, String username, String password) {
        super(serverLocator, username, password);
    }

    protected ActiveMQProducerResource(ServerLocator serverLocator) {
        super(serverLocator);
    }

    public ActiveMQProducerResource(String url, String address, String username, String password) {
        this(url, SimpleString.toSimpleString(address), username, password);
    }

    public ActiveMQProducerResource(String url, String address) {
        this(url, address, null, null);
    }

    public ActiveMQProducerResource(String url, SimpleString address, String username, String password) {
        super(url, username, password);
        if (address == null) {
            throw new IllegalArgumentException(String.format("%s construction error - address cannot be null", this.getClass().getSimpleName()));
        }
        this.address = address;
    }

    public ActiveMQProducerResource(String url, SimpleString address) {
        this(url, address, null, null);
    }

    public ActiveMQProducerResource(ServerLocator serverLocator, String address, String username, String password) {
        this(serverLocator, SimpleString.toSimpleString(address), username, password);
    }

    public ActiveMQProducerResource(ServerLocator serverLocator, String address) {
        this(serverLocator, SimpleString.toSimpleString(address));
    }

    public ActiveMQProducerResource(ServerLocator serverLocator, SimpleString address, String username, String password) {
        super(serverLocator, username, password);
        if (address == null) {
            throw new IllegalArgumentException(String.format("%s construction error - address cannot be null", this.getClass().getSimpleName()));
        }
        this.address = address;
    }

    public ActiveMQProducerResource(ServerLocator serverLocator, SimpleString address) {
        this(serverLocator, address, null, null);
    }

    public boolean isUseDurableMessage() {
        return useDurableMessage;
    }

    /**
     * Disables/Enables creating durable messages. By default, durable messages are created
     *
     * @param useDurableMessage if true, durable messages will be created
     */
    public void setUseDurableMessage(boolean useDurableMessage) {
        this.useDurableMessage = useDurableMessage;
    }

    /**
     * Creates the ClientProducer bound to the configured address, creating the queue
     * first if it does not yet exist and queue auto-creation is enabled.
     */
    @Override
    protected void createClient() {
        try {
            if (!session.addressQuery(address).isExists() && autoCreateQueue) {
                log.warn("{}: queue does not exist - creating queue: address = {}, name = {}", this.getClass().getSimpleName(), address.toString(), address.toString());
                session.createQueue(address, address);
            }
            producer = session.createProducer(address);
        } catch (ActiveMQException amqEx) {
            throw new ActiveMQClientResourceException(String.format("Error creating producer for address %s", address.toString()), amqEx);
        }
    }

    /**
     * Closes the ClientProducer; an ActiveMQException raised on close is logged and ignored.
     */
    @Override
    protected void stopClient() {
        if (producer != null) {
            try {
                producer.close();
            } catch (ActiveMQException amqEx) {
                log.warn("ActiveMQException encountered closing InternalClient ClientProducer - ignoring", amqEx);
            } finally {
                producer = null;
            }
        }
    }

    /**
     * Create a ClientMessage
     * <p>
     * If useDurableMessage is false, a non-durable message is created. Otherwise, a durable message is created
     *
     * @return a new ClientMessage
     */
    public ClientMessage createMessage() {
        if (session == null) {
            throw new IllegalStateException("ClientSession is null");
        }
        return session.createMessage(isUseDurableMessage());
    }

    /**
     * Create a ClientMessage with the specified body
     * <p>
     * If useDurableMessage is false, a non-durable message is created. Otherwise, a durable message is created
     *
     * @param body the body for the new message
     * @return a new ClientMessage with the specified body
     */
    public ClientMessage createMessage(byte[] body) {
        ClientMessage message = createMessage();
        if (body != null) {
            message.writeBodyBufferBytes(body);
        }
        return message;
    }

    /**
     * Create a ClientMessage with the specified body
     * <p>
     * If useDurableMessage is false, a non-durable message is created. Otherwise, a durable message is created
     *
     * @param body the body for the new message
     * @return a new ClientMessage with the specified body
     */
    public ClientMessage createMessage(String body) {
        ClientMessage message = createMessage();
        if (body != null) {
            message.writeBodyBufferString(body);
        }
        return message;
    }

    /**
     * Create a ClientMessage with the specified message properties
     * <p>
     * If useDurableMessage is false, a non-durable message is created. Otherwise, a durable message is created
     *
     * @param properties message properties for the new message
     * @return a new ClientMessage with the specified message properties
     */
    public ClientMessage createMessage(Map<String, Object> properties) {
        ClientMessage message = createMessage();
        addMessageProperties(message, properties);
        return message;
    }

    /**
     * Create a ClientMessage with the specified body and message properties
     * <p>
     * If useDurableMessage is false, a non-durable message is created. Otherwise, a durable message is created
     *
     * @param body the body for the new message
     * @param properties message properties for the new message
     * @return a new ClientMessage with the specified body and message properties
     */
    public ClientMessage createMessage(byte[] body, Map<String, Object> properties) {
        ClientMessage message = createMessage(body);
        addMessageProperties(message, properties);
        return message;
    }

    /**
     * Create a ClientMessage with the specified body and message properties
     * <p>
     * If useDurableMessage is false, a non-durable message is created. Otherwise, a durable message is created
     *
     * @param body the body for the new message
     * @param properties message properties for the new message
     * @return a new ClientMessage with the specified body and message properties
     */
    public ClientMessage createMessage(String body, Map<String, Object> properties) {
        ClientMessage message = createMessage(body);
        addMessageProperties(message, properties);
        return message;
    }

    /**
     * Send a ClientMessage to the server
     *
     * @param message the message to send
     */
    public void sendMessage(ClientMessage message) {
        try {
            producer.send(message);
        } catch (ActiveMQException amqEx) {
            throw new ActiveMQClientResourceException(String.format("Failed to send message to %s", producer.getAddress().toString()), amqEx);
        }
    }

    /**
     * Create a new ClientMessage with the specified body and send to the server
     *
     * @param body the body for the new message
     * @return the message that was sent
     */
    public ClientMessage sendMessage(byte[] body) {
        ClientMessage message = createMessage(body);
        sendMessage(message);
        return message;
    }

    /**
     * Create a new ClientMessage with the specified body and send to the server
     *
     * @param body the body for the new message
     * @return the message that was sent
     */
    public ClientMessage sendMessage(String body) {
        ClientMessage message = createMessage(body);
        sendMessage(message);
        return message;
    }

    /**
     * Create a new ClientMessage with the specified properties and send to the server
     *
     * @param properties the properties for the new message
     * @return the message that was sent
     */
    public ClientMessage sendMessage(Map<String, Object> properties) {
        ClientMessage message = createMessage(properties);
        sendMessage(message);
        return message;
    }

    /**
     * Create a new ClientMessage with the specified body and properties and send to the server
     *
     * @param body the body for the new message
     * @param properties the properties for the new message
     * @return the message that was sent
     */
    public ClientMessage sendMessage(byte[] body, Map<String, Object> properties) {
        // BUGFIX: previously called createMessage(body), silently dropping the properties
        ClientMessage message = createMessage(body, properties);
        sendMessage(message);
        return message;
    }

    /**
     * Create a new ClientMessage with the specified body and properties and send to the server
     *
     * @param body the body for the new message
     * @param properties the properties for the new message
     * @return the message that was sent
     */
    public ClientMessage sendMessage(String body, Map<String, Object> properties) {
        // BUGFIX: previously called createMessage(body), silently dropping the properties
        ClientMessage message = createMessage(body, properties);
        sendMessage(message);
        return message;
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.diff.impl.incrementalMerge;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.DiffBundle;
import com.intellij.openapi.diff.impl.highlighting.FragmentSide;
import com.intellij.openapi.diff.impl.util.DocumentUtil;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Comparator;
/**
* Represents a change in diff or merge view.
* A change has two {@link com.intellij.openapi.diff.impl.incrementalMerge.Change.SimpleChangeSide sides} (left and right), each of them representing the text which has been changed and the original text
* shown in the diff/merge.
* Change can be applied, then its sides would be equal.
*/
public abstract class Change {
    private static final Logger LOG = Logger.getInstance(Change.class);

    /** Returns the given (left or right) side of this change. */
    public abstract ChangeSide getChangeSide(FragmentSide side);

    /** Returns the type of this change. */
    public abstract ChangeType getType();

    /** Returns the change list this change belongs to. */
    public abstract ChangeList getChangeList();

    /** Removes this change from its change list. */
    protected abstract void removeFromList();

    /**
     * Called when a change has been applied.
     */
    public abstract void onApplied();

    /**
     * Called when a change has been removed from the list.
     */
    public abstract void onRemovedFromList();

    /** Returns whether this change is still valid. */
    public abstract boolean isValid();

    /**
     * Apply the change, i.e. change the "Merge result" document and update range markers, highlighting, gutters, etc.
     * @param original The source side of the change, which is being applied.
     */
    private void apply(@NotNull FragmentSide original) {
        FragmentSide targetSide = original.otherSide();
        RangeMarker originalRangeMarker = getRangeMarker(original);
        RangeMarker rangeMarker = getRangeMarker(targetSide);
        // modifyDocument returns null when the target document could not be made writable
        TextRange textRange = modifyDocument(getProject(), originalRangeMarker, rangeMarker);
        if (textRange != null && isValid()) {
            updateTargetRangeMarker(targetSide, textRange);
        }
        onApplied();
    }

    /**
     * Updates the target marker of a change after the change has been applied
     * to allow highlighting of the document modification which has been performed.
     * @param targetFragmentSide The side to be changed.
     * @param updatedTextRange New text range to be applied to the side.
     */
    protected final void updateTargetRangeMarker(@NotNull FragmentSide targetFragmentSide, @NotNull TextRange updatedTextRange) {
        ChangeSide targetSide = getChangeSide(targetFragmentSide);
        DiffRangeMarker originalRange = targetSide.getRange();
        DiffRangeMarker updatedRange = new DiffRangeMarker(originalRange.getDocument(), updatedTextRange, null);
        changeSide(targetSide, updatedRange);
    }

    /**
     * Substitutes the specified side of this change to a new side which contains the given range.
     * @param sideToChange The side to be changed.
     * @param newRange New text range of the new side.
     */
    protected abstract void changeSide(ChangeSide sideToChange, DiffRangeMarker newRange);

    /**
     * Applies the text from the original marker to the target marker.
     * @return the resulting TextRange from the target document, or null if the document is not writable.
     */
    @Nullable
    private static TextRange modifyDocument(@Nullable Project project, @NotNull RangeMarker original, @NotNull RangeMarker target) {
        Document document = target.getDocument();
        if (project != null && !ReadonlyStatusHandler.ensureDocumentWritable(project, document)) {
            return null;
        }
        // The original side is empty: remove the target text entirely
        if (DocumentUtil.isEmpty(original)) {
            int offset = target.getStartOffset();
            document.deleteString(offset, target.getEndOffset());
        }
        CharSequence text = original.getDocument().getImmutableCharSequence().subSequence(original.getStartOffset(), original.getEndOffset());
        int startOffset = target.getStartOffset();
        // Insert into an empty target, otherwise replace the existing target text
        if (DocumentUtil.isEmpty(target)) {
            document.insertString(startOffset, text);
        } else {
            document.replaceString(startOffset, target.getEndOffset(), text);
        }
        return new TextRange(startOffset, startOffset + text.length());
    }

    /** Installs highlighting for both sides of this change into the two given editors. */
    public void addMarkup(Editor[] editors) {
        LOG.assertTrue(editors.length == 2);
        highlight(editors, FragmentSide.SIDE1);
        highlight(editors, FragmentSide.SIDE2);
    }

    private void highlight(Editor[] editors, FragmentSide side) {
        getHighlighterHolder(side).highlight(getChangeSide(side), editors[side.getIndex()], getType());
    }

    private void updateHighlighter(FragmentSide side) {
        getHighlighterHolder(side).updateHighlighter(getChangeSide(side), getType());
    }

    @Nullable
    private Project getProject() {
        return getChangeList().getProject();
    }

    @NotNull
    private ChangeHighlighterHolder getHighlighterHolder(FragmentSide side) {
        return getChangeSide(side).getHighlighterHolder();
    }

    @NotNull
    private RangeMarker getRangeMarker(FragmentSide side) {
        ChangeSide changeSide = getChangeSide(side);
        LOG.assertTrue(changeSide != null);
        return changeSide.getRange();
    }

    /**
     * Applies the change inside a write action wrapped in a command,
     * so the modification participates in undo under the "save merge result" command name.
     */
    public static void apply(final Change change, final FragmentSide fromSide) {
        ApplicationManager.getApplication().runWriteAction(() -> CommandProcessor.getInstance().executeCommand(change.getProject(), () -> doApply(change, fromSide), null, DiffBundle.message("save.merge.result.command.name")));
    }

    /** Applies the change directly; callers are responsible for write-action/command wrapping. */
    public static void doApply(final Change change, final FragmentSide fromSide) {
        change.apply(fromSide);
    }

    /** Refreshes the highlighters of both sides. */
    public void updateMarkup() {
        updateHighlighter(FragmentSide.SIDE1);
        updateHighlighter(FragmentSide.SIDE2);
    }

    /**
     * Returns true if actions can be offered for this change when applying from the given side:
     * the opposite (target) document must be writable and the target editor must not be a viewer.
     */
    public boolean canHasActions(FragmentSide fromSide) {
        FragmentSide targetSide = fromSide.otherSide();
        Document targetDocument = getChangeList().getDocument(targetSide);
        if (!targetDocument.isWritable()) return false;
        Editor targetEditor = getHighlighterHolder(targetSide).getEditor();
        return !targetEditor.isViewer();
    }

    /**
     * Orders changes by the start offset of their range markers,
     * first on the main side, then on the other side as a tie-breaker.
     */
    public static class ChangeOrder implements Comparator<Change> {
        private final FragmentSide myMainSide;

        public ChangeOrder(FragmentSide mainSide) {
            myMainSide = mainSide;
        }

        @Override
        public int compare(@NotNull Change change, @NotNull Change change1) {
            int result1 = compareSide(change, change1, myMainSide);
            if (result1 != 0) return result1;
            return compareSide(change, change1, myMainSide.otherSide());
        }

        private static int compareSide(Change change, Change change1, FragmentSide side) {
            return RangeMarker.BY_START_OFFSET.compare(change.getRangeMarker(side), change1.getRangeMarker(side));
        }
    }

    /** Basic ChangeSide implementation: a fragment side, its range and its highlighter holder. */
    protected static class SimpleChangeSide extends ChangeSide {
        private final FragmentSide mySide;
        private final DiffRangeMarker myRange;
        private final ChangeHighlighterHolder myHighlighterHolder;

        public SimpleChangeSide(FragmentSide side, DiffRangeMarker rangeMarker) {
            mySide = side;
            myRange = rangeMarker;
            myHighlighterHolder = new ChangeHighlighterHolder();
        }

        /** Copy of the given side with a new range, reusing its highlighter holder. */
        public SimpleChangeSide(@NotNull ChangeSide originalSide, @NotNull DiffRangeMarker newRange) {
            mySide = ((SimpleChangeSide)originalSide).getFragmentSide();
            myRange = newRange;
            myHighlighterHolder = originalSide.getHighlighterHolder();
        }

        @NotNull
        public FragmentSide getFragmentSide() {
            return mySide;
        }

        @Override
        @NotNull
        public DiffRangeMarker getRange() {
            return myRange;
        }

        @NotNull
        @Override
        public ChangeHighlighterHolder getHighlighterHolder() {
            return myHighlighterHolder;
        }
    }
}
| |
/*
* Copyright Kay Stenschke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kstenschke.shifter.models.shiftable_types;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.ui.popup.PopupChooserBuilder;
import com.intellij.ui.components.JBList;
import com.kstenschke.shifter.models.ActionContainer;
import com.kstenschke.shifter.resources.StaticTexts;
import com.kstenschke.shifter.utils.UtilsFile;
import com.kstenschke.shifter.utils.UtilsTextual;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.apache.commons.lang.StringUtils.trim;
/**
* Included comment shiftable_types:
*
* 1. Single-line comment => // ...
* 2. Block comment => /* ... *\/
* 3. HTML comment => <!-- ... -->
*/
public class Comment {
public static final String ACTION_TEXT = "Shift Comment";
/**
 * Tells whether the given string is a comment: a single-line comment ("// ..."),
 * several consecutive single-line comments, or a block comment.
 *
 * @param str String to be shifted currently
 * @return true if the trimmed string is recognized as a comment
 */
public static boolean isComment(String str) {
    if (str == null) {
        return false;
    }
    String trimmed = str.trim();
    if (trimmed.startsWith("//")) {
        // A "//" comment may only span lines if every line is itself a "//" comment
        return !trimmed.contains("\n") || isMultipleSingleLineComments(trimmed);
    }
    return isBlockComment(trimmed);
}
/**
 * Tells whether the given string is a plain block comment.
 * Delegates with DOC-block comments disallowed and no spaces required around the comment signs.
 *
 * @param str string to check; may be null
 * @return true if the trimmed string is a block comment
 */
public static boolean isBlockComment(String str) {
    return isBlockComment(str, false, false);
}
/**
 * Tells whether the given string is a block comment.
 *
 * @param str                    string to check; may be null
 * @param allowDocBlockComment   also accept DOC-block comments (opened with an extra asterisk)
 * @param commentSignsNeedSpaces require a space inside the opening and closing comment signs
 * @return true if the trimmed string is a (DOC-)block comment
 */
static boolean isBlockComment(String str, boolean allowDocBlockComment, boolean commentSignsNeedSpaces) {
    if (str == null) {
        return false;
    }
    String trimmed = str.trim();
    String innerWrap = commentSignsNeedSpaces ? " " : "";
    String closing = innerWrap + "*/";

    boolean isPlainBlock = trimmed.startsWith("/*" + innerWrap)
            && trimmed.endsWith(closing)
            && trimmed.indexOf("/*") != trimmed.length() - 3;

    if (!allowDocBlockComment) {
        return isPlainBlock;
    }
    return isPlainBlock || (trimmed.startsWith("/**" + innerWrap) && trimmed.endsWith(closing));
}
/**
 * Tells whether the given string consists of multiple lines that are all
 * single-line ("//") comments.
 *
 * @param str string to check; may be null
 * @return true if the string spans several lines and every line starts with "//"
 */
public static boolean isMultipleSingleLineComments(String str) {
    // A single line (or null) can never be multiple comments
    if (str == null || !str.contains("\n")) {
        return false;
    }
    for (String line : str.split("\n")) {
        if (!line.trim().startsWith("//")) {
            return false;
        }
    }
    return true;
}
/**
 * Tells whether the given string is a PHP block comment wrapped in PHP tags,
 * e.g. a block comment between an opening PHP tag and a closing one.
 *
 * @param str string to check; may be null
 * @return true if the trimmed string is a tag-wrapped PHP block comment
 */
public static boolean isPhpBlockComment(String str) {
    if (str == null) {
        return false;
    }
    String trimmed = str.trim();
    boolean opensPhpBlockComment = trimmed.startsWith("<? /*") || trimmed.startsWith("<?php /*");
    return opensPhpBlockComment
            && trimmed.endsWith("*/ ?>")
            && trimmed.indexOf("/*") != trimmed.lastIndexOf("*/") - 1;
}
/**
 * Tells whether the given string is an HTML comment.
 *
 * @param str string to check; may be null
 * @return true if the trimmed string is an HTML comment
 */
public static boolean isHtmlComment(String str) {
    if (str == null) {
        return false;
    }
    String trimmed = str.trim();
    if (!trimmed.startsWith("<!--") || !trimmed.endsWith("-->")) {
        return false;
    }
    return trimmed.indexOf("<!--") != trimmed.length() - 5;
}
/**
 * Shifts the selected comment: a tag-wrapped PHP block comment (in a PHP file)
 * becomes an HTML comment; otherwise toggles between single-line and block-comment style.
 *
 * @param actionContainer current editor action context (selection, filename, ...)
 * @return the shifted comment text
 */
public static String getShifted(ActionContainer actionContainer) {
    String selection = actionContainer.selectedText;
    if (actionContainer.filename != null
            && UtilsFile.isPhpFile(actionContainer.filename)
            && isPhpBlockComment(selection)) {
        // PHP block-comment inside PHP or PHTML: convert to HTML comment
        return "<!-- " + selection.substring(8, selection.length() - 5).trim() + " -->";
    }

    // Default comment shifting: toggle among single-line and block-comment style
    String str = selection.trim();
    if (str.startsWith("//")) {
        // Single-line comment: convert to block comment
        String padded = str.endsWith(" ") ? str : str + " ";
        return "/*" + padded.substring(2) + "*/";
    }

    // Block comment: strip the delimiters and convert to a single-line comment.
    // (Multi-lined selections go through shiftMultiLineBlockCommentInDocument instead;
    // this path only joins lines as a fallback.)
    String inner = str.substring(2, str.length() - 2);
    if (inner.contains("\n")) {
        return "//" + inner.replace("\n", " ");
    }
    // A single-lined DOC-style block comment becomes "// ..." and not "//* ..."
    return "//" + (inner.startsWith("* ") ? inner.substring(1) : inner);
}
/**
 * Converts an HTML comment into a PHP block comment wrapped in PHP tags.
 *
 * @param str HTML comment text; may be null (yields an empty PHP block comment)
 * @return the PHP block comment containing the HTML comment's inner text
 */
public static String getPhpBlockCommentFromHtmlComment(String str) {
    if (str == null) {
        return "<?php /* */ ?>";
    }
    int length = str.length();
    // Strip the 4-char opening and 3-char closing HTML comment delimiters when present
    String inner = length > 3
            ? str.substring(4, length - 3).trim()
            : str.trim();
    return "<?php /* " + inner + " */ ?>";
}
/**
 * Shift a multi-lined block comment into single-line comment(s).
 * Shows a popup offering the two shifting modes and performs the selected one:
 * join all lines into one single-line comment, or convert each line into its
 * own single-line comment.
 *
 * @param actionContainer current editor action context (selection, project, ...)
 */
public static void shiftMultiLineBlockCommentInDocument(final ActionContainer actionContainer) {
    List<String> shiftOptions = new ArrayList<>();
    shiftOptions.add(StaticTexts.SHIFT_MULTILINE_BLOCK_COMMENT_TO_ONE_SINGLE_COMMENT);
    shiftOptions.add(StaticTexts.SHIFT_MULTILINE_BLOCK_COMMENT_TO_MULTIPLE_SINGLE_COMMENTS);
    final Object[] options = shiftOptions.toArray(new String[0]);
    final JBList modes = new JBList(options);
    PopupChooserBuilder popup = new PopupChooserBuilder(modes);
    popup.setTitle(StaticTexts.POPUP_TITLE_SHIFT).setItemChoosenCallback(
            () -> ApplicationManager.getApplication().runWriteAction(() -> {
                // Callback when item chosen
                CommandProcessor.getInstance().executeCommand(actionContainer.project, () -> {
                    final int index = modes.getSelectedIndex();
                    // Option 0 merges all lines into one comment; option 1 keeps one comment per line
                    final String shiftedBlockCommentLines = 0 == index
                            ? shiftMultipleBlockCommentLines(actionContainer.selectedText, true)
                            : shiftMultipleBlockCommentLines(actionContainer.selectedText, false);
                    // Replace the selection undoably under the "Shift Comment" action name
                    actionContainer.writeUndoable(
                            actionContainer.getRunnableReplaceSelection(shiftedBlockCommentLines),
                            ACTION_TEXT);
                },
                null, null);
            })).setMovable(true).createPopup().showCenteredInCurrentWindow(actionContainer.project);
}
/**
 * Show popup w/ shifting modes for multiple selected "//" comment lines:
 * merge into one comment, convert to a block comment, or sort ascending / descending.
 *
 * @param actionContainer Container w/ document, selection and project references of the current action
 */
public static void shiftMultipleSingleLineCommentsInDocument(final ActionContainer actionContainer) {
    List<String> shiftOptions = new ArrayList<>();
    shiftOptions.add(StaticTexts.SHIFT_MULTIPLE_LINE_COMMENTS_MERGE);
    shiftOptions.add(StaticTexts.SHIFT_MULTIPLE_LINE_COMMENTS_TO_BLOCK_COMMENT);
    shiftOptions.add(StaticTexts.SHIFT_MULTIPLE_LINE_SORT_ASCENDING);
    shiftOptions.add(StaticTexts.SHIFT_MULTIPLE_LINE_SORT_DESCENDING);

    final Object[] options = shiftOptions.toArray(new String[0]);
    final JBList modes = new JBList(options);

    //PopupChooserBuilder popup = JBPopupFactory.getInstance().createListPopupBuilder(modes);
    PopupChooserBuilder popup = new PopupChooserBuilder(modes);
    popup.setTitle(StaticTexts.POPUP_TITLE_SHIFT).setItemChoosenCallback(() -> ApplicationManager.getApplication().runWriteAction(() -> {
        // Callback when item chosen
        CommandProcessor.getInstance().executeCommand(actionContainer.project, () -> {
            // Dispatch on the selected list index; order must match the shiftOptions added above
            final int index = modes.getSelectedIndex();
            String shifted;
            switch (index) {
                case 0:
                    shifted = mergeMultipleLineComments(actionContainer.selectedText);
                    break;
                case 1:
                    shifted = convertMultipleLineCommentsToBlockComment(actionContainer.selectedText);
                    break;
                case 2:
                    shifted = sortLineComments(actionContainer.selectedText, false);
                    break;
                case 3:
                default:
                    shifted = sortLineComments(actionContainer.selectedText, true);
                    break;
            }
            actionContainer.writeUndoable(actionContainer.getRunnableReplaceSelection(shifted), ACTION_TEXT);
        },
        null, null);
    })).setMovable(true).createPopup().showCenteredInCurrentWindow(actionContainer.project);
}
/**
 * Convert the lines of a multi-line block comment into "//" single-line comment(s).
 *
 * @param str   Selected block comment, including its delimiters
 * @param merge Join all lines into one single-line comment? Otherwise emit one "//" comment per line
 * @return The shifted comment(s)
 */
private static String shiftMultipleBlockCommentLines(String str, boolean merge) {
    // Drop the leading "/*"
    str = trim(str).substring(2);

    String[] lines = str.split("\n");
    int index = 0;
    StringBuilder result = new StringBuilder("//");
    for (String line : lines) {
        line = trim(line);
        // Strip a DOC-style "* " continuation marker
        if (line.startsWith("* ")) {
            line = line.substring(2);
        }
        line = trim(line);
        // First line may still carry a bare "*" left over from a "/**" opener
        if (0 == index && line.startsWith("*")) {
            line = trim(line.substring(1));
        }
        if (!line.isEmpty()) {
            result.append(merge
                    ? " " + line
                    : (0 == index ? "" : "\n") + "// " + line);
        }
        index++;
    }

    // Remove trailing "*/"
    result = new StringBuilder(result.substring(0, result.length() - 2));
    if (!merge) {
        // Remove empty comment lines
        result = new StringBuilder(result.toString().replace("\n//\n", "\n"));
        if (result.toString().startsWith("//\n")) {
            result = new StringBuilder(result.substring(3));
        }
        if (result.toString().endsWith("\n// ")) {
            result = new StringBuilder(result.substring(0, result.length() - 4));
        }
    }

    return result.toString();
}
/**
 * Convert multiple "//" comment lines into one DOC-style block comment.
 *
 * @param str Selected lines of single-line comments, separated by "\n"
 * @return Block comment: opening "/**", one " * " line per input line, closing delimiter
 */
private static String convertMultipleLineCommentsToBlockComment(String str) {
    String[] lines = str.split("\n");
    StringBuilder result = new StringBuilder();
    int index = 0;
    for (String line : lines) {
        String trimmed = trim(line);
        // Strip the leading "//" marker; guard against lines too short to carry one
        // (e.g. empty lines), which previously raised StringIndexOutOfBoundsException
        String content = trimmed.length() >= 2 ? trim(trimmed.substring(2)) : trimmed;
        result.append(0 == index ? "" : "\n").append(" * ").append(content);
        index++;
    }

    return "/**\n" + result + "\n */";
}
/**
 * Merge multiple "//" comment lines into one single-line comment.
 *
 * @param str Selected lines of single-line comments, separated by "\n"
 * @return One "// ..." line containing all comment contents, space-separated
 */
private static String mergeMultipleLineComments(String str) {
    String[] lines = str.split("\n");
    StringBuilder result = new StringBuilder();
    int index = 0;
    for (String line : lines) {
        String trimmed = trim(line);
        // Strip the leading "//" marker; guard against lines too short to carry one
        // (e.g. empty lines), which previously raised StringIndexOutOfBoundsException
        String content = trimmed.length() >= 2 ? trim(trimmed.substring(2)) : trimmed;
        result.append(0 == index ? "" : " ").append(content);
        index++;
    }

    return "// " + result;
}
/**
 * Sort the given comment lines naturally (alphanumeric-aware).
 *
 * @param str     Multi-line string of "//" comment lines
 * @param reverse Sort descending instead of ascending?
 * @return The sorted lines, joined by "\n"
 */
private static String sortLineComments(String str, boolean reverse) {
    // Arrays.asList yields a fixed-size but element-settable view, which in-place sorting allows
    List<String> lines = Arrays.asList(str.split("\n"));
    UtilsTextual.sortLinesNatural(lines, reverse);

    return String.join("\n", lines);
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.AppTopics;
import com.intellij.CommonBundle;
import com.intellij.codeStyle.CodeStyleFacade;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.application.TransactionGuardImpl;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.PrioritizedDocumentListener;
import com.intellij.openapi.editor.impl.EditorFactoryImpl;
import com.intellij.openapi.editor.impl.TrailingSpacesStripper;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.impl.text.TextEditorImpl;
import com.intellij.openapi.fileTypes.BinaryFileTypeDecompilers;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.UnknownFileType;
import com.intellij.openapi.project.*;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.newvfs.NewVirtualFileSystem;
import com.intellij.pom.core.impl.PomModelImpl;
import com.intellij.psi.ExternalChangeAction;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SingleRootFileViewProvider;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.ui.UIBundle;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.nio.charset.Charset;
import java.util.*;
import java.util.List;
public class FileDocumentManagerImpl extends FileDocumentManager implements VirtualFileListener, VetoableProjectManagerListener, SafeWriteRequestor {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl");

// Hard file->document reference; keeps the document alive while its (light) file lives
public static final Key<Document> HARD_REF_TO_DOCUMENT_KEY = Key.create("HARD_REF_TO_DOCUMENT_KEY");
// Line separator recorded for a document when it changes; consumed when the document is saved
private static final Key<String> LINE_SEPARATOR_KEY = Key.create("LINE_SEPARATOR_KEY");
// Back-reference from a document to its file
private static final Key<VirtualFile> FILE_KEY = Key.create("FILE_KEY");
// Marks an empty unknown-type file whose type must be re-detected after its content changes
private static final Key<Boolean> MUST_RECOMPUTE_FILE_TYPE = Key.create("Must recompute file type");

// Documents with in-memory changes not yet written to disk
private final Set<Document> myUnsavedDocuments = ContainerUtil.newConcurrentSet();
private final MessageBus myBus;

// Guards document creation and file<->document registration (see getDocument/registerDocument)
private static final Object lock = new Object();
// Proxy that fans FileDocumentManagerListener events out to the bus, EP listeners and the stripper
private final FileDocumentManagerListener myMultiCaster;
private final TrailingSpacesStripper myTrailingSpacesStripper = new TrailingSpacesStripper();

// True while saveAllDocuments() runs from canClose(); changes the wording of the save-error dialog
private boolean myOnClose;

private volatile MemoryDiskConflictResolver myConflictResolver = new MemoryDiskConflictResolver();

// Tracks edits of physical documents: marks them unsaved and records the line separator to use on save
private final PrioritizedDocumentListener myPhysicalDocumentChangeTracker = new PrioritizedDocumentListener() {
    @Override
    public int getPriority() {
        // Run before all other document listeners
        return Integer.MIN_VALUE;
    }

    @Override
    public void documentChanged(DocumentEvent e) {
        final Document document = e.getDocument();
        // External (disk-driven) changes are already persistent and must not mark the document unsaved
        if (!ApplicationManager.getApplication().hasWriteAction(ExternalChangeAction.ExternalDocumentChange.class)) {
            myUnsavedDocuments.add(document);
        }
        final Runnable currentCommand = CommandProcessor.getInstance().getCurrentCommand();
        Project project = currentCommand == null ? null : CommandProcessor.getInstance().getCurrentCommandProject();
        if (project == null)
            project = ProjectUtil.guessProjectForFile(getFile(document));
        String lineSeparator = CodeStyleFacade.getInstance(project).getLineSeparator();
        document.putUserData(LINE_SEPARATOR_KEY, lineSeparator);

        // avoid documents piling up during batch processing
        if (areTooManyDocumentsInTheQueue(myUnsavedDocuments)) {
            saveAllDocumentsLater();
        }
    }
};
/**
 * @param virtualFileManager used to receive VFS events (content/property changes, deletions)
 * @param projectManager     used to observe and veto project closing (see canClose)
 */
public FileDocumentManagerImpl(@NotNull VirtualFileManager virtualFileManager, @NotNull ProjectManager projectManager) {
    virtualFileManager.addVirtualFileListener(this);
    projectManager.addProjectManagerListener(this);

    myBus = ApplicationManager.getApplication().getMessageBus();
    myBus.connect().subscribe(ProjectManager.TOPIC, this);

    // Dynamic proxy: every FileDocumentManagerListener call is fanned out via multiCast()
    InvocationHandler handler = (proxy, method, args) -> {
        multiCast(method, args);
        return null;
    };

    final ClassLoader loader = FileDocumentManagerListener.class.getClassLoader();
    myMultiCaster = (FileDocumentManagerListener)Proxy.newProxyInstance(loader, new Class[]{FileDocumentManagerListener.class}, handler);
}

// Unwraps a reflective InvocationTargetException; rethrows unchecked causes, logs checked ones
private static void unwrapAndRethrow(Exception e) {
    Throwable unwrapped = e;
    if (e instanceof InvocationTargetException) {
        unwrapped = e.getCause() == null ? e : e.getCause();
    }
    ExceptionUtil.rethrowUnchecked(unwrapped);
    LOG.error(unwrapped);
}

// Delivers one listener event to: 1) the message bus, 2) extension-point listeners, 3) the trailing-spaces stripper
@SuppressWarnings("OverlyBroadCatchBlock")
private void multiCast(@NotNull Method method, Object[] args) {
    try {
        method.invoke(myBus.syncPublisher(AppTopics.FILE_DOCUMENT_SYNC), args);
    }
    catch (ClassCastException e) {
        LOG.error("Arguments: "+ Arrays.toString(args), e);
    }
    catch (Exception e) {
        unwrapAndRethrow(e);
    }

    // Allows pre-save document modification
    for (FileDocumentManagerListener listener : getListeners()) {
        try {
            method.invoke(listener, args);
        }
        catch (Exception e) {
            unwrapAndRethrow(e);
        }
    }

    // stripping trailing spaces
    try {
        method.invoke(myTrailingSpacesStripper, args);
    }
    catch (Exception e) {
        unwrapAndRethrow(e);
    }
}
/**
 * Returns the document for the given file, loading and caching it on first access.
 * Returns null for invalid files, directories, undecompilable binaries and too-large binary files.
 */
@Override
@Nullable
public Document getDocument(@NotNull final VirtualFile file) {
    ApplicationManager.getApplication().assertReadAccessAllowed();

    DocumentEx document = (DocumentEx)getCachedDocument(file);
    if (document == null) {
        if (!file.isValid() || file.isDirectory() || isBinaryWithoutDecompiler(file)) return null;

        boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
        if (file.getFileType().isBinary() && tooLarge) return null;

        // Oversized text files are loaded only partially, as a preview
        final CharSequence text = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);

        synchronized (lock) {
            document = (DocumentEx)getCachedDocument(file);
            if (document != null) return document; // Double checking

            document = (DocumentEx)createDocument(text, file);
            document.setModificationStamp(file.getModificationStamp());
            final FileType fileType = file.getFileType();
            document.setReadOnly(tooLarge || !file.isWritable() || fileType.isBinary());

            // Only physical documents need the unsaved-changes tracker
            if (!(file instanceof LightVirtualFile || file.getFileSystem() instanceof NonPhysicalFileSystem)) {
                document.addDocumentListener(myPhysicalDocumentChangeTracker);
            }

            if (file instanceof LightVirtualFile) {
                registerDocument(document, file);
            }
            else {
                document.putUserData(FILE_KEY, file);
                cacheDocument(file, document);
            }
        }

        // Fired outside the lock; listener code must not run under it
        myMultiCaster.fileContentLoaded(file, document);
    }

    return document;
}

/** True if the unsaved queue is large enough (by count or total text length) that a flush should be scheduled. */
public static boolean areTooManyDocumentsInTheQueue(Collection<Document> documents) {
    if (documents.size() > 100) return true;
    int totalSize = 0;
    for (Document document : documents) {
        totalSize += document.getTextLength();
        if (totalSize > FileUtilRt.LARGE_FOR_CONTENT_LOADING) return true;
    }
    return false;
}

private static Document createDocument(final CharSequence text, VirtualFile file) {
    // Light files may legitimately keep '\r' characters in their text
    boolean acceptSlashR = file instanceof LightVirtualFile && StringUtil.indexOf(text, '\r') >= 0;
    boolean freeThreaded = Boolean.TRUE.equals(file.getUserData(SingleRootFileViewProvider.FREE_THREADED));
    return ((EditorFactoryImpl)EditorFactory.getInstance()).createDocument(text, acceptSlashR, freeThreaded);
}

/** Returns the cached document without loading: the hard-referenced one if present, else the weak cache entry. */
@Override
@Nullable
public Document getCachedDocument(@NotNull VirtualFile file) {
    Document hard = file.getUserData(HARD_REF_TO_DOCUMENT_KEY);
    return hard != null ? hard : getDocumentFromCache(file);
}
/** Hard-links document and file to each other (used for light files, and to rebind on save). */
public static void registerDocument(@NotNull final Document document, @NotNull VirtualFile virtualFile) {
    synchronized (lock) {
        document.putUserData(FILE_KEY, virtualFile);
        virtualFile.putUserData(HARD_REF_TO_DOCUMENT_KEY, document);
    }
}

@Override
@Nullable
public VirtualFile getFile(@NotNull Document document) {
    return document.getUserData(FILE_KEY);
}

/** Discards all unsaved in-memory changes; only callable from unit-test mode. */
@TestOnly
public void dropAllUnsavedDocuments() {
    if (!ApplicationManager.getApplication().isUnitTestMode()) {
        throw new RuntimeException("This method is only for test mode!");
    }
    ApplicationManager.getApplication().assertWriteAccessAllowed();
    if (!myUnsavedDocuments.isEmpty()) {
        myUnsavedDocuments.clear();
        fireUnsavedDocumentsDropped();
    }
}
// Schedules a save of all pending documents on the EDT, skipping PSI-blocked ones
private void saveAllDocumentsLater() {
    // later because some document might have been blocked by PSI right now
    ApplicationManager.getApplication().invokeLater(() -> {
        if (ApplicationManager.getApplication().isDisposed()) {
            return;
        }
        final Document[] unsavedDocuments = getUnsavedDocuments();
        for (Document document : unsavedDocuments) {
            VirtualFile file = getFile(document);
            if (file == null) continue;
            Project project = ProjectUtil.guessProjectForFile(file);
            if (project == null) continue;
            if (PsiDocumentManager.getInstance(project).isDocumentBlockedByPsi(document)) continue;

            saveDocument(document);
        }
    });
}

@Override
public void saveAllDocuments() {
    saveAllDocuments(true);
}

/**
 * @param isExplicit caused by user directly (Save action) or indirectly (e.g. Compile)
 */
public void saveAllDocuments(boolean isExplicit) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    ((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();

    myMultiCaster.beforeAllDocumentsSaving();
    if (myUnsavedDocuments.isEmpty()) return;

    final Map<Document, IOException> failedToSave = new HashMap<>();
    final Set<Document> vetoed = new HashSet<>();
    // Saving a document may register new unsaved ones (via listeners), so keep looping
    // until a full pass attempts nothing; failed and vetoed documents are skipped
    while (true) {
        int count = 0;

        for (Document document : myUnsavedDocuments) {
            if (failedToSave.containsKey(document)) continue;
            if (vetoed.contains(document)) continue;
            try {
                doSaveDocument(document, isExplicit);
            }
            catch (IOException e) {
                //noinspection ThrowableResultOfMethodCallIgnored
                failedToSave.put(document, e);
            }
            catch (SaveVetoException e) {
                vetoed.add(document);
            }
            count++;
        }

        if (count == 0) break;
    }

    if (!failedToSave.isEmpty()) {
        handleErrorsOnSave(failedToSave);
    }
}

@Override
public void saveDocument(@NotNull final Document document) {
    saveDocument(document, true);
}

// Saves a single document; IOExceptions surface through the error dialog, vetoes are silent
public void saveDocument(@NotNull final Document document, final boolean explicit) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    ((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();

    if (!myUnsavedDocuments.contains(document)) return;

    try {
        doSaveDocument(document, explicit);
    }
    catch (IOException e) {
        handleErrorsOnSave(Collections.singletonMap(document, e));
    }
    catch (SaveVetoException ignored) {
    }
}
/** Saves the document without stripping trailing spaces, temporarily disabling the stripper for its file. */
@Override
public void saveDocumentAsIs(@NotNull Document document) {
    VirtualFile file = getFile(document);
    boolean spaceStrippingEnabled = true;
    if (file != null) {
        spaceStrippingEnabled = TrailingSpacesStripper.isEnabled(file);
        TrailingSpacesStripper.setEnabled(file, false);
    }
    try {
        saveDocument(document);
    }
    finally {
        // Always restore the previous stripping setting, even if the save failed
        if (file != null) {
            TrailingSpacesStripper.setEnabled(file, spaceStrippingEnabled);
        }
    }
}

// Thrown when a vetoer (or a memory-vs-disk conflict) forbids saving a document
private static class SaveVetoException extends Exception {}

private void doSaveDocument(@NotNull final Document document, boolean isExplicit) throws IOException, SaveVetoException {
    VirtualFile file = getFile(document);

    // Detached, light, or unmodified files need no disk write - just clear the unsaved flag
    if (file == null || file instanceof LightVirtualFile || file.isValid() && !isFileModified(file)) {
        removeFromUnsaved(document);
        return;
    }

    // Outdated VFS timestamp means an external change; the refresh may reload and drop the unsaved state
    if (file.isValid() && needsRefresh(file)) {
        file.refresh(false, false);
        if (!myUnsavedDocuments.contains(document)) return;
    }

    if (!maySaveDocument(file, document, isExplicit)) {
        throw new SaveVetoException();
    }

    WriteAction.run(() -> doSaveDocumentInWriteAction(document, file));
}

// Saving is allowed when no memory-vs-disk conflict exists and no vetoer extension objects
private boolean maySaveDocument(VirtualFile file, Document document, boolean isExplicit) {
    return !myConflictResolver.hasConflict(file) &&
           Arrays.stream(Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)).allMatch(vetoer -> vetoer.maySaveDocument(document, isExplicit));
}
private void doSaveDocumentInWriteAction(@NotNull final Document document, @NotNull final VirtualFile file) throws IOException {
    if (!file.isValid()) {
        removeFromUnsaved(document);
        return;
    }

    if (!file.equals(getFile(document))) {
        registerDocument(document, file);
    }

    boolean saveNeeded = false;
    IOException ioException = null;
    try {
        saveNeeded = isSaveNeeded(document, file);
    }
    catch (IOException e) {
        // in case of corrupted VFS try to stay consistent
        ioException = e;
    }

    if (!saveNeeded) {
        // Content already matches disk: just sync stamps and clear the unsaved state
        if (document instanceof DocumentEx) {
            ((DocumentEx)document).setModificationStamp(file.getModificationStamp());
        }
        removeFromUnsaved(document);
        updateModifiedProperty(file);
        if (ioException != null) throw ioException;
        return;
    }

    PomModelImpl.guardPsiModificationsIn(() -> {
        myMultiCaster.beforeDocumentSaving(document);
        LOG.assertTrue(file.isValid());

        String text = document.getText();
        String lineSeparator = getLineSeparator(document, file);
        // Documents hold "\n" internally; convert to the file's separator on the way out
        if (!lineSeparator.equals("\n")) {
            text = StringUtil.convertLineSeparators(text, lineSeparator);
        }

        Project project = ProjectLocator.getInstance().guessProjectForFile(file);
        LoadTextUtil.write(project, file, this, text, document.getModificationStamp());

        myUnsavedDocuments.remove(document);
        LOG.assertTrue(!myUnsavedDocuments.contains(document));
        myTrailingSpacesStripper.clearLineModificationFlags(document);
    });
}

// Refreshes the "modified" indicator of every open editor showing the file
private static void updateModifiedProperty(@NotNull VirtualFile file) {
    for (Project project : ProjectManager.getInstance().getOpenProjects()) {
        FileEditorManager fileEditorManager = FileEditorManager.getInstance(project);
        for (FileEditor editor : fileEditorManager.getAllEditors(file)) {
            if (editor instanceof TextEditorImpl) {
                ((TextEditorImpl)editor).updateModifiedProperty();
            }
        }
    }
}

private void removeFromUnsaved(@NotNull Document document) {
    myUnsavedDocuments.remove(document);
    fireUnsavedDocumentsDropped();
    LOG.assertTrue(!myUnsavedDocuments.contains(document));
}

// Compares document text with disk content; binaries and very large documents are assumed changed
private static boolean isSaveNeeded(@NotNull Document document, @NotNull VirtualFile file) throws IOException {
    if (file.getFileType().isBinary() || document.getTextLength() > 1000 * 1000) { // don't compare if the file is too big
        return true;
    }

    byte[] bytes = file.contentsToByteArray();
    CharSequence loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, file, false, false);

    return !Comparing.equal(document.getCharsSequence(), loaded);
}
// True when the on-disk timestamp differs from the VFS snapshot, i.e. a refresh is required before saving
private static boolean needsRefresh(final VirtualFile file) {
    final VirtualFileSystem fs = file.getFileSystem();
    return fs instanceof NewVirtualFileSystem && file.getTimeStamp() != ((NewVirtualFileSystem)fs).getTimeStamp(file);
}

/** Line separator to write the document with: the one detected at load, else the one recorded on change. */
@NotNull
public static String getLineSeparator(@NotNull Document document, @NotNull VirtualFile file) {
    String lineSeparator = LoadTextUtil.getDetectedLineSeparator(file);
    if (lineSeparator == null) {
        lineSeparator = document.getUserData(LINE_SEPARATOR_KEY);
        assert lineSeparator != null : document;
    }
    return lineSeparator;
}

@Override
@NotNull
public String getLineSeparator(@Nullable VirtualFile file, @Nullable Project project) {
    String lineSeparator = file == null ? null : LoadTextUtil.getDetectedLineSeparator(file);
    if (lineSeparator == null) {
        // Fall back to the (project-level, else application-level) code style setting
        CodeStyleFacade settingsManager = project == null
                                          ? CodeStyleFacade.getInstance()
                                          : CodeStyleFacade.getInstance(project);
        lineSeparator = settingsManager.getLineSeparator();
    }
    return lineSeparator;
}

/** Requests write access to the document, going through the read-only status handler when possible. */
@Override
public boolean requestWriting(@NotNull Document document, Project project) {
    final VirtualFile file = getInstance().getFile(document);
    if (project != null && file != null && file.isValid()) {
        return !file.getFileType().isBinary() && ReadonlyStatusHandler.ensureFilesWritable(project, file);
    }
    if (document.isWritable()) {
        return true;
    }
    document.fireReadOnlyModificationAttempt();
    return false;
}
/** Reloads from disk every given file that still exists and has a cached document. */
@Override
public void reloadFiles(@NotNull final VirtualFile... files) {
    for (VirtualFile file : files) {
        if (!file.exists()) {
            continue;
        }
        final Document cached = getCachedDocument(file);
        if (cached == null) {
            continue;
        }
        reloadFromDisk(cached);
    }
}
/**
 * @return All documents with unsaved in-memory changes; an empty array when there are none.
 */
@Override
@NotNull
public Document[] getUnsavedDocuments() {
    if (myUnsavedDocuments.isEmpty()) {
        return Document.EMPTY_ARRAY;
    }
    // toArray(new T[0]) is safe on a concurrent set even when its size changes mid-call,
    // unlike the previous list copy + pre-sized array (list.size()), and is also the
    // faster idiom on modern JVMs.
    return myUnsavedDocuments.toArray(new Document[0]);
}
@Override
public boolean isDocumentUnsaved(@NotNull Document document) {
    return myUnsavedDocuments.contains(document);
}

/** True when the cached document has unsaved changes newer than the file's modification stamp. */
@Override
public boolean isFileModified(@NotNull VirtualFile file) {
    final Document doc = getCachedDocument(file);
    return doc != null && isDocumentUnsaved(doc) && doc.getModificationStamp() != file.getModificationStamp();
}

@Override
public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
    final VirtualFile file = event.getFile();
    if (VirtualFile.PROP_WRITABLE.equals(event.getPropertyName())) {
        // Keep the document's read-only flag in sync with the file's writable flag
        final Document document = getCachedDocument(file);
        if (document != null) {
            ApplicationManager.getApplication().runWriteAction((ExternalChangeAction)() -> document.setReadOnly(!file.isWritable()));
        }
    }
    else if (VirtualFile.PROP_NAME.equals(event.getPropertyName())) {
        Document document = getCachedDocument(file);
        if (document != null) {
            // a file is linked to a document - chances are it is an "unknown text file" now
            if (isBinaryWithoutDecompiler(file)) {
                unbindFileFromDocument(file, document);
            }
        }
    }
}

// Severs both directions of the file<->document association
private void unbindFileFromDocument(@NotNull VirtualFile file, @NotNull Document document) {
    removeDocumentFromCache(file);
    file.putUserData(HARD_REF_TO_DOCUMENT_KEY, null);
    document.putUserData(FILE_KEY, null);
}

private static boolean isBinaryWithDecompiler(@NotNull VirtualFile file) {
    final FileType ft = file.getFileType();
    return ft.isBinary() && BinaryFileTypeDecompilers.INSTANCE.forFileType(ft) != null;
}

private static boolean isBinaryWithoutDecompiler(@NotNull VirtualFile file) {
    final FileType fileType = file.getFileType();
    return fileType.isBinary() && BinaryFileTypeDecompilers.INSTANCE.forFileType(fileType) == null;
}

@Override
public void contentsChanged(@NotNull VirtualFileEvent event) {
    // Saves initiated by this manager are already reflected in the document
    if (event.isFromSave()) return;
    final VirtualFile file = event.getFile();
    final Document document = getCachedDocument(file);
    if (document == null) {
        myMultiCaster.fileWithNoDocumentChanged(file);
        return;
    }

    if (isBinaryWithDecompiler(file)) {
        myMultiCaster.fileWithNoDocumentChanged(file); // This will generate PSI event at FileManagerImpl
    }

    // Reload silently unless the user has unsaved edits on top of an older file version
    if (document.getModificationStamp() == event.getOldModificationStamp() || !isDocumentUnsaved(document)) {
        reloadFromDisk(document);
    }
}
/** Replaces the document's content with the file's current on-disk content, discarding unsaved changes. */
@Override
public void reloadFromDisk(@NotNull final Document document) {
    ApplicationManager.getApplication().assertIsDispatchThread();

    final VirtualFile file = getFile(document);
    assert file != null;

    if (!fireBeforeFileContentReload(file, document)) {
        return;
    }

    final Project project = ProjectLocator.getInstance().guessProjectForFile(file);
    // One-element array so the flag can be re-checked and updated from inside the write action
    boolean[] isReloadable = {isReloadable(file, document, project)};
    if (isReloadable[0]) {
        CommandProcessor.getInstance().executeCommand(project, () -> ApplicationManager.getApplication().runWriteAction(
            new ExternalChangeAction.ExternalDocumentChange(document, project) {
                @Override
                public void run() {
                    if (!isBinaryWithoutDecompiler(file)) {
                        // Re-detect charset/BOM from the new bytes
                        LoadTextUtil.setCharsetWasDetectedFromBytes(file, null);
                        file.setBOM(null); // reset BOM in case we had one and the external change stripped it away
                        file.setCharset(null, null, false);
                        boolean wasWritable = document.isWritable();
                        document.setReadOnly(false);
                        boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
                        CharSequence reloaded = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);
                        isReloadable[0] = isReloadable(file, document, project);
                        if (isReloadable[0]) {
                            DocumentEx documentEx = (DocumentEx)document;
                            documentEx.replaceText(reloaded, file.getModificationStamp());
                        }
                        document.setReadOnly(!wasWritable);
                    }
                }
            }
        ), UIBundle.message("file.cache.conflict.action"), null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
    }
    if (isReloadable[0]) {
        myMultiCaster.fileContentReloaded(file, document);
    }
    else {
        unbindFileFromDocument(file, document);
        myMultiCaster.fileWithNoDocumentChanged(file);
    }
    myUnsavedDocuments.remove(document);
}

// Reload is possible unless the file is a too-large binary or its cached PSI cannot survive a text replace
private static boolean isReloadable(@NotNull VirtualFile file, @NotNull Document document, @Nullable Project project) {
    PsiFile cachedPsiFile = project == null ? null : PsiDocumentManager.getInstance(project).getCachedPsiFile(document);
    return !(FileUtilRt.isTooLarge(file.getLength()) && file.getFileType().isBinary()) &&
           (cachedPsiFile == null || cachedPsiFile instanceof PsiFileImpl || isBinaryWithDecompiler(file));
}

// Replaces the conflict resolver for the lifetime of the given disposable (tests only)
@TestOnly
void setAskReloadFromDisk(@NotNull Disposable disposable, @NotNull MemoryDiskConflictResolver newProcessor) {
    final MemoryDiskConflictResolver old = myConflictResolver;
    myConflictResolver = newProcessor;
    Disposer.register(disposable, () -> myConflictResolver = old);
}
@Override
public void fileDeleted(@NotNull VirtualFileEvent event) {
    // Let the stripper forget per-document state of a deleted file
    Document doc = getCachedDocument(event.getFile());
    if (doc != null) {
        myTrailingSpacesStripper.documentDeleted(doc);
    }
}

@Override
public void beforeContentsChange(@NotNull VirtualFileEvent event) {
    VirtualFile virtualFile = event.getFile();
    // check file type in second order to avoid content detection running
    if (virtualFile.getLength() == 0 && virtualFile.getFileType() == UnknownFileType.INSTANCE) {
        virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, Boolean.TRUE);
    }
    myConflictResolver.beforeContentChange(event);
}

/** Re-detects the file type if it was flagged by beforeContentsChange(); returns whether re-detection ran. */
public static boolean recomputeFileTypeIfNecessary(@NotNull VirtualFile virtualFile) {
    if (virtualFile.getUserData(MUST_RECOMPUTE_FILE_TYPE) != null) {
        virtualFile.getFileType();
        virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, null);
        return true;
    }
    return false;
}
/** Project-close veto hook: tries to save everything; closing may proceed only if nothing stays unsaved. */
@Override
public boolean canClose(@NotNull Project project) {
    if (!myUnsavedDocuments.isEmpty()) {
        myOnClose = true;
        try {
            saveAllDocuments();
        }
        finally {
            myOnClose = false;
        }
    }
    return myUnsavedDocuments.isEmpty();
}

private void fireUnsavedDocumentsDropped() {
    myMultiCaster.unsavedDocumentsDropped();
}

// Asks every vetoer extension; a vetoer exception is logged and treated as "no veto"
private boolean fireBeforeFileContentReload(final VirtualFile file, @NotNull Document document) {
    for (FileDocumentSynchronizationVetoer vetoer : Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)) {
        try {
            if (!vetoer.mayReloadFileContent(file, document)) {
                return false;
            }
        }
        catch (Exception e) {
            LOG.error(e);
        }
    }

    myMultiCaster.beforeFileContentReload(file, document);
    return true;
}

@NotNull
private static FileDocumentManagerListener[] getListeners() {
    return FileDocumentManagerListener.EP_NAME.getExtensions();
}

// Number of characters to preview of a too-large file, estimated from the charset's average byte density
private static int getPreviewCharCount(@NotNull VirtualFile file) {
    Charset charset = EncodingManager.getInstance().getEncoding(file, false);
    float bytesPerChar = charset == null ? 2 : charset.newEncoder().averageBytesPerChar();
    return (int)(FileUtilRt.LARGE_FILE_PREVIEW_SIZE / bytesPerChar);
}
// Reports save failures: rethrows in tests, otherwise logs and shows a dialog offering to revert the documents
private void handleErrorsOnSave(@NotNull Map<Document, IOException> failures) {
    if (ApplicationManager.getApplication().isUnitTestMode()) {
        IOException ioException = ContainerUtil.getFirstItem(failures.values());
        if (ioException != null) {
            throw new RuntimeException(ioException);
        }
        return;
    }
    for (IOException exception : failures.values()) {
        LOG.warn(exception);
    }

    final String text = StringUtil.join(failures.values(), Throwable::getMessage, "\n");

    final DialogWrapper dialog = new DialogWrapper(null) {
        {
            init();
            setTitle(UIBundle.message("cannot.save.files.dialog.title"));
        }

        @Override
        protected void createDefaultActions() {
            super.createDefaultActions();
            // During project close, OK means "ignore changes"; otherwise it means "revert changes"
            myOKAction.putValue(Action.NAME, UIBundle
                .message(myOnClose ? "cannot.save.files.dialog.ignore.changes" : "cannot.save.files.dialog.revert.changes"));
            myOKAction.putValue(DEFAULT_ACTION, null);

            if (!myOnClose) {
                myCancelAction.putValue(Action.NAME, CommonBundle.getCloseButtonText());
            }
        }

        @Override
        protected JComponent createCenterPanel() {
            final JPanel panel = new JPanel(new BorderLayout(0, 5));

            panel.add(new JLabel(UIBundle.message("cannot.save.files.dialog.message")), BorderLayout.NORTH);

            final JTextPane area = new JTextPane();
            area.setText(text);
            area.setEditable(false);
            area.setMinimumSize(new Dimension(area.getMinimumSize().width, 50));
            panel.add(new JBScrollPane(area, ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER),
                      BorderLayout.CENTER);

            return panel;
        }
    };

    if (dialog.showAndGet()) {
        // User chose to revert: reload each failed document from disk
        for (Document document : failures.keySet()) {
            reloadFromDisk(document);
        }
    }
}
// Weak-valued cache: a document becomes collectible once nothing else references it
private final Map<VirtualFile, Document> myDocumentCache = ContainerUtil.createConcurrentWeakValueMap();

// used in Upsource
protected void cacheDocument(@NotNull VirtualFile file, @NotNull Document document) {
    myDocumentCache.put(file, document);
}

// used in Upsource
protected void removeDocumentFromCache(@NotNull VirtualFile file) {
    myDocumentCache.remove(file);
}

// used in Upsource
protected Document getDocumentFromCache(@NotNull VirtualFile file) {
    return myDocumentCache.get(file);
}
}
| |
package org.jboss.resteasy.test;
import io.netty.channel.ChannelHandlerContext;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.util.Locale;
import static org.jboss.resteasy.test.TestPortProvider.generateURL;
import static org.jboss.resteasy.test.TestPortProvider.getHost;
import static org.jboss.resteasy.test.TestPortProvider.getPort;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class NettyTest
{
/** JAX-RS test resource exercising the Netty container's request handling. */
@Path("/")
public static class Resource
{
    /** Plain-text greeting for the basic round-trip test. */
    @GET
    @Path("/test")
    @Produces("text/plain")
    public String hello()
    {
        return "hello world";
    }

    /** Void method: the container must answer 204 No Content. */
    @GET
    @Path("empty")
    public void empty() {
    }

    /** Echoes the "param" query parameter. */
    @GET
    @Path("query")
    public String query(@QueryParam("param") String value) {
        return value;
    }

    /** Throws an unhandled exception to verify 500 handling. */
    @GET
    @Path("/exception")
    @Produces("text/plain")
    public String exception() {
        throw new RuntimeException();
    }

    /** Returns a large deterministic payload ("012...999"). */
    @GET
    @Path("large")
    @Produces("text/plain")
    public String large() {
        // StringBuilder instead of StringBuffer: local, single-threaded use needs no synchronization
        StringBuilder buf = new StringBuilder();
        for (int i = 0; i < 1000; i++) {
            buf.append(i);
        }
        return buf.toString();
    }

    /** Verifies injection of the Netty ChannelHandlerContext. */
    @GET
    @Path("/context")
    @Produces("text/plain")
    public String context(@Context ChannelHandlerContext context) {
        return context.channel().toString();
    }

    /** Echoes the POST body. */
    @POST
    @Path("/post")
    @Produces("text/plain")
    public String post(String postBody) {
        return postBody;
    }

    /** Returns the absolute request URI as seen by the server. */
    @GET
    @Path("/test/absolute")
    @Produces("text/plain")
    public String absolute(@Context UriInfo info)
    {
        return "uri: " + info.getRequestUri().toString();
    }
}
static Client client;
@BeforeClass
public static void setup() throws Exception
{
NettyContainer.start().getRegistry().addPerRequestResource(Resource.class);
client = ClientBuilder.newClient();
}
@AfterClass
public static void end() throws Exception
{
try
{
client.close();
}
catch (Exception e)
{
}
NettyContainer.stop();
}
@Test
public void testBasic() throws Exception
{
WebTarget target = client.target(generateURL("/test"));
String val = target.request().get(String.class);
Assert.assertEquals("hello world", val);
}
@Test
public void testQuery() throws Exception
{
WebTarget target = client.target(generateURL("/query"));
String val = target.queryParam("param", "val").request().get(String.class);
Assert.assertEquals("val", val);
}
@Test
public void testEmpty() throws Exception
{
WebTarget target = client.target(generateURL("/empty"));
Response response = target.request().get();
try
{
Assert.assertEquals(204, response.getStatus());
}
finally
{
response.close();
}
}
@Test
public void testLarge() throws Exception
{
WebTarget target = client.target(generateURL("/large"));
Response response = target.request().get();
try
{
Assert.assertEquals(200, response.getStatus());
StringBuffer buf = new StringBuffer();
for (int i = 0; i < 1000; i++) {
buf.append(i);
}
String expected = buf.toString();
String have = response.readEntity(String.class);
Assert.assertEquals(expected, have);
}
finally
{
response.close();
}
}
@Test
public void testUnhandledException() throws Exception
{
WebTarget target = client.target(generateURL("/exception"));
Response resp = target.request().get();
try
{
Assert.assertEquals(500, resp.getStatus());
}
finally
{
resp.close();
}
}
@Test
public void testChannelContext() throws Exception {
WebTarget target = client.target(generateURL("/context"));
String val = target.request().get(String.class);
Assert.assertNotNull(val);
Assert.assertFalse(val.isEmpty());
}
@Test
public void testPost() {
WebTarget target = client.target(generateURL("/post"));
String postBody = "hello world";
String result = (String) target.request().post(Entity.text(postBody), String.class);
Assert.assertEquals(postBody, result);
}
/**
* Per the HTTP spec, we must allow requests like:
*
* <pre>
* GET http://www.example.com/content HTTP/1.1
* Host: www.example.com
* </pre>
*
* <blockquote>
* RFC 2616 5.1.12:
* To allow for transition to absoluteURIs in all requests in future
versions of HTTP, all HTTP/1.1 servers MUST accept the absoluteURI
form in requests, even though HTTP/1.1 clients will only generate
them in requests to proxies.
</blockquote>
* @throws Exception
*/
@Test
public void testAbsoluteURI() throws Exception {
String uri = generateURL("/test/absolute");
Socket client = new Socket(getHost(), getPort());
PrintWriter out = new PrintWriter(client.getOutputStream(), true);
BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()));
out.printf(Locale.US, "GET %s HTTP/1.1\nHost: %s:%d\n\n", uri, getHost(), getPort());
String statusLine = in.readLine();
String response = in.readLine();
while (!response.startsWith("uri"))
{
response = in.readLine();
}
client.close();
Assert.assertEquals("HTTP/1.1 200 OK", statusLine);
Assert.assertEquals(uri, response.subSequence(5, response.length()));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.producer.internals;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.MetricNameTemplate;
import org.apache.kafka.common.metrics.Measurable;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
/**
 * Central registry of all producer sender metric names. Client-level metric
 * names are created eagerly in the constructor; topic-level names depend on a
 * topic tag that is only known at send time, so they are kept as
 * {@link MetricNameTemplate}s and instantiated on demand via the
 * {@code topic*()} accessor methods.
 */
public class SenderMetricsRegistry {

    final static String METRIC_GROUP_NAME = "producer-metrics";
    final static String TOPIC_METRIC_GROUP_NAME = "producer-topic-metrics";

    // Every template created through createTemplate(), in creation order;
    // exposed via allTemplates() (e.g. for generating metric documentation).
    private final List<MetricNameTemplate> allTemplates;

    /* Client-level metric names, resolved once at construction time. */
    public final MetricName batchSizeAvg;
    public final MetricName batchSizeMax;
    public final MetricName compressionRateAvg;
    public final MetricName recordQueueTimeAvg;
    public final MetricName recordQueueTimeMax;
    public final MetricName requestLatencyAvg;
    public final MetricName requestLatencyMax;
    public final MetricName produceThrottleTimeAvg;
    public final MetricName produceThrottleTimeMax;
    public final MetricName recordSendRate;
    public final MetricName recordSendTotal;
    public final MetricName recordsPerRequestAvg;
    public final MetricName recordRetryRate;
    public final MetricName recordRetryTotal;
    public final MetricName recordErrorRate;
    public final MetricName recordErrorTotal;
    public final MetricName recordSizeMax;
    public final MetricName recordSizeAvg;
    public final MetricName requestsInFlight;
    public final MetricName metadataAge;
    public final MetricName batchSplitRate;
    public final MetricName batchSplitTotal;

    /* Topic-level templates; the concrete topic tag is supplied per call. */
    private final MetricNameTemplate topicRecordSendRate;
    private final MetricNameTemplate topicRecordSendTotal;
    private final MetricNameTemplate topicByteRate;
    private final MetricNameTemplate topicByteTotal;
    private final MetricNameTemplate topicCompressionRate;
    private final MetricNameTemplate topicRecordRetryRate;
    private final MetricNameTemplate topicRecordRetryTotal;
    private final MetricNameTemplate topicRecordErrorRate;
    private final MetricNameTemplate topicRecordErrorTotal;

    private final Metrics metrics;
    // Tag keys from the Metrics config, shared by all client-level metrics.
    private final Set<String> tags;
    // Client tags plus "topic"; LinkedHashSet preserves tag ordering.
    private final LinkedHashSet<String> topicTags;

    public SenderMetricsRegistry(Metrics metrics) {
        this.metrics = metrics;
        this.tags = this.metrics.config().tags().keySet();
        this.allTemplates = new ArrayList<>();

        /***** Client level *****/

        this.batchSizeAvg = createMetricName("batch-size-avg",
                "The average number of bytes sent per partition per-request.");
        this.batchSizeMax = createMetricName("batch-size-max",
                "The max number of bytes sent per partition per-request.");
        this.compressionRateAvg = createMetricName("compression-rate-avg",
                "The average compression rate of record batches, defined as the average ratio of the " +
                        "compressed batch size over the uncompressed size.");
        this.recordQueueTimeAvg = createMetricName("record-queue-time-avg",
                "The average time in ms record batches spent in the send buffer.");
        this.recordQueueTimeMax = createMetricName("record-queue-time-max",
                "The maximum time in ms record batches spent in the send buffer.");
        this.requestLatencyAvg = createMetricName("request-latency-avg",
                "The average request latency in ms");
        this.requestLatencyMax = createMetricName("request-latency-max",
                "The maximum request latency in ms");
        this.recordSendRate = createMetricName("record-send-rate",
                "The average number of records sent per second.");
        this.recordSendTotal = createMetricName("record-send-total",
                "The total number of records sent.");
        this.recordsPerRequestAvg = createMetricName("records-per-request-avg",
                "The average number of records per request.");
        this.recordRetryRate = createMetricName("record-retry-rate",
                "The average per-second number of retried record sends");
        this.recordRetryTotal = createMetricName("record-retry-total",
                "The total number of retried record sends");
        this.recordErrorRate = createMetricName("record-error-rate",
                "The average per-second number of record sends that resulted in errors");
        this.recordErrorTotal = createMetricName("record-error-total",
                "The total number of record sends that resulted in errors");
        this.recordSizeMax = createMetricName("record-size-max",
                "The maximum record size");
        this.recordSizeAvg = createMetricName("record-size-avg",
                "The average record size");
        this.requestsInFlight = createMetricName("requests-in-flight",
                "The current number of in-flight requests awaiting a response.");
        this.metadataAge = createMetricName("metadata-age",
                "The age in seconds of the current producer metadata being used.");
        this.batchSplitRate = createMetricName("batch-split-rate",
                "The average number of batch splits per second");
        this.batchSplitTotal = createMetricName("batch-split-total",
                "The total number of batch splits");
        this.produceThrottleTimeAvg = createMetricName("produce-throttle-time-avg",
                "The average time in ms a request was throttled by a broker");
        this.produceThrottleTimeMax = createMetricName("produce-throttle-time-max",
                "The maximum time in ms a request was throttled by a broker");

        /***** Topic level *****/

        this.topicTags = new LinkedHashSet<>(tags);
        this.topicTags.add("topic");

        // We can't create the MetricName up front for these, because we don't know the topic name yet.
        this.topicRecordSendRate = createTopicTemplate("record-send-rate",
                "The average number of records sent per second for a topic.");
        this.topicRecordSendTotal = createTopicTemplate("record-send-total",
                "The total number of records sent for a topic.");
        this.topicByteRate = createTopicTemplate("byte-rate",
                "The average number of bytes sent per second for a topic.");
        this.topicByteTotal = createTopicTemplate("byte-total",
                "The total number of bytes sent for a topic.");
        this.topicCompressionRate = createTopicTemplate("compression-rate",
                "The average compression rate of record batches for a topic, defined as the average ratio " +
                        "of the compressed batch size over the uncompressed size.");
        this.topicRecordRetryRate = createTopicTemplate("record-retry-rate",
                "The average per-second number of retried record sends for a topic");
        this.topicRecordRetryTotal = createTopicTemplate("record-retry-total",
                "The total number of retried record sends for a topic");
        this.topicRecordErrorRate = createTopicTemplate("record-error-rate",
                "The average per-second number of record sends that resulted in errors for a topic");
        this.topicRecordErrorTotal = createTopicTemplate("record-error-total",
                "The total number of record sends that resulted in errors for a topic");
    }

    /** Registers a client-level template and resolves it to a concrete {@link MetricName}. */
    private MetricName createMetricName(String name, String description) {
        return this.metrics.metricInstance(createTemplate(name, METRIC_GROUP_NAME, description, this.tags));
    }

    /** Registers a topic-level template (includes the "topic" tag) without resolving it. */
    private MetricNameTemplate createTopicTemplate(String name, String description) {
        return createTemplate(name, TOPIC_METRIC_GROUP_NAME, description, this.topicTags);
    }

    /** topic level metrics **/
    public MetricName topicRecordSendRate(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicRecordSendRate, tags);
    }

    public MetricName topicRecordSendTotal(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicRecordSendTotal, tags);
    }

    public MetricName topicByteRate(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicByteRate, tags);
    }

    public MetricName topicByteTotal(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicByteTotal, tags);
    }

    public MetricName topicCompressionRate(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicCompressionRate, tags);
    }

    public MetricName topicRecordRetryRate(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicRecordRetryRate, tags);
    }

    public MetricName topicRecordRetryTotal(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicRecordRetryTotal, tags);
    }

    public MetricName topicRecordErrorRate(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicRecordErrorRate, tags);
    }

    public MetricName topicRecordErrorTotal(Map<String, String> tags) {
        return this.metrics.metricInstance(this.topicRecordErrorTotal, tags);
    }

    /**
     * Returns every template registered by this instance, in creation order.
     * NOTE(review): this is the live internal list, not a defensive copy —
     * callers appear to be trusted not to mutate it.
     */
    public List<MetricNameTemplate> allTemplates() {
        return allTemplates;
    }

    /** Creates (or fetches) a sensor with the given name on the underlying {@link Metrics}. */
    public Sensor sensor(String name) {
        return this.metrics.sensor(name);
    }

    /** Registers a standalone measurable metric on the underlying {@link Metrics}. */
    public void addMetric(MetricName m, Measurable measurable) {
        this.metrics.addMetric(m, measurable);
    }

    /** Returns the sensor with the given name, per {@link Metrics#getSensor}. */
    public Sensor getSensor(String name) {
        return this.metrics.getSensor(name);
    }

    /** Creates a template, records it in {@link #allTemplates}, and returns it. */
    private MetricNameTemplate createTemplate(String name, String group, String description, Set<String> tags) {
        MetricNameTemplate template = new MetricNameTemplate(name, group, description, tags);
        this.allTemplates.add(template);
        return template;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.cassandra;
import com.datastax.driver.core.Host;
import com.facebook.presto.cassandra.util.HostAddressFactory;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.ConnectorTableLayoutHandle;
import com.facebook.presto.spi.FixedSplitSource;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.connector.ConnectorSplitManager;
import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import javax.inject.Inject;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.facebook.presto.cassandra.util.Types.checkType;
import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
/**
 * Computes Cassandra splits for a table layout. Unpartitioned (or
 * indexed-predicate-pushdown) tables are split by token range; partitioned
 * tables get one split per partition, except that single-partition-key tables
 * sharing the same replica hosts are batched into IN-clause splits of up to
 * {@code partitionSizeForBatchSelect} keys.
 */
public class CassandraSplitManager
        implements ConnectorSplitManager
{
    private final String connectorId;
    private final CassandraSession cassandraSession;
    private final CachingCassandraSchemaProvider schemaProvider;
    private final int partitionSizeForBatchSelect;
    private final CassandraTokenSplitManager tokenSplitMgr;

    @Inject
    public CassandraSplitManager(CassandraConnectorId connectorId,
            CassandraClientConfig cassandraClientConfig,
            CassandraSession cassandraSession,
            CachingCassandraSchemaProvider schemaProvider,
            CassandraTokenSplitManager tokenSplitMgr)
    {
        this.connectorId = requireNonNull(connectorId, "connectorId is null").toString();
        this.schemaProvider = requireNonNull(schemaProvider, "schemaProvider is null");
        this.cassandraSession = requireNonNull(cassandraSession, "cassandraSession is null");
        this.partitionSizeForBatchSelect = cassandraClientConfig.getPartitionSizeForBatchSelect();
        this.tokenSplitMgr = tokenSplitMgr;
    }

    @Override
    public ConnectorSplitSource getSplits(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableLayoutHandle layout)
    {
        CassandraTableLayoutHandle layoutHandle = checkType(layout, CassandraTableLayoutHandle.class, "layout");
        CassandraTableHandle cassandraTableHandle = layoutHandle.getTable();
        List<CassandraPartition> partitions = layoutHandle.getPartitions().get();
        requireNonNull(partitions, "partitions is null");

        if (partitions.isEmpty()) {
            return new FixedSplitSource(connectorId, ImmutableList.<ConnectorSplit>of());
        }

        // if this is an unpartitioned table, split into equal ranges
        if (partitions.size() == 1) {
            CassandraPartition cassandraPartition = partitions.get(0);
            if (cassandraPartition.isUnpartitioned() || cassandraPartition.isIndexedColumnPredicatePushdown()) {
                CassandraTable table = schemaProvider.getTable(cassandraTableHandle);
                List<ConnectorSplit> splits = getSplitsByTokenRange(table, cassandraPartition.getPartitionId());
                return new FixedSplitSource(connectorId, splits);
            }
        }

        return new FixedSplitSource(connectorId, getSplitsForPartitions(cassandraTableHandle, partitions));
    }

    /**
     * Builds one split per token range of the table, each constrained by a
     * {@code token(...) > start AND token(...) <= end} condition and addressed
     * to the hosts owning that range.
     */
    private List<ConnectorSplit> getSplitsByTokenRange(CassandraTable table, String partitionId)
    {
        String schema = table.getTableHandle().getSchemaName();
        String tableName = table.getTableHandle().getTableName();
        String tokenExpression = table.getTokenExpression();

        ImmutableList.Builder<ConnectorSplit> builder = ImmutableList.builder();
        List<CassandraTokenSplitManager.TokenSplit> tokenSplits;
        try {
            tokenSplits = tokenSplitMgr.getSplits(schema, tableName);
        }
        catch (IOException e) {
            // Token-split computation failure is unrecoverable here; preserve the cause.
            throw new RuntimeException(e);
        }
        for (CassandraTokenSplitManager.TokenSplit tokenSplit : tokenSplits) {
            String condition = buildTokenCondition(tokenExpression, tokenSplit.getStartToken(), tokenSplit.getEndToken());
            List<HostAddress> addresses = new HostAddressFactory().AddressNamesToHostAddressList(tokenSplit.getHosts());
            CassandraSplit split = new CassandraSplit(connectorId, schema, tableName, partitionId, condition, addresses);
            builder.add(split);
        }

        return builder.build();
    }

    /** Renders a half-open token-range predicate: {@code expr > start AND expr <= end}. */
    private static String buildTokenCondition(String tokenExpression, String startToken, String endToken)
    {
        return tokenExpression + " > " + startToken + " AND " + tokenExpression + " <= " + endToken;
    }

    /**
     * Builds splits for an explicit partition list. Single-partition-key tables
     * are grouped by replica host set so many partitions can be fetched with a
     * single {@code IN (...)} query; multi-column partition keys get one split each.
     */
    private List<ConnectorSplit> getSplitsForPartitions(CassandraTableHandle cassTableHandle, List<CassandraPartition> partitions)
    {
        String schema = cassTableHandle.getSchemaName();
        String table = cassTableHandle.getTableName();
        HostAddressFactory hostAddressFactory = new HostAddressFactory();
        ImmutableList.Builder<ConnectorSplit> builder = ImmutableList.builder();

        // For single partition key column table, we can merge multiple partitions into a single split
        // by using IN CLAUSE in a single select query if the partitions have the same host list.
        // For multiple partition key columns table, we can't merge them into a single select query, so
        // keep them in a separate split.
        boolean singlePartitionKeyColumn = true;
        String partitionKeyColumnName = null;
        if (!partitions.isEmpty()) {
            singlePartitionKeyColumn = partitions.get(0).getTupleDomain().getDomains().get().size() == 1;
            if (singlePartitionKeyColumn) {
                String partitionId = partitions.get(0).getPartitionId();
                // Assumes the partition id has the form "<column> = <value>"; the "- 1"
                // strips the space before '='. NOTE(review): fragile if the format
                // ever changes — confirm against CassandraPartition.getPartitionId().
                partitionKeyColumnName = partitionId.substring(0, partitionId.lastIndexOf('=') - 1);
            }
        }

        // Replica host set -> partition key values with exactly that replica set.
        Map<Set<String>, Set<String>> hostsToPartitionKeys = new HashMap<>();
        // Replica host set -> resolved addresses to attach to the batched splits.
        Map<Set<String>, List<HostAddress>> hostMap = new HashMap<>();

        for (CassandraPartition cassandraPartition : partitions) {
            Set<Host> hosts = cassandraSession.getReplicas(schema, cassandraPartition.getKeyAsByteBuffer());
            List<HostAddress> addresses = hostAddressFactory.toHostAddressList(hosts);
            if (singlePartitionKeyColumn) {
                // host ip addresses
                ImmutableSet.Builder<String> sb = ImmutableSet.builder();
                for (HostAddress address : addresses) {
                    sb.add(address.getHostText());
                }
                Set<String> hostAddresses = sb.build();

                // partition key value: substring past "= " (the "+ 2" skips '=' and the space)
                String partitionId = cassandraPartition.getPartitionId();
                hostsToPartitionKeys
                        .computeIfAbsent(hostAddresses, ignored -> new HashSet<>())
                        .add(partitionId.substring(partitionId.lastIndexOf('=') + 2));
                hostMap.put(hostAddresses, addresses);
            }
            else {
                CassandraSplit split = new CassandraSplit(connectorId, schema, table, cassandraPartition.getPartitionId(), null, addresses);
                builder.add(split);
            }
        }

        if (singlePartitionKeyColumn) {
            for (Map.Entry<Set<String>, Set<String>> entry : hostsToPartitionKeys.entrySet()) {
                // Initial capacity reuses the batch-size config as a rough size hint.
                StringBuilder sb = new StringBuilder(partitionSizeForBatchSelect);
                int size = 0;
                for (String value : entry.getValue()) {
                    if (size > 0) {
                        sb.append(",");
                    }
                    sb.append(value);
                    size++;
                    // NOTE(review): '>' flushes at partitionSizeForBatchSelect + 1 keys;
                    // '>=' may have been intended — preserved as-is to avoid a behavior change.
                    if (size > partitionSizeForBatchSelect) {
                        String partitionId = String.format("%s in (%s)", partitionKeyColumnName, sb.toString());
                        CassandraSplit split = new CassandraSplit(connectorId, schema, table, partitionId, null, hostMap.get(entry.getKey()));
                        builder.add(split);
                        size = 0;
                        sb.setLength(0);
                        sb.trimToSize();
                    }
                }
                // Flush the final, possibly partial batch.
                if (size > 0) {
                    String partitionId = String.format("%s in (%s)", partitionKeyColumnName, sb.toString());
                    CassandraSplit split = new CassandraSplit(connectorId, schema, table, partitionId, null, hostMap.get(entry.getKey()));
                    builder.add(split);
                }
            }
        }
        return builder.build();
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("clientId", connectorId)
                .toString();
    }
}
| |
/*
* NOTE: This copyright does *not* cover user programs that use HQ
* program services by normal system calls through the application
* program interfaces provided as part of the Hyperic Plug-in Development
* Kit or the Hyperic Client Development Kit - this is merely considered
* normal use of the program, and does *not* fall under the heading of
* "derived work".
*
* Copyright (C) [2004, 2005, 2006], Hyperic, Inc.
* This file is part of HQ.
*
* HQ is free software; you can redistribute it and/or modify
* it under the terms version 2 of the GNU General Public License as
* published by the Free Software Foundation. This program is distributed
* in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA.
*/
package org.hyperic.hq.ui.action.portlet.addresource;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.util.LabelValueBean;
import org.hyperic.hq.ui.action.resource.ResourceForm;
/**
* A subclass of <code>ResourceForm</code> representing the
* <em>AddGroupResources</em> form. The purpose of this form is to add
* AppdefResourceValues to a AppdefGroupValue
*/
/**
 * A subclass of <code>ResourceForm</code> representing the
 * <em>AddGroupResources</em> form. The purpose of this form is to add
 * AppdefResourceValues to a AppdefGroupValue.
 *
 * Holds the "available" and "pending" resource id lists shown on the add
 * page, their page sizes (psa/psp), plus legacy filter state (ft/ff, name
 * filter, type/function option lists).
 */
public class AddResourcesForm
    extends ResourceForm {

    // -------------------------------------instance variables

    private String[] availableResources;
    private String[] pendingResources;
    private Integer psa;                // page size, "available" table
    private Integer psp;                // page size, "pending" table
    private List availResourceTypes;    // raw List: legacy Struts form, pre-generics
    private String key;
    private String ft;                  // filter type
    private Integer ff;                 // filter function id
    private List functions;             // LabelValueBean options (see addFunction)
    private List types;                 // LabelValueBean options (see addType)
    private String nameFilter;
    private String token;

    // -------------------------------------constructors

    public AddResourcesForm() {
        super();
        setDefaults();
    }

    // -------------------------------------public methods

    public String[] getAvailableResource() {
        return this.availableResources;
    }

    /** Plural alias of {@link #getAvailableResource()} kept for JSP compatibility. */
    public String[] getAvailableResources() {
        return getAvailableResource();
    }

    public void setAvailableResource(String[] availableResource) {
        this.availableResources = availableResource;
    }

    /** Plural alias of {@link #setAvailableResource(String[])} kept for JSP compatibility. */
    public void setAvailableResources(String[] availableResources) {
        setAvailableResource(availableResources);
    }

    public String[] getPendingResource() {
        return this.pendingResources;
    }

    /** Plural alias of {@link #getPendingResource()} kept for JSP compatibility. */
    public String[] getPendingResources() {
        return getPendingResource();
    }

    public void setPendingResource(String[] pendingResource) {
        this.pendingResources = pendingResource;
    }

    /** Plural alias of {@link #setPendingResource(String[])} kept for JSP compatibility. */
    public void setPendingResources(String[] pendingResources) {
        setPendingResource(pendingResources);
    }

    public Integer getPsa() {
        return this.psa;
    }

    public void setPsa(Integer ps) {
        this.psa = ps;
    }

    public Integer getPsp() {
        return this.psp;
    }

    public void setPsp(Integer ps) {
        this.psp = ps;
    }

    /** Resets the per-request fields, then delegates to the superclass reset. */
    public void reset(ActionMapping mapping, HttpServletRequest request) {
        this.availableResources = new String[0];
        this.pendingResources = new String[0];
        this.psa = null;
        this.psp = null;
        super.reset(mapping, request);
    }

    /** Debug representation: superclass state plus page sizes and both resource lists. */
    public String toString() {
        StringBuilder s = new StringBuilder(super.toString());
        s.append("psa=").append(psa).append(" ");
        s.append("psp=").append(psp).append(" ");
        s.append("availableResources={");
        appendJoined(s, availableResources);
        s.append("} ");
        s.append("pendingResources={");
        appendJoined(s, pendingResources);
        s.append("}");
        return s.toString();
    }

    /** Appends the array elements comma-separated; a null array appends nothing. */
    private static void appendJoined(StringBuilder s, String[] l) {
        if (l != null) {
            s.append(String.join(", ", l));
        }
    }

    /**
     * @return List
     */
    public List getAvailResourceTypes() {
        return availResourceTypes;
    }

    /**
     * Sets the availResourceTypes.
     * @param availResourceTypes The availResourceTypes to set
     */
    public void setAvailResourceTypes(List availResourceTypes) {
        this.availResourceTypes = availResourceTypes;
    }

    /**
     * Getter for property key.
     * @return Value of property key.
     */
    public String getKey() {
        return this.key;
    }

    /**
     * Setter for property key.
     * @param key New value of property key.
     */
    public void setKey(String key) {
        this.key = key;
    }

    /**
     * Getter for property ft.
     * @return Value of property ft.
     */
    public String getFt() {
        return this.ft;
    }

    /**
     * Setter for property ft.
     * @param ft New value of property ft.
     */
    public void setFt(String ft) {
        this.ft = ft;
    }

    /**
     * Getter for property ff.
     * @return Value of property ff.
     */
    public Integer getFf() {
        return this.ff;
    }

    /**
     * Setter for property ff.
     * @param ff New value of property ff.
     */
    public void setFf(Integer ff) {
        this.ff = ff;
    }

    /**
     * Getter for property functions.
     * @return Value of property functions.
     */
    public List getFunctions() {
        return this.functions;
    }

    /**
     * Setter for property functions.
     * @param functions New value of property functions.
     */
    public void setFunctions(List functions) {
        this.functions = functions;
    }

    /** Adds a function option; silently ignored when the list is null. */
    public void addFunction(LabelValueBean b) {
        if (this.functions != null) {
            this.functions.add(b);
        }
    }

    /**
     * Getter for property types.
     * @return Value of property types.
     */
    public List getTypes() {
        return this.types;
    }

    /**
     * Setter for property types.
     * @param types New value of property types.
     */
    public void setTypes(List types) {
        this.types = types;
    }

    /** Adds a type option; silently ignored when the list is null. */
    public void addType(LabelValueBean b) {
        if (this.types != null) {
            this.types.add(b);
        }
    }

    public String getNameFilter() {
        return nameFilter;
    }

    public void setNameFilter(String nameFilter) {
        this.nameFilter = nameFilter;
    }

    public String getToken() {
        return token;
    }

    public void setToken(String token) {
        this.token = token;
    }

    // ******************** support methods ***********************************

    /** Initial state shared by the constructor: clear filters, empty option lists. */
    private void setDefaults() {
        ff = null;
        ft = null;
        functions = new ArrayList();
        types = new ArrayList();
        nameFilter = null;
        token = null;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.presto.block.BlockEncodingManager;
import com.facebook.presto.metadata.BoundVariables;
import com.facebook.presto.metadata.FunctionKind;
import com.facebook.presto.metadata.FunctionRegistry;
import com.facebook.presto.metadata.LongVariableConstraint;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.operator.aggregation.AggregationImplementation;
import com.facebook.presto.operator.aggregation.AggregationMetadata;
import com.facebook.presto.operator.aggregation.InternalAggregationFunction;
import com.facebook.presto.operator.aggregation.LazyAccumulatorFactoryBinder;
import com.facebook.presto.operator.aggregation.ParametricAggregation;
import com.facebook.presto.operator.aggregation.state.NullableDoubleState;
import com.facebook.presto.operator.aggregation.state.NullableDoubleStateSerializer;
import com.facebook.presto.operator.aggregation.state.NullableLongState;
import com.facebook.presto.operator.aggregation.state.SliceState;
import com.facebook.presto.operator.annotations.LiteralImplementationDependency;
import com.facebook.presto.operator.annotations.OperatorImplementationDependency;
import com.facebook.presto.operator.annotations.TypeImplementationDependency;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.function.AccumulatorStateSerializer;
import com.facebook.presto.spi.function.AggregationFunction;
import com.facebook.presto.spi.function.AggregationState;
import com.facebook.presto.spi.function.AggregationStateSerializerFactory;
import com.facebook.presto.spi.function.BlockIndex;
import com.facebook.presto.spi.function.BlockPosition;
import com.facebook.presto.spi.function.CombineFunction;
import com.facebook.presto.spi.function.Description;
import com.facebook.presto.spi.function.InputFunction;
import com.facebook.presto.spi.function.LiteralParameters;
import com.facebook.presto.spi.function.OperatorDependency;
import com.facebook.presto.spi.function.OutputFunction;
import com.facebook.presto.spi.function.SqlType;
import com.facebook.presto.spi.function.TypeParameter;
import com.facebook.presto.spi.function.TypeParameterSpecialization;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.spi.type.TypeSignature;
import com.facebook.presto.spi.type.TypeSignatureParameter;
import com.facebook.presto.spi.type.VarcharType;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.type.Constraint;
import com.facebook.presto.type.LiteralParameter;
import com.facebook.presto.type.TypeRegistry;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.slice.Slice;
import org.testng.annotations.Test;
import java.lang.invoke.MethodHandle;
import java.util.List;
import static com.facebook.presto.metadata.Signature.typeVariable;
import static com.facebook.presto.operator.aggregation.AggregationFromAnnotationsParser.parseFunctionDefinition;
import static com.facebook.presto.operator.aggregation.AggregationFromAnnotationsParser.parseFunctionDefinitions;
import static com.facebook.presto.spi.function.OperatorType.LESS_THAN;
import static com.facebook.presto.spi.type.StandardTypes.ARRAY;
import static com.facebook.presto.spi.type.StandardTypes.DOUBLE;
import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public class TestAnnotationEngineForAggregates
extends TestAnnotationEngine
{
    /**
     * Fixture: the simplest possible aggregation — exact signatures, no type
     * parameters, no dependencies. All three lifecycle functions are empty
     * because only the annotation parser ever looks at this class.
     */
    @AggregationFunction("simple_exact_aggregate")
    @Description("Simple exact aggregate description")
    public static class ExactAggregationFunction
    {
        @InputFunction
        public static void input(@AggregationState NullableDoubleState state, @SqlType(DOUBLE) double value)
        {
            // noop this is only for annotation testing purposes
        }

        @CombineFunction
        public static void combine(@AggregationState NullableDoubleState combine1, @AggregationState NullableDoubleState combine2)
        {
            // noop this is only for annotation testing purposes
        }

        @OutputFunction(DOUBLE)
        public static void output(@AggregationState NullableDoubleState state, BlockBuilder out)
        {
            // noop this is only for annotation testing purposes
        }
    }
@Test
public void testSimpleExactAggregationParse()
{
    // Expected: exact DOUBLE -> DOUBLE signature with no type variables.
    Signature expectedSignature = new Signature(
            "simple_exact_aggregate",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));

    ParametricAggregation aggregation = parseFunctionDefinition(ExactAggregationFunction.class);
    assertEquals(aggregation.getDescription(), "Simple exact aggregate description");
    assertTrue(aggregation.isDeterministic());
    assertEquals(aggregation.getSignature(), expectedSignature);

    // Exactly one exact implementation; no specialized or generic ones.
    ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
    assertImplementationCount(implementations, 1, 0, 0);
    AggregationImplementation implementation = getOnlyElement(implementations.getExactImplementations().values());
    assertFalse(implementation.getStateSerializerFactory().isPresent());
    assertEquals(implementation.getDefinitionClass(), ExactAggregationFunction.class);
    assertDependencyCount(implementation, 0, 0, 0);
    assertFalse(implementation.hasSpecializedTypeParameters());
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    // assertEquals (not assertTrue(a.equals(b))) so a failure reports both values.
    assertEquals(implementation.getInputParameterMetadataTypes(), expectedMetadataTypes);

    // Specializing with no bound variables yields the concrete function.
    InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().build(), 1, new TypeRegistry(), null);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "simple_exact_aggregate");
}
/**
 * Test fixture: the @AggregationState parameter is deliberately NOT in the
 * leading position (input: value first, output: BlockBuilder first), to
 * verify the parser handles state parameters at arbitrary positions.
 */
@AggregationFunction("simple_exact_aggregate_aggregation_state_moved")
@Description("Simple exact function which has @AggregationState on different than first positions")
public static class StateOnDifferentThanFirstPositionAggregationFunction
{
    @InputFunction
    public static void input(@SqlType(DOUBLE) double value, @AggregationState NullableDoubleState state)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(@AggregationState NullableDoubleState combine1, @AggregationState NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction(DOUBLE)
    public static void output(BlockBuilder out, @AggregationState NullableDoubleState state)
    {
        // noop this is only for annotation testing purposes
    }
}
@Test
public void testStateOnDifferentThanFirstPositionAggregationParse()
{
    Signature expectedSignature = new Signature(
            "simple_exact_aggregate_aggregation_state_moved",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));

    ParametricAggregation aggregation = parseFunctionDefinition(StateOnDifferentThanFirstPositionAggregationFunction.class);
    assertEquals(aggregation.getSignature(), expectedSignature);

    AggregationImplementation implementation = getOnlyElement(aggregation.getImplementations().getExactImplementations().values());
    assertEquals(implementation.getDefinitionClass(), StateOnDifferentThanFirstPositionAggregationFunction.class);
    // State follows the input channel, mirroring the parameter order in the fixture.
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL, AggregationMetadata.ParameterMetadata.ParameterType.STATE);
    // assertEquals (not assertTrue(a.equals(b))) so a failure reports both values.
    assertEquals(implementation.getInputParameterMetadataTypes(), expectedMetadataTypes);
}
/**
 * Test fixture: state parameters carry NO @AggregationState annotation,
 * verifying the parser recognizes state by type alone.
 */
@AggregationFunction("no_aggregation_state_aggregate")
@Description("Aggregate with no @AggregationState annotations")
public static class NotAnnotatedAggregateStateAggregationFunction
{
    @InputFunction
    public static void input(NullableDoubleState state, @SqlType(DOUBLE) double value)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(NullableDoubleState combine1, NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction(DOUBLE)
    public static void output(NullableDoubleState state, BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }
}
@Test
public void testNotAnnotatedAggregateStateAggregationParse()
{
    ParametricAggregation aggregation = parseFunctionDefinition(NotAnnotatedAggregateStateAggregationFunction.class);

    // State is still detected (by type) even without @AggregationState.
    AggregationImplementation implementation = getOnlyElement(aggregation.getImplementations().getExactImplementations().values());
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    // assertEquals (not assertTrue(a.equals(b))) so a failure reports both values.
    assertEquals(implementation.getInputParameterMetadataTypes(), expectedMetadataTypes);

    InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().build(), 1, new TypeRegistry(), null);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "no_aggregation_state_aggregate");
}
/**
 * Test fixture: supplies a custom state serializer through
 * {@code @AggregationStateSerializerFactory}.
 */
// NOTE(review): the @Description below looks copy-pasted from
// NotAnnotatedAggregateStateAggregationFunction — confirm intended wording.
@AggregationFunction("custom_serializer_aggregate")
@Description("Aggregate with no @AggregationState annotations")
public static class CustomStateSerializerAggregationFunction
{
    // Custom serializer type returned by the factory below; tests check the
    // factory's declared return type against the serializer actually created.
    public static class CustomSerializer
            extends NullableDoubleStateSerializer
    {
    }

    @InputFunction
    public static void input(
            @AggregationState NullableDoubleState state,
            @SqlType(DOUBLE) double value)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableDoubleState combine1,
            @AggregationState NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction(DOUBLE)
    public static void output(@AggregationState NullableDoubleState state, BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @AggregationStateSerializerFactory(NullableDoubleState.class)
    public static CustomSerializer createSerializer()
    {
        return new CustomSerializer();
    }
}
@Test
public void testCustomStateSerializerAggregationParse()
{
    ParametricAggregation parsed = parseFunctionDefinition(CustomStateSerializerAggregationFunction.class);
    AggregationImplementation onlyImplementation = getOnlyElement(parsed.getImplementations().getExactImplementations().values());

    // The @AggregationStateSerializerFactory method must be picked up.
    assertTrue(onlyImplementation.getStateSerializerFactory().isPresent());

    // Specialize and verify the serializer actually instantiated matches the
    // factory's declared return type.
    InternalAggregationFunction function = parsed.specialize(BoundVariables.builder().build(), 1, new TypeRegistry(), null);
    AccumulatorStateSerializer<?> actualSerializer = getOnlyElement(((LazyAccumulatorFactoryBinder) function.getAccumulatorFactoryBinder())
            .getGenericAccumulatorFactoryBinder().getStateDescriptors()).getSerializer();
    Class<?> declaredSerializerType = onlyImplementation.getStateSerializerFactory().get().type().returnType();
    assertTrue(declaredSerializerType.isInstance(actualSerializer));
}
/**
 * Test fixture: declares {@code decomposable = false} in the
 * {@code @AggregationFunction} annotation, plus a custom serializer factory.
 */
@AggregationFunction(value = "custom_decomposable_aggregate", decomposable = false)
@Description("Aggregate with Decomposable=false")
public static class NotDecomposableAggregationFunction
{
    @InputFunction
    public static void input(
            @AggregationState NullableDoubleState state,
            @SqlType(DOUBLE) double value)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableDoubleState combine1,
            @AggregationState NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction(DOUBLE)
    public static void output(
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @AggregationStateSerializerFactory(NullableDoubleState.class)
    public static AccumulatorStateSerializer<?> createSerializer()
    {
        return new CustomStateSerializerAggregationFunction.CustomSerializer();
    }
}
@Test
public void testNotDecomposableAggregationParse()
{
    // The declared signature: exact DOUBLE -> DOUBLE, no type variables.
    Signature signature = new Signature(
            "custom_decomposable_aggregate",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));

    ParametricAggregation parsed = parseFunctionDefinition(NotDecomposableAggregationFunction.class);
    assertEquals(parsed.getDescription(), "Aggregate with Decomposable=false");
    assertTrue(parsed.isDeterministic());
    assertEquals(parsed.getSignature(), signature);

    // decomposable=false on the annotation must carry through specialization.
    InternalAggregationFunction function = parsed.specialize(BoundVariables.builder().build(), 1, new TypeRegistry(), null);
    assertEquals(function.getFinalType(), DoubleType.DOUBLE);
    assertFalse(function.isDecomposable());
    assertEquals(function.name(), "custom_decomposable_aggregate");
}
/**
 * Test fixture: two generic implementations over type variable T — one with
 * double-backed state (NullableDoubleState) and one with long-backed state
 * (NullableLongState) — each with matching combine/output overloads.
 */
@AggregationFunction("simple_generic_implementations")
@Description("Simple aggregate with two generic implementations")
public static class GenericAggregationFunction
{
    @InputFunction
    @TypeParameter("T")
    public static void input(
            @AggregationState NullableDoubleState state,
            @SqlType("T") double value)
    {
        // noop this is only for annotation testing purposes
    }

    @InputFunction
    @TypeParameter("T")
    public static void input(
            @AggregationState NullableLongState state,
            @SqlType("T") long value)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableLongState state,
            @AggregationState NullableLongState otherState)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableDoubleState state,
            @AggregationState NullableDoubleState otherState)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction("T")
    public static void output(
            @AggregationState NullableLongState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction("T")
    public static void output(
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }
}
@Test
public void testSimpleGenericAggregationFunctionParse()
{
    // Expected: generic T -> T signature with one unconstrained type variable.
    Signature expectedSignature = new Signature(
            "simple_generic_implementations",
            FunctionKind.AGGREGATE,
            ImmutableList.of(typeVariable("T")),
            ImmutableList.of(),
            parseTypeSignature("T"),
            ImmutableList.of(parseTypeSignature("T")),
            false);

    ParametricAggregation aggregation = parseFunctionDefinition(GenericAggregationFunction.class);
    assertEquals(aggregation.getDescription(), "Simple aggregate with two generic implementations");
    assertTrue(aggregation.isDeterministic());
    assertEquals(aggregation.getSignature(), expectedSignature);

    // Both overloads land in the generic bucket; none exact or specialized.
    ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
    assertImplementationCount(implementations, 0, 0, 2);

    // Note: assertEquals is used below instead of assertTrue(a.equals(b)) so
    // failures report the mismatched values.
    AggregationImplementation implementationDouble = implementations.getGenericImplementations().stream().filter(impl -> impl.getStateClass() == NullableDoubleState.class).collect(toImmutableList()).get(0);
    assertFalse(implementationDouble.getStateSerializerFactory().isPresent());
    assertEquals(implementationDouble.getDefinitionClass(), GenericAggregationFunction.class);
    assertDependencyCount(implementationDouble, 0, 0, 0);
    assertFalse(implementationDouble.hasSpecializedTypeParameters());
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    assertEquals(implementationDouble.getInputParameterMetadataTypes(), expectedMetadataTypes);
    assertEquals(implementationDouble.getStateClass(), NullableDoubleState.class);

    AggregationImplementation implementationLong = implementations.getGenericImplementations().stream().filter(impl -> impl.getStateClass() == NullableLongState.class).collect(toImmutableList()).get(0);
    assertFalse(implementationLong.getStateSerializerFactory().isPresent());
    assertEquals(implementationLong.getDefinitionClass(), GenericAggregationFunction.class);
    assertDependencyCount(implementationLong, 0, 0, 0);
    assertFalse(implementationLong.hasSpecializedTypeParameters());
    assertEquals(implementationLong.getInputParameterMetadataTypes(), expectedMetadataTypes);
    assertEquals(implementationLong.getStateClass(), NullableLongState.class);

    // Binding T=DOUBLE should select the double-state implementation.
    InternalAggregationFunction specialized = aggregation.specialize(
            BoundVariables.builder().setTypeVariable("T", DoubleType.DOUBLE).build(),
            1,
            new TypeRegistry(),
            null);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertEquals(specialized.getParameterTypes(), ImmutableList.of(DoubleType.DOUBLE));
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "simple_generic_implementations");
}
/**
 * Test fixture: input consumes a raw Block position via @BlockPosition and
 * @BlockIndex instead of a decoded value.
 */
@AggregationFunction("block_input_aggregate")
@Description("Simple aggregate with @BlockPosition usage")
public static class BlockInputAggregationFunction
{
    @InputFunction
    public static void input(
            @AggregationState NullableDoubleState state,
            @BlockPosition @SqlType(DOUBLE) Block value,
            @BlockIndex int id)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableDoubleState combine1,
            @AggregationState NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction(DOUBLE)
    public static void output(
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }
}
@Test
public void testSimpleBlockInputAggregationParse()
{
    // Declared signature is still plain DOUBLE -> DOUBLE; @BlockPosition only
    // changes how the input channel is delivered, not the SQL signature.
    Signature signature = new Signature(
            "block_input_aggregate",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));

    ParametricAggregation parsed = parseFunctionDefinition(BlockInputAggregationFunction.class);
    assertEquals(parsed.getDescription(), "Simple aggregate with @BlockPosition usage");
    assertTrue(parsed.isDeterministic());
    assertEquals(parsed.getSignature(), signature);

    ParametricImplementationsGroup<AggregationImplementation> group = parsed.getImplementations();
    assertImplementationCount(group, 1, 0, 0);
    AggregationImplementation onlyImplementation = getOnlyElement(group.getExactImplementations().values());
    assertFalse(onlyImplementation.getStateSerializerFactory().isPresent());
    assertEquals(onlyImplementation.getDefinitionClass(), BlockInputAggregationFunction.class);
    assertDependencyCount(onlyImplementation, 0, 0, 0);
    assertFalse(onlyImplementation.hasSpecializedTypeParameters());

    // Parameter metadata reflects the block-based input: STATE, then the block
    // channel, then the position index.
    List<AggregationMetadata.ParameterMetadata.ParameterType> metadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.BLOCK_INPUT_CHANNEL, AggregationMetadata.ParameterMetadata.ParameterType.BLOCK_INDEX);
    assertEquals(onlyImplementation.getInputParameterMetadataTypes(), metadataTypes);

    InternalAggregationFunction function = parsed.specialize(BoundVariables.builder().build(), 1, new TypeRegistry(), null);
    assertEquals(function.getFinalType(), DoubleType.DOUBLE);
    assertTrue(function.isDecomposable());
    assertEquals(function.name(), "block_input_aggregate");
}
/**
 * Test fixture: two implementations whose specialization over T is implicit in
 * the native parameter types (double vs long) rather than declared via
 * {@code @TypeParameterSpecialization}. Parsing of this form is not yet
 * supported — see the disabled test below.
 */
@AggregationFunction("implicit_specialized_aggregate")
@Description("Simple implicit specialized aggregate")
public static class ImplicitSpecializedAggregationFunction
{
    @InputFunction
    @TypeParameter("T")
    public static void input(
            @AggregationState NullableDoubleState state,
            @SqlType("array(T)") Block arrayBlock, @SqlType("T") double additionalValue)
    {
        // noop this is only for annotation testing purposes
    }

    @InputFunction
    @TypeParameter("T")
    public static void input(
            @AggregationState NullableLongState state,
            @SqlType("array(T)") Block arrayBlock, @SqlType("T") long additionalValue)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableLongState state,
            @AggregationState NullableLongState otherState)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableDoubleState state,
            @AggregationState NullableDoubleState otherState)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction("T")
    public static void output(
            @AggregationState NullableLongState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction("T")
    public static void output(
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }
}
// @Test - this is not yet supported
public void testSimpleImplicitSpecializedAggregationParse()
{
    Signature expectedSignature = new Signature(
            "implicit_specialized_aggregate",
            FunctionKind.AGGREGATE,
            ImmutableList.of(typeVariable("T")),
            ImmutableList.of(),
            parseTypeSignature("T"),
            ImmutableList.of(new TypeSignature(ARRAY, TypeSignatureParameter.of(parseTypeSignature("T"))), parseTypeSignature("T")),
            false);

    ParametricAggregation aggregation = parseFunctionDefinition(ImplicitSpecializedAggregationFunction.class);
    assertEquals(aggregation.getDescription(), "Simple implicit specialized aggregate");
    assertTrue(aggregation.isDeterministic());
    assertEquals(aggregation.getSignature(), expectedSignature);

    ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
    assertImplementationCount(implementations, 0, 0, 2);

    // Fixed: the original asserted both assertTrue and assertFalse on the same
    // hasSpecializedTypeParameters() call, which could never pass. Specialized
    // implementations are expected to have specialized type parameters, so only
    // the positive assertion is kept. (Test is disabled; re-verify when enabled.)
    AggregationImplementation implementation1 = implementations.getSpecializedImplementations().get(0);
    assertTrue(implementation1.hasSpecializedTypeParameters());
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    assertEquals(implementation1.getInputParameterMetadataTypes(), expectedMetadataTypes);

    AggregationImplementation implementation2 = implementations.getSpecializedImplementations().get(1);
    assertTrue(implementation2.hasSpecializedTypeParameters());
    assertEquals(implementation2.getInputParameterMetadataTypes(), expectedMetadataTypes);

    InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().setTypeVariable("T", DoubleType.DOUBLE).build(), 1, new TypeRegistry(), null);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "implicit_specialized_aggregate");
}
/**
 * Test fixture: one implementation explicitly specialized for T=double via
 * {@code @TypeParameterSpecialization}, alongside a generic long-state one.
 * Parsing of this form is not yet supported — see the disabled test below.
 */
@AggregationFunction("explicit_specialized_aggregate")
@Description("Simple explicit specialized aggregate")
public static class ExplicitSpecializedAggregationFunction
{
    @InputFunction
    @TypeParameterSpecialization(name = "T", nativeContainerType = double.class)
    @TypeParameter("T")
    public static void input(
            @AggregationState NullableDoubleState state,
            @SqlType("array(T)") Block arrayBlock)
    {
        // noop this is only for annotation testing purposes
    }

    @InputFunction
    @TypeParameter("T")
    public static void input(
            @AggregationState NullableLongState state,
            @SqlType("array(T)") Block arrayBlock)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableLongState state,
            @AggregationState NullableLongState otherState)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableDoubleState state,
            @AggregationState NullableDoubleState otherState)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction("T")
    public static void output(
            @AggregationState NullableLongState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction("T")
    public static void output(
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }
}
// @Test - this is not yet supported
public void testSimpleExplicitSpecializedAggregationParse()
{
    Signature expectedSignature = new Signature(
            "explicit_specialized_aggregate",
            FunctionKind.AGGREGATE,
            ImmutableList.of(typeVariable("T")),
            ImmutableList.of(),
            parseTypeSignature("T"),
            ImmutableList.of(new TypeSignature(ARRAY, TypeSignatureParameter.of(parseTypeSignature("T")))),
            false);

    ParametricAggregation aggregation = parseFunctionDefinition(ExplicitSpecializedAggregationFunction.class);
    assertEquals(aggregation.getDescription(), "Simple explicit specialized aggregate");
    assertTrue(aggregation.isDeterministic());
    assertEquals(aggregation.getSignature(), expectedSignature);

    ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
    assertImplementationCount(implementations, 0, 1, 1);

    // Fixed: the original asserted both assertTrue and assertFalse on the same
    // hasSpecializedTypeParameters() call, which could never pass. Specialized
    // implementations are expected to have specialized type parameters, so only
    // the positive assertion is kept. (Test is disabled; re-verify when enabled.)
    AggregationImplementation implementation1 = implementations.getSpecializedImplementations().get(0);
    assertTrue(implementation1.hasSpecializedTypeParameters());
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    assertEquals(implementation1.getInputParameterMetadataTypes(), expectedMetadataTypes);

    AggregationImplementation implementation2 = implementations.getSpecializedImplementations().get(1);
    assertTrue(implementation2.hasSpecializedTypeParameters());
    assertEquals(implementation2.getInputParameterMetadataTypes(), expectedMetadataTypes);

    InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().setTypeVariable("T", DoubleType.DOUBLE).build(), 1, new TypeRegistry(), null);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    // Fixed copy-paste bug: the original asserted "implicit_specialized_aggregate",
    // but this function is registered as "explicit_specialized_aggregate".
    assertEquals(specialized.name(), "explicit_specialized_aggregate");
}
/**
 * Test fixture: one input/combine pair shared by two @OutputFunction methods,
 * each re-annotated with its own @AggregationFunction name — producing two
 * distinct aggregations from a single definition class. output1 also overrides
 * the class-level @Description; output2 inherits the generic one.
 */
@AggregationFunction("multi_output_aggregate")
@Description("Simple multi output function aggregate generic description")
public static class MultiOutputAggregationFunction
{
    @InputFunction
    public static void input(
            @AggregationState NullableDoubleState state,
            @SqlType(DOUBLE) double value)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @AggregationState NullableDoubleState combine1,
            @AggregationState NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @AggregationFunction("multi_output_aggregate_1")
    @Description("Simple multi output function aggregate specialized description")
    @OutputFunction(DOUBLE)
    public static void output1(
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @AggregationFunction("multi_output_aggregate_2")
    @OutputFunction(DOUBLE)
    public static void output2(
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }
}
@Test
public void testMultiOutputAggregationParse()
{
    Signature expectedSignature1 = new Signature(
            "multi_output_aggregate_1",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));
    Signature expectedSignature2 = new Signature(
            "multi_output_aggregate_2",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));

    // One definition class with two named @OutputFunctions yields two aggregations.
    List<ParametricAggregation> aggregations = parseFunctionDefinitions(MultiOutputAggregationFunction.class);
    assertEquals(aggregations.size(), 2);

    ParametricAggregation aggregation1 = aggregations.stream().filter(aggregate -> aggregate.getSignature().getName().equals("multi_output_aggregate_1")).collect(toImmutableList()).get(0);
    assertEquals(aggregation1.getSignature(), expectedSignature1);
    // output1 overrides the class-level description.
    assertEquals(aggregation1.getDescription(), "Simple multi output function aggregate specialized description");

    ParametricAggregation aggregation2 = aggregations.stream().filter(aggregate -> aggregate.getSignature().getName().equals("multi_output_aggregate_2")).collect(toImmutableList()).get(0);
    assertEquals(aggregation2.getSignature(), expectedSignature2);
    // output2 falls back to the class-level description.
    assertEquals(aggregation2.getDescription(), "Simple multi output function aggregate generic description");

    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    ParametricImplementationsGroup<AggregationImplementation> implementations1 = aggregation1.getImplementations();
    assertImplementationCount(implementations1, 1, 0, 0);
    ParametricImplementationsGroup<AggregationImplementation> implementations2 = aggregation2.getImplementations();
    assertImplementationCount(implementations2, 1, 0, 0);

    AggregationImplementation implementation = getOnlyElement(implementations1.getExactImplementations().values());
    assertFalse(implementation.getStateSerializerFactory().isPresent());
    assertEquals(implementation.getDefinitionClass(), MultiOutputAggregationFunction.class);
    assertDependencyCount(implementation, 0, 0, 0);
    assertFalse(implementation.hasSpecializedTypeParameters());
    // assertEquals (not assertTrue(a.equals(b))) so a failure reports both values.
    assertEquals(implementation.getInputParameterMetadataTypes(), expectedMetadataTypes);

    InternalAggregationFunction specialized = aggregation1.specialize(BoundVariables.builder().build(), 1, new TypeRegistry(), null);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "multi_output_aggregate_1");
}
/**
 * Test fixture: every lifecycle method (input, combine, output, serializer
 * factory) declares an @OperatorDependency on LESS_THAN(double, double),
 * verifying operator-handle injection is parsed for each.
 */
@AggregationFunction("inject_operator_aggregate")
@Description("Simple aggregate with operator injected")
public static class InjectOperatorAggregateFunction
{
    @InputFunction
    public static void input(
            @OperatorDependency(operator = LESS_THAN, returnType = StandardTypes.BOOLEAN, argumentTypes = {DOUBLE, DOUBLE}) MethodHandle methodHandle,
            @AggregationState NullableDoubleState state,
            @SqlType(DOUBLE) double value)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @OperatorDependency(operator = LESS_THAN, returnType = StandardTypes.BOOLEAN, argumentTypes = {DOUBLE, DOUBLE}) MethodHandle methodHandle,
            @AggregationState NullableDoubleState combine1,
            @AggregationState NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction(DOUBLE)
    public static void output(
            @OperatorDependency(operator = LESS_THAN, returnType = StandardTypes.BOOLEAN, argumentTypes = {DOUBLE, DOUBLE}) MethodHandle methodHandle,
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @AggregationStateSerializerFactory(NullableDoubleState.class)
    public static CustomStateSerializerAggregationFunction.CustomSerializer createSerializer(
            @OperatorDependency(operator = LESS_THAN, returnType = StandardTypes.BOOLEAN, argumentTypes = {DOUBLE, DOUBLE}) MethodHandle methodHandle)
    {
        return new CustomStateSerializerAggregationFunction.CustomSerializer();
    }
}
@Test
public void testInjectOperatorAggregateParse()
{
    Signature expectedSignature = new Signature(
            "inject_operator_aggregate",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));

    ParametricAggregation aggregation = parseFunctionDefinition(InjectOperatorAggregateFunction.class);
    assertEquals(aggregation.getDescription(), "Simple aggregate with operator injected");
    assertTrue(aggregation.isDeterministic());
    assertEquals(aggregation.getSignature(), expectedSignature);

    ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
    AggregationImplementation implementation = getOnlyElement(implementations.getExactImplementations().values());
    assertTrue(implementation.getStateSerializerFactory().isPresent());
    assertEquals(implementation.getDefinitionClass(), InjectOperatorAggregateFunction.class);

    // One operator dependency each on input, combine, output, and the
    // serializer factory.
    assertDependencyCount(implementation, 1, 1, 1);
    assertEquals(implementation.getStateSerializerFactoryDependencies().size(), 1);
    assertTrue(implementation.getInputDependencies().get(0) instanceof OperatorImplementationDependency);
    assertTrue(implementation.getCombineDependencies().get(0) instanceof OperatorImplementationDependency);
    assertTrue(implementation.getOutputDependencies().get(0) instanceof OperatorImplementationDependency);
    assertTrue(implementation.getStateSerializerFactoryDependencies().get(0) instanceof OperatorImplementationDependency);
    assertFalse(implementation.hasSpecializedTypeParameters());
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    // assertEquals (not assertTrue(a.equals(b))) so a failure reports both values.
    assertEquals(implementation.getInputParameterMetadataTypes(), expectedMetadataTypes);

    // Operator resolution requires a real FunctionRegistry, unlike the simpler tests.
    TypeManager typeRegistry = new TypeRegistry();
    FunctionRegistry functionRegistry = new FunctionRegistry(typeRegistry, new BlockEncodingManager(typeRegistry), new FeaturesConfig());
    InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().build(), 1, typeRegistry, functionRegistry);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "inject_operator_aggregate");
}
/**
 * Test fixture: every lifecycle method declares a @TypeParameter("T") Type
 * dependency, verifying Type injection is parsed for each.
 */
@AggregationFunction("inject_type_aggregate")
@Description("Simple aggregate with type injected")
public static class InjectTypeAggregateFunction
{
    @InputFunction
    @TypeParameter("T")
    public static void input(
            @TypeParameter("T") Type type,
            @AggregationState NullableDoubleState state,
            @SqlType("T") double value)
    {
        // noop this is only for annotation testing purposes
    }

    @CombineFunction
    public static void combine(
            @TypeParameter("T") Type type,
            @AggregationState NullableDoubleState combine1,
            @AggregationState NullableDoubleState combine2)
    {
        // noop this is only for annotation testing purposes
    }

    @OutputFunction("T")
    public static void output(
            @TypeParameter("T") Type type,
            @AggregationState NullableDoubleState state,
            BlockBuilder out)
    {
        // noop this is only for annotation testing purposes
    }

    @AggregationStateSerializerFactory(NullableDoubleState.class)
    public static CustomStateSerializerAggregationFunction.CustomSerializer createSerializer(
            @TypeParameter("T") Type type)
    {
        return new CustomStateSerializerAggregationFunction.CustomSerializer();
    }
}
@Test
public void testInjectTypeAggregateParse()
{
    Signature expectedSignature = new Signature(
            "inject_type_aggregate",
            FunctionKind.AGGREGATE,
            ImmutableList.of(typeVariable("T")),
            ImmutableList.of(),
            parseTypeSignature("T"),
            ImmutableList.of(parseTypeSignature("T")),
            false);

    ParametricAggregation aggregation = parseFunctionDefinition(InjectTypeAggregateFunction.class);
    assertEquals(aggregation.getDescription(), "Simple aggregate with type injected");
    assertTrue(aggregation.isDeterministic());
    assertEquals(aggregation.getSignature(), expectedSignature);

    ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
    assertEquals(implementations.getGenericImplementations().size(), 1);
    AggregationImplementation implementation = implementations.getGenericImplementations().get(0);
    assertTrue(implementation.getStateSerializerFactory().isPresent());
    assertEquals(implementation.getDefinitionClass(), InjectTypeAggregateFunction.class);

    // One type dependency each on input, combine, output, and the serializer factory.
    assertDependencyCount(implementation, 1, 1, 1);
    assertEquals(implementation.getStateSerializerFactoryDependencies().size(), 1);
    assertTrue(implementation.getInputDependencies().get(0) instanceof TypeImplementationDependency);
    assertTrue(implementation.getCombineDependencies().get(0) instanceof TypeImplementationDependency);
    assertTrue(implementation.getOutputDependencies().get(0) instanceof TypeImplementationDependency);
    assertTrue(implementation.getStateSerializerFactoryDependencies().get(0) instanceof TypeImplementationDependency);
    assertFalse(implementation.hasSpecializedTypeParameters());
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    // assertEquals (not assertTrue(a.equals(b))) so a failure reports both values.
    assertEquals(implementation.getInputParameterMetadataTypes(), expectedMetadataTypes);

    TypeManager typeRegistry = new TypeRegistry();
    FunctionRegistry functionRegistry = new FunctionRegistry(typeRegistry, new BlockEncodingManager(typeRegistry), new FeaturesConfig());
    InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().setTypeVariable("T", DoubleType.DOUBLE).build(), 1, typeRegistry, functionRegistry);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "inject_type_aggregate");
}
    /**
     * Test fixture: aggregation whose input, combine, output and state-serializer
     * factory methods all receive an injected {@code @LiteralParameter("x")} value.
     * All bodies are intentionally empty; only the annotations are parsed by the test.
     */
    @AggregationFunction("inject_literal_aggregate")
    @Description("Simple aggregate with type literal")
    public static class InjectLiteralAggregateFunction
    {
        @InputFunction
        @LiteralParameters("x")
        public static void input(
                @LiteralParameter("x") Long varcharSize,
                @AggregationState SliceState state,
                @SqlType("varchar(x)") Slice slice)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @CombineFunction
        public static void combine(
                @LiteralParameter("x") Long varcharSize,
                @AggregationState SliceState combine1,
                @AggregationState SliceState combine2)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @OutputFunction("varchar(x)")
        public static void output(
                @LiteralParameter("x") Long varcharSize,
                @AggregationState SliceState state,
                BlockBuilder out)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        // Factory also takes the literal dependency; the test asserts it is detected
        @AggregationStateSerializerFactory(SliceState.class)
        public static CustomStateSerializerAggregationFunction.CustomSerializer createSerializer(
                @LiteralParameter("x") Long varcharSize)
        {
            return new CustomStateSerializerAggregationFunction.CustomSerializer();
        }
    }
@Test
public void testInjectLiteralAggregateParse()
{
Signature expectedSignature = new Signature(
"inject_literal_aggregate",
FunctionKind.AGGREGATE,
parseTypeSignature("varchar(x)", ImmutableSet.of("x")),
ImmutableList.of(parseTypeSignature("varchar(x)", ImmutableSet.of("x"))));
ParametricAggregation aggregation = parseFunctionDefinition(InjectLiteralAggregateFunction.class);
assertEquals(aggregation.getDescription(), "Simple aggregate with type literal");
assertTrue(aggregation.isDeterministic());
assertEquals(aggregation.getSignature(), expectedSignature);
ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
assertEquals(implementations.getGenericImplementations().size(), 1);
AggregationImplementation implementation = implementations.getGenericImplementations().get(0);
assertTrue(implementation.getStateSerializerFactory().isPresent());
assertEquals(implementation.getDefinitionClass(), InjectLiteralAggregateFunction.class);
assertDependencyCount(implementation, 1, 1, 1);
assertEquals(implementation.getStateSerializerFactoryDependencies().size(), 1);
assertTrue(implementation.getInputDependencies().get(0) instanceof LiteralImplementationDependency);
assertTrue(implementation.getCombineDependencies().get(0) instanceof LiteralImplementationDependency);
assertTrue(implementation.getOutputDependencies().get(0) instanceof LiteralImplementationDependency);
assertTrue(implementation.getStateSerializerFactoryDependencies().get(0) instanceof LiteralImplementationDependency);
assertFalse(implementation.hasSpecializedTypeParameters());
List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
assertTrue(implementation.getInputParameterMetadataTypes().equals(expectedMetadataTypes));
TypeManager typeRegistry = new TypeRegistry();
FunctionRegistry functionRegistry = new FunctionRegistry(typeRegistry, new BlockEncodingManager(typeRegistry), new FeaturesConfig());
InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().setLongVariable("x", 17L).build(), 1, typeRegistry, functionRegistry);
assertEquals(specialized.getFinalType(), VarcharType.createVarcharType(17));
assertTrue(specialized.isDecomposable());
assertEquals(specialized.name(), "inject_literal_aggregate");
}
    /**
     * Test fixture: aggregation using a {@code @Constraint} to derive the long
     * variable "z" from "x + y", so the output type varchar(z) depends on both
     * input lengths. Bodies are intentionally empty; only annotations are parsed.
     */
    @AggregationFunction("parametric_aggregate_long_constraint")
    @Description("Parametric aggregate with parametric type returned")
    public static class LongConstraintAggregateFunction
    {
        @InputFunction
        @LiteralParameters({"x", "y", "z"})
        @Constraint(variable = "z", expression = "x + y")
        public static void input(
                @AggregationState SliceState state,
                @SqlType("varchar(x)") Slice slice1,
                @SqlType("varchar(y)") Slice slice2)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @CombineFunction
        public static void combine(
                @AggregationState SliceState combine1,
                @AggregationState SliceState combine2)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @OutputFunction("varchar(z)")
        public static void output(
                @AggregationState SliceState state,
                BlockBuilder out)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
    }
@Test
public void testLongConstraintAggregateFunctionParse()
{
Signature expectedSignature = new Signature(
"parametric_aggregate_long_constraint",
FunctionKind.AGGREGATE,
ImmutableList.of(),
ImmutableList.of(new LongVariableConstraint("z", "x + y")),
parseTypeSignature("varchar(z)", ImmutableSet.of("z")),
ImmutableList.of(parseTypeSignature("varchar(x)", ImmutableSet.of("x")),
parseTypeSignature("varchar(y)", ImmutableSet.of("y"))),
false);
ParametricAggregation aggregation = parseFunctionDefinition(LongConstraintAggregateFunction.class);
assertEquals(aggregation.getDescription(), "Parametric aggregate with parametric type returned");
assertTrue(aggregation.isDeterministic());
assertEquals(aggregation.getSignature(), expectedSignature);
ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
assertEquals(implementations.getGenericImplementations().size(), 1);
AggregationImplementation implementation = implementations.getGenericImplementations().get(0);
assertTrue(!implementation.getStateSerializerFactory().isPresent());
assertEquals(implementation.getDefinitionClass(), LongConstraintAggregateFunction.class);
assertDependencyCount(implementation, 0, 0, 0);
assertEquals(implementation.getStateSerializerFactoryDependencies().size(), 0);
assertFalse(implementation.hasSpecializedTypeParameters());
List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
assertTrue(implementation.getInputParameterMetadataTypes().equals(expectedMetadataTypes));
TypeManager typeRegistry = new TypeRegistry();
FunctionRegistry functionRegistry = new FunctionRegistry(typeRegistry, new BlockEncodingManager(typeRegistry), new FeaturesConfig());
InternalAggregationFunction specialized = aggregation.specialize(
BoundVariables.builder()
.setLongVariable("x", 17L)
.setLongVariable("y", 13L)
.setLongVariable("z", 30L)
.build(), 2, typeRegistry, functionRegistry);
assertEquals(specialized.getFinalType(), VarcharType.createVarcharType(30));
assertTrue(specialized.isDecomposable());
assertEquals(specialized.name(), "parametric_aggregate_long_constraint");
}
    /**
     * Test fixture: aggregation whose methods receive a fully concrete
     * {@code @TypeParameter} (no free type variables), so the parser should
     * produce a single exact implementation. Bodies are intentionally empty;
     * only the annotations are parsed by the test.
     */
    @AggregationFunction("fixed_type_parameter_injection")
    @Description("Simple aggregate with fixed parameter type injected")
    public static class FixedTypeParameterInjectionAggregateFunction
    {
        @InputFunction
        public static void input(
                @TypeParameter("ROW(ARRAY(BIGINT),ROW(ROW(CHAR)),BIGINT,MAP(BIGINT,CHAR))") Type type,
                @AggregationState NullableDoubleState state,
                @SqlType("double") double value)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @CombineFunction
        public static void combine(
                @TypeParameter("ROW(ARRAY(BIGINT),ROW(ROW(CHAR)),BIGINT,MAP(BIGINT,CHAR))") Type type,
                @AggregationState NullableDoubleState state,
                @AggregationState NullableDoubleState otherState)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @OutputFunction("double")
        public static void output(
                @TypeParameter("ROW(ARRAY(BIGINT),ROW(ROW(CHAR)),BIGINT,MAP(BIGINT,CHAR))") Type type,
                @AggregationState NullableDoubleState state,
                BlockBuilder out)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
    }
@Test
public void testFixedTypeParameterInjectionAggregateFunctionParse()
{
Signature expectedSignature = new Signature(
"fixed_type_parameter_injection",
FunctionKind.AGGREGATE,
ImmutableList.of(),
ImmutableList.of(),
DoubleType.DOUBLE.getTypeSignature(),
ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()),
false);
ParametricAggregation aggregation = parseFunctionDefinition(FixedTypeParameterInjectionAggregateFunction.class);
assertEquals(aggregation.getDescription(), "Simple aggregate with fixed parameter type injected");
assertTrue(aggregation.isDeterministic());
assertEquals(aggregation.getSignature(), expectedSignature);
ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
assertImplementationCount(implementations, 1, 0, 0);
AggregationImplementation implementationDouble = implementations.getExactImplementations().get(expectedSignature);
assertFalse(implementationDouble.getStateSerializerFactory().isPresent());
assertEquals(implementationDouble.getDefinitionClass(), FixedTypeParameterInjectionAggregateFunction.class);
assertDependencyCount(implementationDouble, 1, 1, 1);
assertFalse(implementationDouble.hasSpecializedTypeParameters());
List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
assertTrue(implementationDouble.getInputParameterMetadataTypes().equals(expectedMetadataTypes));
assertEquals(implementationDouble.getStateClass(), NullableDoubleState.class);
}
    /**
     * Test fixture: aggregation whose injected {@code @TypeParameter} mixes free
     * type variables (T1, T2) with concrete types, so the parser should produce a
     * generic implementation rather than an exact one. Bodies are intentionally
     * empty; only the annotations are parsed by the test.
     */
    @AggregationFunction("partially_fixed_type_parameter_injection")
    @Description("Simple aggregate with fixed parameter type injected")
    public static class PartiallyFixedTypeParameterInjectionAggregateFunction
    {
        @InputFunction
        @TypeParameter("T1")
        @TypeParameter("T2")
        public static void input(
                @TypeParameter("ROW(ARRAY(T1),ROW(ROW(T2)),CHAR)") Type type,
                @AggregationState NullableDoubleState state,
                @SqlType("double") double value)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @CombineFunction
        @TypeParameter("T1")
        @TypeParameter("T2")
        public static void combine(
                @TypeParameter("ROW(ARRAY(T1),ROW(ROW(T2)),CHAR)") Type type,
                @AggregationState NullableDoubleState state,
                @AggregationState NullableDoubleState otherState)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
        @OutputFunction("double")
        @TypeParameter("T1")
        @TypeParameter("T2")
        public static void output(
                @TypeParameter("ROW(ARRAY(T1),ROW(ROW(T2)),CHAR)") Type type,
                @AggregationState NullableDoubleState state,
                BlockBuilder out)
        {
            // noop; this exists only for annotation-parsing test purposes
        }
    }
@Test
public void testPartiallyFixedTypeParameterInjectionAggregateFunctionParse()
{
Signature expectedSignature = new Signature(
"partially_fixed_type_parameter_injection",
FunctionKind.AGGREGATE,
ImmutableList.of(typeVariable("T1"), typeVariable("T2")),
ImmutableList.of(),
DoubleType.DOUBLE.getTypeSignature(),
ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()),
false);
ParametricAggregation aggregation = parseFunctionDefinition(PartiallyFixedTypeParameterInjectionAggregateFunction.class);
assertEquals(aggregation.getDescription(), "Simple aggregate with fixed parameter type injected");
assertTrue(aggregation.isDeterministic());
assertEquals(aggregation.getSignature(), expectedSignature);
ParametricImplementationsGroup<AggregationImplementation> implementations = aggregation.getImplementations();
assertImplementationCount(implementations, 0, 0, 1);
AggregationImplementation implementationDouble = implementations.getGenericImplementations().stream().filter(impl -> impl.getStateClass() == NullableDoubleState.class).collect(toImmutableList()).get(0);
assertFalse(implementationDouble.getStateSerializerFactory().isPresent());
assertEquals(implementationDouble.getDefinitionClass(), PartiallyFixedTypeParameterInjectionAggregateFunction.class);
assertDependencyCount(implementationDouble, 1, 1, 1);
assertFalse(implementationDouble.hasSpecializedTypeParameters());
List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes = ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
assertTrue(implementationDouble.getInputParameterMetadataTypes().equals(expectedMetadataTypes));
assertEquals(implementationDouble.getStateClass(), NullableDoubleState.class);
InternalAggregationFunction specialized = aggregation.specialize(
BoundVariables.builder().setTypeVariable("T1", DoubleType.DOUBLE).setTypeVariable("T2", DoubleType.DOUBLE).build(),
1,
new TypeRegistry(),
null);
assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
assertTrue(specialized.getParameterTypes().equals(ImmutableList.of(DoubleType.DOUBLE)));
assertTrue(specialized.isDecomposable());
assertEquals(specialized.name(), "partially_fixed_type_parameter_injection");
}
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.lint.checks;
import static com.android.SdkConstants.ANDROID_PKG_PREFIX;
import static com.android.SdkConstants.ANDROID_URI;
import static com.android.SdkConstants.ATTR_CLASS;
import static com.android.SdkConstants.ATTR_FRAGMENT;
import static com.android.SdkConstants.ATTR_NAME;
import static com.android.SdkConstants.CONSTRUCTOR_NAME;
import static com.android.SdkConstants.TAG_ACTIVITY;
import static com.android.SdkConstants.TAG_APPLICATION;
import static com.android.SdkConstants.TAG_HEADER;
import static com.android.SdkConstants.TAG_PROVIDER;
import static com.android.SdkConstants.TAG_RECEIVER;
import static com.android.SdkConstants.TAG_SERVICE;
import static com.android.SdkConstants.TAG_STRING;
import static com.android.SdkConstants.VIEW_FRAGMENT;
import static com.android.SdkConstants.VIEW_TAG;
import static com.android.resources.ResourceFolderType.LAYOUT;
import static com.android.resources.ResourceFolderType.VALUES;
import static com.android.resources.ResourceFolderType.XML;
import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.resources.ResourceFolderType;
import com.android.tools.lint.detector.api.Category;
import com.android.tools.lint.detector.api.ClassContext;
import com.android.tools.lint.detector.api.Context;
import com.android.tools.lint.detector.api.Detector.ClassScanner;
import com.android.tools.lint.detector.api.Implementation;
import com.android.tools.lint.detector.api.Issue;
import com.android.tools.lint.detector.api.LayoutDetector;
import com.android.tools.lint.detector.api.LintUtils;
import com.android.tools.lint.detector.api.Location;
import com.android.tools.lint.detector.api.Location.Handle;
import com.android.tools.lint.detector.api.Scope;
import com.android.tools.lint.detector.api.Severity;
import com.android.tools.lint.detector.api.Speed;
import com.android.tools.lint.detector.api.TextFormat;
import com.android.tools.lint.detector.api.XmlContext;
import com.android.utils.SdkUtils;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.tree.MethodNode;
import org.w3c.dom.Attr;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Checks to ensure that classes referenced in the manifest actually exist and are included
*
*/
public class MissingClassDetector extends LayoutDetector implements ClassScanner {
    /** Manifest-referenced classes missing from the project or libraries */
    public static final Issue MISSING = Issue.create(
            "MissingRegistered", //$NON-NLS-1$
            "Missing registered class",
            "If a class is referenced in the manifest, it must also exist in the project (or in one " +
            "of the libraries included by the project. This check helps uncover typos in " +
            "registration names, or attempts to rename or move classes without updating the " +
            "manifest file properly.",
            Category.CORRECTNESS,
            8,
            Severity.ERROR,
            new Implementation(
                    MissingClassDetector.class,
                    EnumSet.of(Scope.MANIFEST, Scope.CLASS_FILE,
                            Scope.JAVA_LIBRARIES, Scope.RESOURCE_FILE)))
            .addMoreInfo("http://developer.android.com/guide/topics/manifest/manifest-intro.html"); //$NON-NLS-1$
    /** Are activity, service, receiver etc subclasses instantiatable? */
    public static final Issue INSTANTIATABLE = Issue.create(
            "Instantiatable", //$NON-NLS-1$
            "Registered class is not instantiatable",
            "Activities, services, broadcast receivers etc. registered in the manifest file " +
            "must be \"instantiatable\" by the system, which means that the class must be " +
            "public, it must have an empty public constructor, and if it's an inner class, " +
            "it must be a static inner class.",
            Category.CORRECTNESS,
            6,
            Severity.FATAL,
            new Implementation(
                    MissingClassDetector.class,
                    Scope.CLASS_FILE_SCOPE));
    /** Is the right character used for inner class separators? */
    public static final Issue INNERCLASS = Issue.create(
            "InnerclassSeparator", //$NON-NLS-1$
            "Inner classes should use `$` rather than `.`",
            "When you reference an inner class in a manifest file, you must use '$' instead of '.' " +
            "as the separator character, i.e. Outer$Inner instead of Outer.Inner.\n" +
            "\n" +
            "(If you get this warning for a class which is not actually an inner class, it's " +
            "because you are using uppercase characters in your package name, which is not " +
            "conventional.)",
            Category.CORRECTNESS,
            3,
            Severity.WARNING,
            new Implementation(
                    MissingClassDetector.class,
                    Scope.MANIFEST_SCOPE));
    // Location handles for classes referenced from XML, keyed by internal
    // (slash-separated) class name; entries are removed as classes are seen
    private Map<String, Location.Handle> mReferencedClasses;
    // Internal names of classes referenced as custom views from layout files
    // (custom views are exempt from the default-constructor requirement below)
    private Set<String> mCustomViews;
    // True once at least one class file from the main project has been analyzed;
    // used to avoid reporting MISSING when no class files were available at all
    private boolean mHaveClasses;
    /** Constructs a new {@link MissingClassDetector} */
    public MissingClassDetector() {
    }
    @NonNull
    @Override
    public Speed getSpeed() {
        return Speed.FAST;
    }
    // ---- Implements XmlScanner ----
    @Override
    public Collection<String> getApplicableElements() {
        return ALL;
    }
    @Override
    public boolean appliesTo(@NonNull ResourceFolderType folderType) {
        return folderType == VALUES || folderType == LAYOUT || folderType == XML;
    }
    /**
     * Records every class reference found in manifest, layout, preference-header
     * and analytics XML into {@link #mReferencedClasses} so that the class phase
     * can cross them off, and flags `.`-separated inner-class names.
     */
    @Override
    public void visitElement(@NonNull XmlContext context, @NonNull Element element) {
        String pkg = null;
        Node classNameNode;
        String className;
        String tag = element.getTagName();
        ResourceFolderType folderType = context.getResourceFolderType();
        if (folderType == VALUES) {
            // values/: only <string> entries (analytics tracker names) are relevant
            if (!tag.equals(TAG_STRING)) {
                return;
            }
            Attr attr = element.getAttributeNode(ATTR_NAME);
            if (attr == null) {
                return;
            }
            className = attr.getValue();
            classNameNode = attr;
        } else if (folderType == LAYOUT) {
            if (tag.indexOf('.') > 0) {
                // Custom view referenced directly by its qualified tag name
                className = tag;
                classNameNode = element;
            } else if (tag.equals(VIEW_FRAGMENT) || tag.equals(VIEW_TAG)) {
                // <fragment android:name=...> or <view class=...>
                Attr attr = element.getAttributeNodeNS(ANDROID_URI, ATTR_NAME);
                if (attr == null) {
                    attr = element.getAttributeNode(ATTR_CLASS);
                }
                if (attr == null) {
                    return;
                }
                className = attr.getValue();
                classNameNode = attr;
            } else {
                return;
            }
        } else if (folderType == XML) {
            // xml/: preference headers referencing fragments
            if (!tag.equals(TAG_HEADER)) {
                return;
            }
            Attr attr = element.getAttributeNodeNS(ANDROID_URI, ATTR_FRAGMENT);
            if (attr == null) {
                return;
            }
            className = attr.getValue();
            classNameNode = attr;
        } else {
            // Manifest file
            if (TAG_APPLICATION.equals(tag)
                    || TAG_ACTIVITY.equals(tag)
                    || TAG_SERVICE.equals(tag)
                    || TAG_RECEIVER.equals(tag)
                    || TAG_PROVIDER.equals(tag)) {
                Attr attr = element.getAttributeNodeNS(ANDROID_URI, ATTR_NAME);
                if (attr == null) {
                    return;
                }
                className = attr.getValue();
                classNameNode = attr;
                pkg = context.getMainProject().getPackage();
            } else {
                return;
            }
        }
        if (className.isEmpty()) {
            return;
        }
        String fqcn;
        int dotIndex = className.indexOf('.');
        if (dotIndex <= 0) {
            if (pkg == null) {
                return; // value file
            }
            if (dotIndex == 0) {
                // Leading dot: name relative to the manifest package
                fqcn = pkg + className;
            } else {
                // According to the <activity> manifest element documentation, this is not
                // valid ( http://developer.android.com/guide/topics/manifest/activity-element.html )
                // but it appears in manifest files and appears to be supported by the runtime
                // so handle this in code as well:
                fqcn = pkg + '.' + className;
            }
        } else { // else: the class name is already a fully qualified class name
            fqcn = className;
            // Only look for fully qualified tracker names in analytics files
            if (folderType == VALUES
                    && !SdkUtils.endsWith(context.file.getPath(), "analytics.xml")) { //$NON-NLS-1$
                return;
            }
        }
        String signature = ClassContext.getInternalName(fqcn);
        if (signature.isEmpty() || signature.startsWith(ANDROID_PKG_PREFIX)) {
            return;
        }
        if (!context.getProject().getReportIssues()) {
            // If this is a library project not being analyzed, ignore it
            return;
        }
        Handle handle = null;
        if (!context.getDriver().isSuppressed(context, MISSING, element)) {
            if (mReferencedClasses == null) {
                mReferencedClasses = Maps.newHashMapWithExpectedSize(16);
                mCustomViews = Sets.newHashSetWithExpectedSize(8);
            }
            handle = context.createLocationHandle(element);
            mReferencedClasses.put(signature, handle);
            if (folderType == LAYOUT && !tag.equals(VIEW_FRAGMENT)) {
                mCustomViews.add(ClassContext.getInternalName(className));
            }
        }
        if (signature.indexOf('$') != -1) {
            checkInnerClass(context, element, pkg, classNameNode, className);
            // The internal name contains a $ which means it's an inner class.
            // The conversion from fqcn to internal name is a bit ambiguous:
            // "a.b.C.D" usually means "inner class D in class C in package a.b".
            // However, it can (see issue 31592) also mean class D in package "a.b.C".
            // To make sure we don't falsely complain that foo/Bar$Baz doesn't exist,
            // in case the user has actually created a package named foo/Bar and a proper
            // class named Baz, we register *both* into the reference map.
            // When generating errors we'll look for these and rip them back out if
            // it looks like one of the two variations have been seen.
            if (handle != null) {
                // Assume that each successive $ is really a capitalized package name
                // instead. In other words, for A$B$C$D (assumed to be class A with
                // inner classes A.B, A.B.C and A.B.C.D) generate the following possible
                // referenced classes A/B$C$D (class B in package A with inner classes C and C.D),
                // A/B/C$D and A/B/C/D
                while (true) {
                    int index = signature.indexOf('$');
                    if (index == -1) {
                        break;
                    }
                    signature = signature.substring(0, index) + '/'
                            + signature.substring(index + 1);
                    mReferencedClasses.put(signature, handle);
                    if (folderType == LAYOUT && !tag.equals(VIEW_FRAGMENT)) {
                        mCustomViews.add(signature);
                    }
                }
            }
        }
    }
    /**
     * Reports {@link #INNERCLASS} when a manifest class name uses {@code .}
     * where {@code $} is expected, unless the package itself contains uppercase
     * letters (in which case the dots may legitimately be package separators).
     */
    private static void checkInnerClass(XmlContext context, Element element, String pkg,
            Node classNameNode, String className) {
        if (pkg != null && className.indexOf('$') == -1 && className.indexOf('.', 1) > 0) {
            boolean haveUpperCase = false;
            for (int i = 0, n = pkg.length(); i < n; i++) {
                if (Character.isUpperCase(pkg.charAt(i))) {
                    haveUpperCase = true;
                    break;
                }
            }
            if (!haveUpperCase) {
                // Keep the first character as-is; only dots after it can be
                // inner-class separators
                String fixed = className.charAt(0) + className.substring(1).replace('.','$');
                String message = "Use '$' instead of '.' for inner classes " +
                        "(or use only lowercase letters in package names); replace \"" +
                        className + "\" with \"" + fixed + "\"";
                Location location = context.getLocation(classNameNode);
                context.report(INNERCLASS, element, location, message);
            }
        }
    }
    /**
     * After both XML and class phases have run, reports {@link #MISSING} for every
     * referenced class that was never seen, taking the {@code $}/{@code /}
     * permutations registered in {@code visitElement} into account.
     */
    @Override
    public void afterCheckProject(@NonNull Context context) {
        if (!context.getProject().isLibrary() && mHaveClasses
                && mReferencedClasses != null && !mReferencedClasses.isEmpty()
                && context.getDriver().getScope().contains(Scope.CLASS_FILE)) {
            List<String> classes = new ArrayList<String>(mReferencedClasses.keySet());
            Collections.sort(classes);
            for (String owner : classes) {
                Location.Handle handle = mReferencedClasses.get(owner);
                String fqcn = ClassContext.getFqcn(owner);
                String signature = ClassContext.getInternalName(fqcn);
                if (!signature.equals(owner)) {
                    // Alternate permutation of this name; only report it if the
                    // canonical spelling is also still unresolved
                    if (!mReferencedClasses.containsKey(signature)) {
                        continue;
                    }
                } else if (signature.indexOf('$') != -1) {
                    signature = signature.replace('$', '/');
                    if (!mReferencedClasses.containsKey(signature)) {
                        continue;
                    }
                }
                mReferencedClasses.remove(owner);
                // Ignore usages of platform libraries
                if (owner.startsWith("android/")) { //$NON-NLS-1$
                    continue;
                }
                String message = String.format(
                        "Class referenced in the manifest, `%1$s`, was not found in the " +
                        "project or the libraries", fqcn);
                Location location = handle.resolve();
                File parentFile = location.getFile().getParentFile();
                if (parentFile != null) {
                    // Tailor the message to the kind of file the reference came from
                    String parent = parentFile.getName();
                    ResourceFolderType type = ResourceFolderType.getFolderType(parent);
                    if (type == LAYOUT) {
                        message = String.format(
                            "Class referenced in the layout file, `%1$s`, was not found in "
                                + "the project or the libraries", fqcn);
                    } else if (type == XML) {
                        message = String.format(
                            "Class referenced in the preference header file, `%1$s`, was not "
                                + "found in the project or the libraries", fqcn);
                    } else if (type == VALUES) {
                        message = String.format(
                            "Class referenced in the analytics file, `%1$s`, was not "
                                + "found in the project or the libraries", fqcn);
                    }
                }
                context.report(MISSING, location, message);
            }
        }
    }
    // ---- Implements ClassScanner ----
    /**
     * Crosses the class off the referenced-class map and, if it was registered
     * from XML, checks that it is instantiatable by the framework.
     */
    @Override
    public void checkClass(@NonNull ClassContext context, @NonNull ClassNode classNode) {
        if (!mHaveClasses && !context.isFromClassLibrary()
                && context.getProject() == context.getMainProject()) {
            mHaveClasses = true;
        }
        String curr = classNode.name;
        if (mReferencedClasses != null && mReferencedClasses.containsKey(curr)) {
            boolean isCustomView = mCustomViews.contains(curr);
            removeReferences(curr);
            // Ensure that the class is public, static if it is an inner class,
            // and has a public default (no-argument) constructor
            if ((classNode.access & Opcodes.ACC_PUBLIC) == 0) {
                context.report(INSTANTIATABLE, context.getLocation(classNode), String.format(
                        "This class should be public (%1$s)",
                        ClassContext.createSignature(classNode.name, null, null)));
                return;
            }
            if (classNode.name.indexOf('$') != -1 && !LintUtils.isStaticInnerClass(classNode)) {
                context.report(INSTANTIATABLE, context.getLocation(classNode), String.format(
                        "This inner class should be static (%1$s)",
                        ClassContext.createSignature(classNode.name, null, null)));
                return;
            }
            boolean hasDefaultConstructor = false;
            @SuppressWarnings("rawtypes") // ASM API
            List methodList = classNode.methods;
            for (Object m : methodList) {
                MethodNode method = (MethodNode) m;
                if (method.name.equals(CONSTRUCTOR_NAME)) {
                    if (method.desc.equals("()V")) { //$NON-NLS-1$
                        // The constructor must be public
                        if ((method.access & Opcodes.ACC_PUBLIC) != 0) {
                            hasDefaultConstructor = true;
                        } else {
                            context.report(INSTANTIATABLE, context.getLocation(method, classNode),
                                    "The default constructor must be public");
                            // Also mark that we have a constructor so we don't complain again
                            // below since we've already emitted a more specific error related
                            // to the default constructor
                            hasDefaultConstructor = true;
                        }
                    }
                }
            }
            if (!hasDefaultConstructor && !isCustomView && !context.isFromClassLibrary()
                    && context.getProject().getReportIssues()) {
                context.report(INSTANTIATABLE, context.getLocation(classNode), String.format(
                        "This class should provide a default constructor (a public " +
                        "constructor with no arguments) (%1$s)",
                        ClassContext.createSignature(classNode.name, null, null)));
            }
        }
    }
    /**
     * Removes {@code curr} and all of its registered {@code $}/{@code /}
     * permutations from the referenced-class map.
     */
    private void removeReferences(String curr) {
        mReferencedClasses.remove(curr);
        // Since "A.B.C" is ambiguous whether it's referencing a class in package A.B or
        // an inner class C in package A, we insert multiple possible references when we
        // encounter the A.B.C reference; now that we've seen the actual class we need to
        // remove all the possible permutations we've added such that the permutations
        // don't count as unreferenced classes.
        int index = curr.lastIndexOf('/');
        if (index == -1) {
            return;
        }
        boolean hasCapitalizedPackageName = false;
        for (int i = index - 1; i >= 0; i--) {
            char c = curr.charAt(i);
            if (Character.isUpperCase(c)) {
                hasCapitalizedPackageName = true;
                break;
            }
        }
        if (!hasCapitalizedPackageName) {
            // No path ambiguity
            return;
        }
        // Fold each trailing package separator back into a '$' and remove that
        // permutation too
        while (true) {
            index = curr.lastIndexOf('/');
            if (index == -1) {
                break;
            }
            curr = curr.substring(0, index) + '$' + curr.substring(index + 1);
            mReferencedClasses.remove(curr);
        }
    }
    /**
     * Given an error message produced by this lint detector for the given issue type,
     * returns the old value to be replaced in the source code.
     * <p>
     * Intended for IDE quickfix implementations.
     *
     * @param issue the corresponding issue
     * @param errorMessage the error message associated with the error
     * @param format the format of the error message
     * @return the corresponding old value, or null if not recognized
     */
    @Nullable
    public static String getOldValue(@NonNull Issue issue, @NonNull String errorMessage,
            @NonNull TextFormat format) {
        if (issue == INNERCLASS) {
            errorMessage = format.toText(errorMessage);
            return LintUtils.findSubstring(errorMessage, " replace \"", "\"");
        }
        return null;
    }
    /**
     * Given an error message produced by this lint detector for the given issue type,
     * returns the new value to be put into the source code.
     * <p>
     * Intended for IDE quickfix implementations.
     *
     * @param issue the corresponding issue
     * @param errorMessage the error message associated with the error
     * @param format the format of the error message
     * @return the corresponding new value, or null if not recognized
     */
    @Nullable
    public static String getNewValue(@NonNull Issue issue, @NonNull String errorMessage,
            @NonNull TextFormat format) {
        if (issue == INNERCLASS) {
            errorMessage = format.toText(errorMessage);
            return LintUtils.findSubstring(errorMessage, " with \"", "\"");
        }
        return null;
    }
}
| |
package tonius.simplyjetpacks.setup;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.oredict.OreDictionary;
import tonius.simplyjetpacks.SimplyJetpacks;
import tonius.simplyjetpacks.config.Config;
import tonius.simplyjetpacks.crafting.UpgradingRecipe;
import tonius.simplyjetpacks.integration.BCItems;
import tonius.simplyjetpacks.integration.BCRecipes;
import tonius.simplyjetpacks.integration.EIOItems;
import tonius.simplyjetpacks.integration.EIORecipes;
import tonius.simplyjetpacks.integration.ModType;
import tonius.simplyjetpacks.integration.RAItems;
import tonius.simplyjetpacks.integration.TDItems;
import tonius.simplyjetpacks.integration.TEItems;
import tonius.simplyjetpacks.integration.TERecipes;
import tonius.simplyjetpacks.item.ItemJetpackFueller;
import tonius.simplyjetpacks.item.ItemMeta;
import tonius.simplyjetpacks.item.ItemMeta.MetaItem;
import tonius.simplyjetpacks.item.ItemMysteriousPotato;
import tonius.simplyjetpacks.item.ItemPack.ItemFluxPack;
import tonius.simplyjetpacks.item.ItemPack.ItemJetpack;
import cofh.lib.util.helpers.ItemHelper;
import cpw.mods.fml.common.Loader;
import cpw.mods.fml.common.registry.GameRegistry;
public abstract class ModItems {
private static boolean integrateTE = false;
private static boolean integrateTD = false;
private static boolean integrateRA = false;
private static boolean integrateEIO = false;
private static boolean integrateBC = false;
    /**
     * Pre-initialization: resolves the cross-mod integration flags once and then
     * registers this mod's items. Thermal Dynamics and Redstone Arsenal
     * integration additionally require Thermal Expansion integration to be active.
     */
    public static void preInit() {
        integrateTE = ModType.THERMAL_EXPANSION.loaded && Config.enableIntegrationTE;
        integrateTD = ModType.THERMAL_DYNAMICS.loaded && integrateTE;
        integrateRA = ModType.REDSTONE_ARSENAL.loaded && integrateTE;
        integrateEIO = ModType.ENDER_IO.loaded && Config.enableIntegrationEIO;
        integrateBC = ModType.BUILDCRAFT.loaded && Config.enableIntegrationBC;
        registerItems();
    }
    /**
     * Initialization: initializes the item holders for each enabled mod
     * integration (flags were resolved in {@code preInit}), then registers
     * recipes and sends inter-mod communication messages.
     */
    public static void init() {
        if (integrateTE) {
            TEItems.init();
            // TD and RA item sets are only meaningful when TE integration is on
            if (integrateTD) {
                TDItems.init();
            }
            if (integrateRA) {
                RAItems.init();
            }
        }
        if (integrateEIO) {
            EIOItems.init();
        }
        if (integrateBC) {
            BCItems.init();
        }
        registerRecipes();
        doIMC();
    }
private static void registerItems() {
SimplyJetpacks.logger.info("Registering items");
// For compatibility, do not change item IDs until 1.8+
jetpacksCommon = new ItemJetpack(ModType.SIMPLY_JETPACKS, "jetpacksCommon");
jetpackPotato = jetpacksCommon.putPack(0, Packs.jetpackPotato, true);
jetpackCreative = jetpacksCommon.putPack(9001, Packs.jetpackCreative);
fluxPacksCommon = new ItemFluxPack(ModType.SIMPLY_JETPACKS, "fluxpacksCommon");
fluxPackCreative = fluxPacksCommon.putPack(9001, Packs.fluxPackCreative);
if (integrateTE) {
jetpacksTE = new ItemJetpack(ModType.THERMAL_EXPANSION, "jetpacks");
jetpackTE1 = jetpacksTE.putPack(1, Packs.jetpackTE1);
jetpackTE1Armored = jetpacksTE.putPack(101, Packs.jetpackTE1Armored);
jetpackTE2 = jetpacksTE.putPack(2, Packs.jetpackTE2);
jetpackTE2Armored = jetpacksTE.putPack(102, Packs.jetpackTE2Armored);
jetpackTE3 = jetpacksTE.putPack(3, Packs.jetpackTE3);
jetpackTE3Armored = jetpacksTE.putPack(103, Packs.jetpackTE3Armored);
jetpackTE4 = jetpacksTE.putPack(4, Packs.jetpackTE4);
jetpackTE4Armored = jetpacksTE.putPack(104, Packs.jetpackTE4Armored);
if (integrateRA || Config.addRAItemsIfNotInstalled) {
jetpackTE5 = jetpacksTE.putPack(5, Packs.jetpackTE5);
}
fluxPacksTE = new ItemFluxPack(ModType.THERMAL_EXPANSION, "fluxpacks");
fluxPackTE1 = fluxPacksTE.putPack(1, Packs.fluxPackTE1);
fluxPackTE2 = fluxPacksTE.putPack(2, Packs.fluxPackTE2);
fluxPackTE2Armored = fluxPacksTE.putPack(102, Packs.fluxPackTE2Armored);
fluxPackTE3 = fluxPacksTE.putPack(3, Packs.fluxPackTE3);
fluxPackTE3Armored = fluxPacksTE.putPack(103, Packs.fluxPackTE3Armored);
fluxPackTE4 = fluxPacksTE.putPack(4, Packs.fluxPackTE4);
fluxPackTE4Armored = fluxPacksTE.putPack(104, Packs.fluxPackTE4Armored);
}
if (integrateEIO) {
jetpacksEIO = new ItemJetpack(ModType.ENDER_IO, "jetpacksEIO");
jetpackEIO1 = jetpacksEIO.putPack(1, Packs.jetpackEIO1);
jetpackEIO1Armored = jetpacksEIO.putPack(101, Packs.jetpackEIO1Armored);
jetpackEIO2 = jetpacksEIO.putPack(2, Packs.jetpackEIO2);
jetpackEIO2Armored = jetpacksEIO.putPack(102, Packs.jetpackEIO2Armored);
jetpackEIO3 = jetpacksEIO.putPack(3, Packs.jetpackEIO3);
jetpackEIO3Armored = jetpacksEIO.putPack(103, Packs.jetpackEIO3Armored);
jetpackEIO4 = jetpacksEIO.putPack(4, Packs.jetpackEIO4);
jetpackEIO4Armored = jetpacksEIO.putPack(104, Packs.jetpackEIO4Armored);
jetpackEIO5 = jetpacksEIO.putPack(5, Packs.jetpackEIO5);
fluxPacksEIO = new ItemFluxPack(ModType.ENDER_IO, "fluxpacksEIO");
fluxPackEIO1 = fluxPacksEIO.putPack(1, Packs.fluxPackEIO1);
fluxPackEIO2 = fluxPacksEIO.putPack(2, Packs.fluxPackEIO2);
fluxPackEIO2Armored = fluxPacksEIO.putPack(102, Packs.fluxPackEIO2Armored);
fluxPackEIO3 = fluxPacksEIO.putPack(3, Packs.fluxPackEIO3);
fluxPackEIO3Armored = fluxPacksEIO.putPack(103, Packs.fluxPackEIO3Armored);
fluxPackEIO4 = fluxPacksEIO.putPack(4, Packs.fluxPackEIO4);
fluxPackEIO4Armored = fluxPacksEIO.putPack(104, Packs.fluxPackEIO4Armored);
}
if (integrateBC) {
jetpacksBC = new ItemJetpack(ModType.BUILDCRAFT, "jetpacksBC");
if (Loader.isModLoaded("BuildCraft|Energy") && Loader.isModLoaded("BuildCraft|Factory")) {
jetpackBC1 = jetpacksBC.putPack(1, Packs.jetpackBC1);
jetpackBC1Armored = jetpacksBC.putPack(101, Packs.jetpackBC1Armored);
}
jetpackBC2 = jetpacksBC.putPack(2, Packs.jetpackBC2);
jetpackBC2Armored = jetpacksBC.putPack(102, Packs.jetpackBC2Armored);
}
components = new ItemMeta("components");
armorPlatings = new ItemMeta("armorPlatings");
particleCustomizers = new ItemMeta("particleCustomizers");
jetpackFueller = new ItemJetpackFueller("jetpackFueller");
mysteriousPotato = new ItemMysteriousPotato("mysteriousPotato");
leatherStrap = components.addMetaItem(0, new MetaItem("leatherStrap", null, EnumRarity.common), true, false);
jetpackIcon = components.addMetaItem(1, new MetaItem("jetpack.icon", null, EnumRarity.common, false, true), false, false);
if (integrateTE) {
thrusterTE1 = components.addMetaItem(11, new MetaItem("thruster.te.1", null, EnumRarity.common), true, false);
thrusterTE2 = components.addMetaItem(12, new MetaItem("thruster.te.2", null, EnumRarity.common), true, false);
thrusterTE3 = components.addMetaItem(13, new MetaItem("thruster.te.3", null, EnumRarity.uncommon), true, false);
thrusterTE4 = components.addMetaItem(14, new MetaItem("thruster.te.4", null, EnumRarity.rare), true, false);
if (integrateRA || Config.addRAItemsIfNotInstalled) {
thrusterTE5 = components.addMetaItem(15, new MetaItem("thruster.te.5", null, EnumRarity.epic), true, false);
unitGlowstoneEmpty = components.addMetaItem(60, new MetaItem("unitGlowstone.empty", null, EnumRarity.common), true, false);
unitGlowstone = components.addMetaItem(61, new MetaItem("unitGlowstone", null, EnumRarity.uncommon), true, false);
unitCryotheumEmpty = components.addMetaItem(62, new MetaItem("unitCryotheum.empty", null, EnumRarity.common), true, false);
unitCryotheum = components.addMetaItem(63, new MetaItem("unitCryotheum", null, EnumRarity.rare), true, false);
}
if (!integrateRA && Config.addRAItemsIfNotInstalled) {
dustElectrumFlux = components.addMetaItem(64, new MetaItem("dustElectrumFlux", "raReplacement", EnumRarity.uncommon), true, true);
ingotElectrumFlux = components.addMetaItem(65, new MetaItem("ingotElectrumFlux", "raReplacement", EnumRarity.uncommon), true, true);
nuggetElectrumFlux = components.addMetaItem(66, new MetaItem("nuggetElectrumFlux", "raReplacement", EnumRarity.uncommon), true, true);
gemCrystalFlux = components.addMetaItem(67, new MetaItem("gemCrystalFlux", "raReplacement", EnumRarity.uncommon), true, true);
plateFlux = components.addMetaItem(68, new MetaItem("plateFlux", "raReplacement", EnumRarity.uncommon), true, false);
armorFluxPlate = components.addMetaItem(69, new MetaItem("armorFluxPlate", "raReplacement", EnumRarity.uncommon), true, false);
}
if (ModType.REDSTONE_ARMORY.loaded) {
enderiumUpgrade = components.addMetaItem(59, new MetaItem("enderiumUpgrade", "enderiumUpgrade", EnumRarity.rare), true, false);
}
armorPlatingTE1 = armorPlatings.addMetaItem(1, new MetaItem("armorPlating.te.1", null, EnumRarity.common), true, false);
armorPlatingTE2 = armorPlatings.addMetaItem(2, new MetaItem("armorPlating.te.2", null, EnumRarity.common), true, false);
armorPlatingTE3 = armorPlatings.addMetaItem(3, new MetaItem("armorPlating.te.3", null, EnumRarity.common), true, false);
armorPlatingTE4 = armorPlatings.addMetaItem(4, new MetaItem("armorPlating.te.4", null, EnumRarity.rare), true, false);
}
if (integrateEIO) {
thrusterEIO1 = components.addMetaItem(21, new MetaItem("thruster.eio.1", null, EnumRarity.common), true, false);
thrusterEIO2 = components.addMetaItem(22, new MetaItem("thruster.eio.2", null, EnumRarity.common), true, false);
thrusterEIO3 = components.addMetaItem(23, new MetaItem("thruster.eio.3", null, EnumRarity.uncommon), true, false);
thrusterEIO4 = components.addMetaItem(24, new MetaItem("thruster.eio.4", null, EnumRarity.rare), true, false);
thrusterEIO5 = components.addMetaItem(25, new MetaItem("thruster.eio.5", null, EnumRarity.epic), true, false);
ingotDarkSoularium = components.addMetaItem(70, new MetaItem("ingotDarkSoularium", null, EnumRarity.uncommon, true, false), true, true);
reinforcedGliderWing = components.addMetaItem(71, new MetaItem("reinforcedGliderWing", null, EnumRarity.uncommon), true, false);
unitFlightControlEmpty = components.addMetaItem(72, new MetaItem("unitFlightControl.empty", null, EnumRarity.common), true, false);
unitFlightControl = components.addMetaItem(73, new MetaItem("unitFlightControl", null, EnumRarity.uncommon), true, false);
armorPlatingEIO1 = armorPlatings.addMetaItem(11, new MetaItem("armorPlating.eio.1", null, EnumRarity.common), true, false);
armorPlatingEIO2 = armorPlatings.addMetaItem(12, new MetaItem("armorPlating.eio.2", null, EnumRarity.common), true, false);
armorPlatingEIO3 = armorPlatings.addMetaItem(13, new MetaItem("armorPlating.eio.3", null, EnumRarity.common), true, false);
armorPlatingEIO4 = armorPlatings.addMetaItem(14, new MetaItem("armorPlating.eio.4", null, EnumRarity.common), true, false);
}
if (integrateBC) {
if (Loader.isModLoaded("BuildCraft|Energy") && Loader.isModLoaded("BuildCraft|Factory")) {
thrusterBC1 = components.addMetaItem(31, new MetaItem("thruster.bc.1", null, EnumRarity.common), true, false);
}
thrusterBC2 = components.addMetaItem(32, new MetaItem("thruster.bc.2", null, EnumRarity.uncommon), true, false);
armorPlatingBC1 = armorPlatings.addMetaItem(21, new MetaItem("armorPlating.bc.1", null, EnumRarity.common), true, false);
armorPlatingBC2 = armorPlatings.addMetaItem(22, new MetaItem("armorPlating.bc.2", null, EnumRarity.uncommon), true, false);
}
particleDefault = particleCustomizers.addMetaItem(0, new MetaItem("particle.0", "particleCustomizers", EnumRarity.common), true, false);
particleNone = particleCustomizers.addMetaItem(1, new MetaItem("particle.1", "particleCustomizers", EnumRarity.common), true, false);
particleSmoke = particleCustomizers.addMetaItem(2, new MetaItem("particle.2", "particleCustomizers", EnumRarity.common), true, false);
particleRainbowSmoke = particleCustomizers.addMetaItem(3, new MetaItem("particle.3", "particleCustomizers", EnumRarity.common), true, false);
}
private static void registerRecipes() {
SimplyJetpacks.logger.info("Registering recipes");
ItemHelper.addShapedOreRecipe(jetpackPotato, "S S", "NPN", "R R", 'S', Items.string, 'N', "nuggetGold", 'P', Items.potato, 'R', "dustRedstone");
ItemHelper.addShapedOreRecipe(jetpackPotato, "S S", "NPN", "R R", 'S', Items.string, 'N', "nuggetGold", 'P', Items.poisonous_potato, 'R', "dustRedstone");
GameRegistry.addRecipe(new UpgradingRecipe(jetpackCreative, "J", "P", 'J', jetpackCreative, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
ItemHelper.addShapedOreRecipe(leatherStrap, "LIL", "LIL", 'L', Items.leather, 'I', "ingotIron");
Object dustCoal = OreDictionary.getOres("dustCoal").size() > 0 ? "dustCoal" : new ItemStack(Items.coal);
ItemHelper.addShapedOreRecipe(particleDefault, " D ", "DCD", " D ", 'C', dustCoal, 'D', Blocks.torch);
ItemHelper.addShapedOreRecipe(particleNone, " D ", "DCD", " D ", 'C', dustCoal, 'D', "blockGlass");
ItemHelper.addShapedOreRecipe(particleSmoke, " C ", "CCC", " C ", 'C', dustCoal);
ItemHelper.addShapedOreRecipe(particleRainbowSmoke, " R ", " C ", "G B", 'C', dustCoal, 'R', "dyeRed", 'G', "dyeLime", 'B', "dyeBlue");
ItemHelper.addShapedOreRecipe(jetpackFueller, "IY ", " IY", " SI", 'I', "ingotIron", 'Y', "dyeYellow", 'S', "stickWood");
if (integrateTE) {
if (!integrateRA && Config.addRAItemsIfNotInstalled) {
ItemHelper.addReverseStorageRecipe(nuggetElectrumFlux, "ingotElectrumFlux");
ItemHelper.addStorageRecipe(ingotElectrumFlux, "nuggetElectrumFlux");
ItemHelper.addShapedOreRecipe(plateFlux, "NNN", "GIG", "NNN", 'G', "gemCrystalFlux", 'I', "ingotElectrumFlux", 'N', "nuggetElectrumFlux");
ItemHelper.addShapedOreRecipe(armorFluxPlate, "I I", "III", "III", 'I', plateFlux);
}
Object ductFluxLeadstone = integrateTD ? TDItems.ductFluxLeadstone : "blockGlass";
Object ductFluxHardened = integrateTD ? TDItems.ductFluxHardened : "blockGlass";
Object ductFluxRedstoneEnergy = integrateTD ? TDItems.ductFluxRedstoneEnergy : "blockGlassHardened";
Object ductFluxResonant = integrateTD ? TDItems.ductFluxResonant : "blockGlassHardened";
ItemHelper.addShapedOreRecipe(thrusterTE1, "ICI", "PDP", "IRI", 'I', "ingotLead", 'P', ductFluxLeadstone, 'C', TEItems.powerCoilGold, 'D', TEItems.dynamoSteam, 'R', "dustRedstone");
ItemHelper.addShapedOreRecipe(thrusterTE2, "ICI", "PDP", "IRI", 'I', "ingotInvar", 'P', ductFluxHardened, 'C', TEItems.powerCoilGold, 'D', TEItems.dynamoReactant, 'R', "dustRedstone");
ItemHelper.addShapedOreRecipe(thrusterTE3, "ICI", "PDP", "IRI", 'I', "ingotElectrum", 'P', ductFluxRedstoneEnergy, 'C', TEItems.powerCoilGold, 'D', TEItems.dynamoMagmatic, 'R', "bucketRedstone");
ItemHelper.addShapedOreRecipe(thrusterTE4, "ICI", "PDP", "IRI", 'I', "ingotEnderium", 'P', ductFluxResonant, 'C', TEItems.powerCoilGold, 'D', TEItems.dynamoEnervation, 'R', "bucketRedstone");
ItemHelper.addShapedOreRecipe(armorPlatingTE1, "TIT", "III", "TIT", 'I', "ingotIron", 'T', "ingotTin");
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE1, "ICI", "ISI", 'I', "ingotLead", 'C', TEItems.cellBasic, 'S', leatherStrap));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE2, " I ", "ISI", " I ", 'I', "ingotInvar", 'S', fluxPackTE1));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE3, " C ", "ISI", "LOL", 'I', "ingotElectrum", 'L', "ingotLead", 'C', TEItems.frameCellReinforcedFull, 'S', fluxPackTE2, 'O', TEItems.powerCoilElectrum));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE4, " I ", "ISI", " I ", 'I', "ingotEnderium", 'S', fluxPackTE3));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE2Armored, "P", "J", 'J', fluxPackTE2, 'P', armorPlatingTE1));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE2, "J", 'J', fluxPackTE2Armored));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE3Armored, "P", "J", 'J', fluxPackTE3, 'P', armorPlatingTE2));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE3, "J", 'J', fluxPackTE3Armored));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE4Armored, "P", "J", 'J', fluxPackTE4, 'P', armorPlatingTE3));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackTE4, "J", 'J', fluxPackTE4Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE1, "IBI", "IJI", "T T", 'I', "ingotLead", 'B', TEItems.capacitorBasic, 'T', thrusterTE1, 'J', leatherStrap));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE2, "IBI", "IJI", "T T", 'I', "ingotInvar", 'B', TEItems.capacitorHardened, 'T', thrusterTE2, 'J', jetpackTE1));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE3, "IBI", "IJI", "T T", 'I', "ingotElectrum", 'B', TEItems.capacitorReinforced, 'T', thrusterTE3, 'J', jetpackTE2));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE4, "IBI", "IJI", "T T", 'I', "ingotEnderium", 'B', TEItems.capacitorResonant, 'T', thrusterTE4, 'J', jetpackTE3));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE1Armored, "P", "J", 'J', jetpackTE1, 'P', armorPlatingTE1));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE1, "J", 'J', jetpackTE1Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE2Armored, "P", "J", 'J', jetpackTE2, 'P', armorPlatingTE2));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE2, "J", 'J', jetpackTE2Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE3Armored, "P", "J", 'J', jetpackTE3, 'P', armorPlatingTE3));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE3, "J", 'J', jetpackTE3Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE4Armored, "P", "J", 'J', jetpackTE4, 'P', armorPlatingTE4));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE4, "J", 'J', jetpackTE4Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE1, "J", "P", 'J', jetpackTE1, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE2, "J", "P", 'J', jetpackTE2, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE3, "J", "P", 'J', jetpackTE3, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE4, "J", "P", 'J', jetpackTE4, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
if (integrateRA || Config.addRAItemsIfNotInstalled) {
ItemHelper.addShapedOreRecipe(unitGlowstoneEmpty, "FLF", "LHL", "FLF", 'L', "ingotLumium", 'F', "ingotElectrumFlux", 'H', TEItems.frameIlluminator);
ItemHelper.addShapedOreRecipe(unitCryotheumEmpty, "FTF", "THT", "FTF", 'T', "ingotTin", 'F', "ingotElectrumFlux", 'H', "blockGlassHardened");
ItemHelper.addShapedOreRecipe(thrusterTE5, "FPF", "GRG", 'G', unitGlowstone, 'P', RAItems.plateFlux != null ? RAItems.plateFlux : plateFlux, 'R', thrusterTE4, 'F', "ingotElectrumFlux");
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE5, "PAP", "OJO", "TCT", 'A', RAItems.armorFluxPlate != null ? RAItems.armorFluxPlate : armorFluxPlate, 'J', jetpackTE4Armored, 'O', unitCryotheum, 'C', fluxPackTE4Armored, 'T', thrusterTE5, 'P', RAItems.plateFlux != null ? RAItems.plateFlux : plateFlux));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE5, "J", "P", 'J', jetpackTE5, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
if (ModType.REDSTONE_ARMORY.loaded) {
ItemHelper.addGearRecipe(enderiumUpgrade, "ingotEnderium", "slimeball");
GameRegistry.addRecipe(new UpgradingRecipe(jetpackTE5, "U", "J", 'J', jetpackTE5, 'U', enderiumUpgrade));
}
}
}
if (integrateEIO) {
ItemHelper.addShapedOreRecipe(thrusterEIO1, "ICI", "PCP", "DSD", 'I', "ingotConductiveIron", 'P', EIOItems.redstoneConduit, 'C', EIOItems.basicCapacitor, 'D', EIOItems.basicGear, 'S', "dustRedstone");
ItemHelper.addShapedOreRecipe(thrusterEIO2, "ICI", "PCP", "DSD", 'I', "ingotElectricalSteel", 'P', EIOItems.energyConduit1, 'C', EIOItems.basicCapacitor, 'D', EIOItems.machineChassis, 'S', "dustRedstone");
ItemHelper.addShapedOreRecipe(thrusterEIO3, "ICI", "PCP", "DSD", 'I', "ingotEnergeticAlloy", 'P', EIOItems.energyConduit2, 'C', EIOItems.doubleCapacitor, 'D', EIOItems.pulsatingCrystal, 'S', "ingotRedstoneAlloy");
ItemHelper.addShapedOreRecipe(thrusterEIO4, "ICI", "PCP", "DSD", 'I', "ingotPhasedGold", 'P', EIOItems.energyConduit3, 'C', EIOItems.octadicCapacitor, 'D', EIOItems.vibrantCrystal, 'S', "ingotRedstoneAlloy");
ItemHelper.addShapedOreRecipe(armorPlatingEIO1, "SIS", "ISI", "SIS", 'I', "ingotIron", 'S', "itemSilicon");
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO1, "CIC", "ISI", "IPI", 'S', leatherStrap, 'C', EIOItems.basicCapacitor, 'I', "ingotConductiveIron", 'P', "dustCoal"));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO2, "DCD", "ISI", "IPI", 'S', fluxPackEIO1, 'C', EIOItems.basicCapacitor, 'D', EIOItems.doubleCapacitor, 'I', "ingotElectricalSteel", 'P', "dustGold"));
if (EIOItems.capacitorBank != null && EIOItems.capacitorBank.getItem() != null) {
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO3, "CBC", "ISI", "IPI", 'S', fluxPackEIO2, 'C', EIOItems.doubleCapacitor, 'B', EIOItems.capacitorBank, 'I', "ingotEnergeticAlloy", 'P', EIOItems.pulsatingCrystal));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO4, "BCB", "ISI", "CPC", 'S', fluxPackEIO3, 'C', EIOItems.octadicCapacitor, 'B', EIOItems.capacitorBankVibrant, 'I', "ingotPhasedGold", 'P', EIOItems.vibrantCrystal));
} else {
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO3, "CBC", "ISI", "IPI", 'S', fluxPackEIO2, 'C', EIOItems.doubleCapacitor, 'B', EIOItems.capacitorBankOld, 'I', "ingotEnergeticAlloy", 'P', EIOItems.pulsatingCrystal));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO4, "CBC", "ISI", "BPB", 'S', fluxPackEIO3, 'C', EIOItems.octadicCapacitor, 'B', EIOItems.capacitorBankOld, 'I', "ingotPhasedGold", 'P', EIOItems.vibrantCrystal));
}
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO2Armored, "P", "J", 'J', fluxPackEIO2, 'P', armorPlatingEIO1));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO2, "J", 'J', fluxPackEIO2Armored));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO3Armored, "P", "J", 'J', fluxPackEIO3, 'P', armorPlatingEIO2));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO3, "J", 'J', fluxPackEIO3Armored));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO4Armored, "P", "J", 'J', fluxPackEIO4, 'P', armorPlatingEIO3));
GameRegistry.addRecipe(new UpgradingRecipe(fluxPackEIO4, "J", 'J', fluxPackEIO4Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO1, "IBI", "IJI", "T T", 'I', "ingotConductiveIron", 'B', EIOItems.basicCapacitor, 'T', thrusterEIO1, 'J', leatherStrap));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO2, "IBI", "IJI", "T T", 'I', "ingotElectricalSteel", 'B', EIOItems.basicCapacitor, 'T', thrusterEIO2, 'J', jetpackEIO1));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO3, "IBI", "IJI", "T T", 'I', "ingotEnergeticAlloy", 'B', EIOItems.doubleCapacitor, 'T', thrusterEIO3, 'J', jetpackEIO2));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO4, "IBI", "IJI", "T T", 'I', "ingotPhasedGold", 'B', EIOItems.octadicCapacitor, 'T', thrusterEIO4, 'J', jetpackEIO3));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO1Armored, "P", "J", 'J', jetpackEIO1, 'P', armorPlatingEIO1));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO1, "J", 'J', jetpackEIO1Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO2Armored, "P", "J", 'J', jetpackEIO2, 'P', armorPlatingEIO2));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO2, "J", 'J', jetpackEIO2Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO3Armored, "P", "J", 'J', jetpackEIO3, 'P', armorPlatingEIO3));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO3, "J", 'J', jetpackEIO3Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO4Armored, "P", "J", 'J', jetpackEIO4, 'P', armorPlatingEIO4));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO4, "J", 'J', jetpackEIO4Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO1, "J", "P", 'J', jetpackEIO1, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO2, "J", "P", 'J', jetpackEIO2, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO3, "J", "P", 'J', jetpackEIO3, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO4, "J", "P", 'J', jetpackEIO4, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
ItemHelper.addShapedOreRecipe(unitFlightControlEmpty, "FLF", "LHL", "FLF", 'L', "ingotElectricalSteel", 'F', "ingotDarkSoularium", 'H', "blockGlassHardened");
ItemHelper.addShapedOreRecipe(thrusterEIO5, "SES", "CTC", 'T', thrusterEIO4, 'S', "ingotDarkSoularium", 'E', unitFlightControl, 'C', EIOItems.octadicCapacitor);
ItemHelper.addShapedOreRecipe(reinforcedGliderWing, " S", " SP", "SPP", 'S', "ingotDarkSoularium", 'P', armorPlatingEIO2);
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO5, "OAO", "PJP", "TCT", 'A', EIOItems.enderCrystal, 'J', jetpackEIO4Armored, 'O', "ingotDarkSoularium", 'C', fluxPackEIO4Armored, 'T', thrusterEIO5, 'P', reinforcedGliderWing));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackEIO5, "J", "P", 'J', jetpackEIO5, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
}
if (integrateBC) {
ItemHelper.addShapedOreRecipe(armorPlatingBC1, /* listen here u */"LIL"/* shit */, "ILI", "LIL", 'I', "ingotIron", 'L', Items.leather);
ItemHelper.addSurroundRecipe(armorPlatingBC2, armorPlatingBC1, "gemDiamond");
if (jetpackBC1 != null) {
ItemHelper.addShapedOreRecipe(thrusterBC1, "IGI", "PEP", "IBI", 'I', "ingotIron", 'G', "gearIron", 'P', BCItems.pipeFluidStone, 'E', BCItems.engineCombustion, 'B', Blocks.iron_bars);
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC1, "IBI", "IJI", "T T", 'I', "ingotIron", 'B', BCItems.tank, 'T', thrusterBC1, 'J', leatherStrap));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC1Armored, "P", "J", 'J', jetpackBC1, 'P', armorPlatingBC1));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC1, "J", 'J', jetpackBC1Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC1, "J", "P", 'J', jetpackBC1, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
}
Object jetpack = jetpackBC1 != null ? jetpackBC1 : leatherStrap;
Object thruster = thrusterBC1 != null ? thrusterBC1 : "gearIron";
if (Loader.isModLoaded("BuildCraft|Silicon")) {
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC2, "IBI", "IJI", "T T", 'I', "ingotGold", 'B', "crystalRedstone" /* BC7 */, 'T', thrusterBC2, 'J', jetpack));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC2, "IBI", "IJI", "T T", 'I', "ingotGold", 'B', "redstoneCrystal" /* BC6 */, 'T', thrusterBC2, 'J', jetpack));
} else {
ItemHelper.addShapedOreRecipe(thrusterBC2, "IGI", "PEP", "IBI", 'I', "ingotGold", 'G', "gearGold", 'P', BCItems.pipeEnergyGold, 'E', thruster, 'B', Blocks.iron_bars);
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC2, "IBI", "IJI", "T T", 'I', "ingotGold", 'B', "gearDiamond", 'T', thrusterBC2, 'J', jetpack));
}
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC2Armored, "P", "J", 'J', jetpackBC2, 'P', armorPlatingBC2));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC2, "J", 'J', jetpackBC2Armored));
GameRegistry.addRecipe(new UpgradingRecipe(jetpackBC2, "J", "P", 'J', jetpackBC2, 'P', new ItemStack(particleCustomizers, 1, OreDictionary.WILDCARD_VALUE)));
}
}
private static void doIMC() {
SimplyJetpacks.logger.info("Doing intermod communication");
if (integrateTE) {
if (!integrateRA && Config.addRAItemsIfNotInstalled) {
TERecipes.addTransposerFill(8000, new ItemStack(Items.diamond), gemCrystalFlux, new FluidStack(FluidRegistry.getFluid("redstone"), 200), false);
TERecipes.addTransposerFill(4000, OreDictionary.getOres("dustElectrum").get(0), dustElectrumFlux, new FluidStack(FluidRegistry.getFluid("redstone"), 200), false);
TERecipes.addSmelterBlastOre("ElectrumFlux");
}
ItemStack i = OreDictionary.getOres("ingotBronze").get(0).copy();
i.stackSize = 10;
TERecipes.addSmelterRecipe(3200, armorPlatingTE1, i, armorPlatingTE2, null, 0);
i = OreDictionary.getOres("ingotInvar").get(0).copy();
i.stackSize = 10;
TERecipes.addSmelterRecipe(4800, armorPlatingTE2, i, armorPlatingTE3, null, 0);
i = OreDictionary.getOres("ingotEnderium").get(0).copy();
i.stackSize = 10;
TERecipes.addSmelterRecipe(6400, armorPlatingTE3, i, armorPlatingTE4, null, 0);
if (integrateRA || Config.addRAItemsIfNotInstalled) {
TERecipes.addTransposerFill(6400, unitGlowstoneEmpty, unitGlowstone, new FluidStack(FluidRegistry.getFluid("glowstone"), 4000), false);
TERecipes.addTransposerFill(6400, unitCryotheumEmpty, unitCryotheum, new FluidStack(FluidRegistry.getFluid("cryotheum"), 4000), false);
}
}
if (integrateEIO) {
ItemStack ingotConductiveIron = OreDictionary.getOres("ingotConductiveIron").get(0).copy();
ingotConductiveIron.stackSize = 10;
EIORecipes.addAlloySmelterRecipe("Conductive Iron Armor Plating", 3200, armorPlatingEIO1, ingotConductiveIron, null, armorPlatingEIO2);
ItemStack ingotElectricalSteel = OreDictionary.getOres("ingotElectricalSteel").get(0).copy();
ingotElectricalSteel.stackSize = 10;
EIORecipes.addAlloySmelterRecipe("Electrical Steel Armor Plating", 4800, armorPlatingEIO2, ingotElectricalSteel, null, armorPlatingEIO3);
ItemStack ingotDarkSteel = OreDictionary.getOres("ingotDarkSteel").get(0).copy();
ingotDarkSteel.stackSize = 10;
EIORecipes.addAlloySmelterRecipe("Dark Steel Armor Plating", 6400, armorPlatingEIO3, ingotDarkSteel, null, armorPlatingEIO4);
ItemStack ingotSoularium = OreDictionary.getOres("ingotSoularium").get(0).copy();
ingotDarkSteel.stackSize = 1;
EIORecipes.addAlloySmelterRecipe("Enriched Soularium Alloy", 32000, ingotDarkSteel, ingotSoularium, EIOItems.pulsatingCrystal, ingotDarkSoularium);
EIORecipes.addSoulBinderRecipe("Flight Control Unit", 75000, 8, "Bat", unitFlightControlEmpty, unitFlightControl);
}
if (integrateBC && Loader.isModLoaded("BuildCraft|Silicon")) {
ItemStack pipeEnergyGold = BCItems.getStack(BCItems.pipeEnergyGold);
pipeEnergyGold.stackSize = 2;
ItemStack[] inputs;
if (thrusterBC1 != null) {
inputs = new ItemStack[] { thrusterBC1.copy(), new ItemStack(Items.gold_ingot, 4), pipeEnergyGold, BCItems.getStack(BCItems.chipsetGold) };
} else {
inputs = new ItemStack[] { BCItems.getStack(BCItems.engineCombustion), new ItemStack(Items.gold_ingot, 4), pipeEnergyGold, BCItems.getStack(BCItems.chipsetGold), new ItemStack(Blocks.iron_bars) };
}
BCRecipes.addAssemblyRecipe("kineticThruster", 1200000, inputs, thrusterBC2.copy());
}
}
public static ItemJetpack jetpacksCommon = null;
public static ItemFluxPack fluxPacksCommon = null;
public static ItemJetpack jetpacksTE = null;
public static ItemFluxPack fluxPacksTE = null;
public static ItemJetpack jetpacksEIO = null;
public static ItemFluxPack fluxPacksEIO = null;
public static ItemJetpack jetpacksBC = null;
public static ItemMeta components = null;
public static ItemMeta armorPlatings = null;
public static ItemMeta particleCustomizers = null;
public static ItemJetpackFueller jetpackFueller = null;
public static ItemMysteriousPotato mysteriousPotato = null;
public static ItemStack jetpackPotato = null;
public static ItemStack jetpackCreative = null;
public static ItemStack fluxPackCreative = null;
public static ItemStack jetpackTE1 = null;
public static ItemStack jetpackTE1Armored = null;
public static ItemStack jetpackTE2 = null;
public static ItemStack jetpackTE2Armored = null;
public static ItemStack jetpackTE3 = null;
public static ItemStack jetpackTE3Armored = null;
public static ItemStack jetpackTE4 = null;
public static ItemStack jetpackTE4Armored = null;
public static ItemStack jetpackTE5 = null;
public static ItemStack fluxPackTE1 = null;
public static ItemStack fluxPackTE2 = null;
public static ItemStack fluxPackTE2Armored = null;
public static ItemStack fluxPackTE3 = null;
public static ItemStack fluxPackTE3Armored = null;
public static ItemStack fluxPackTE4 = null;
public static ItemStack fluxPackTE4Armored = null;
public static ItemStack jetpackEIO1 = null;
public static ItemStack jetpackEIO1Armored = null;
public static ItemStack jetpackEIO2 = null;
public static ItemStack jetpackEIO2Armored = null;
public static ItemStack jetpackEIO3 = null;
public static ItemStack jetpackEIO3Armored = null;
public static ItemStack jetpackEIO4 = null;
public static ItemStack jetpackEIO4Armored = null;
public static ItemStack jetpackEIO5 = null;
public static ItemStack fluxPackEIO1 = null;
public static ItemStack fluxPackEIO2 = null;
public static ItemStack fluxPackEIO2Armored = null;
public static ItemStack fluxPackEIO3 = null;
public static ItemStack fluxPackEIO3Armored = null;
public static ItemStack fluxPackEIO4 = null;
public static ItemStack fluxPackEIO4Armored = null;
public static ItemStack jetpackBC1 = null;
public static ItemStack jetpackBC1Armored = null;
public static ItemStack jetpackBC2 = null;
public static ItemStack jetpackBC2Armored = null;
public static ItemStack leatherStrap = null;
public static ItemStack jetpackIcon = null;
public static ItemStack thrusterTE1 = null;
public static ItemStack thrusterTE2 = null;
public static ItemStack thrusterTE3 = null;
public static ItemStack thrusterTE4 = null;
public static ItemStack thrusterTE5 = null;
public static ItemStack thrusterEIO1 = null;
public static ItemStack thrusterEIO2 = null;
public static ItemStack thrusterEIO3 = null;
public static ItemStack thrusterEIO4 = null;
public static ItemStack thrusterEIO5 = null;
public static ItemStack thrusterBC1 = null;
public static ItemStack thrusterBC2 = null;
public static ItemStack unitGlowstoneEmpty = null;
public static ItemStack unitGlowstone = null;
public static ItemStack unitCryotheumEmpty = null;
public static ItemStack unitCryotheum = null;
public static ItemStack dustElectrumFlux = null;
public static ItemStack ingotElectrumFlux = null;
public static ItemStack nuggetElectrumFlux = null;
public static ItemStack gemCrystalFlux = null;
public static ItemStack plateFlux = null;
public static ItemStack armorFluxPlate = null;
public static ItemStack enderiumUpgrade = null;
public static ItemStack ingotDarkSoularium = null;
public static ItemStack reinforcedGliderWing = null;
public static ItemStack unitFlightControlEmpty = null;
public static ItemStack unitFlightControl = null;
public static ItemStack armorPlatingTE1 = null;
public static ItemStack armorPlatingTE2 = null;
public static ItemStack armorPlatingTE3 = null;
public static ItemStack armorPlatingTE4 = null;
public static ItemStack armorPlatingEIO1 = null;
public static ItemStack armorPlatingEIO2 = null;
public static ItemStack armorPlatingEIO3 = null;
public static ItemStack armorPlatingEIO4 = null;
public static ItemStack armorPlatingBC1 = null;
public static ItemStack armorPlatingBC2 = null;
public static ItemStack particleDefault = null;
public static ItemStack particleNone = null;
public static ItemStack particleSmoke = null;
public static ItemStack particleRainbowSmoke = null;
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Ordinastie
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.malisis.core.block;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists;
import net.malisis.core.MalisisCore;
import net.malisis.core.asm.AsmUtils;
import net.malisis.core.block.component.ITickableComponent.PeriodicTickableComponent;
import net.malisis.core.block.component.ITickableComponent.RandomTickableComponent;
import net.malisis.core.block.component.LadderComponent;
import net.malisis.core.inventory.MalisisTab;
import net.malisis.core.item.MalisisItemBlock;
import net.malisis.core.renderer.DefaultRenderer;
import net.malisis.core.renderer.MalisisRendered;
import net.malisis.core.renderer.icon.Icon;
import net.malisis.core.renderer.icon.provider.IIconProvider;
import net.minecraft.block.Block;
import net.minecraft.block.material.MapColor;
import net.minecraft.block.material.Material;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.state.BlockStateContainer;
import net.minecraft.block.state.IBlockState;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumBlockRenderType;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.NonNullList;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
* @author Ordinastie
*
*/
@MalisisRendered(DefaultRenderer.Block.class)
@SuppressWarnings("deprecation")
public class MalisisBlock extends Block implements IBoundingBox, IRegisterable<Block>, IComponentProvider
{
    // Reflective handle to Block.blockState ("field_176227_L" is the obfuscated/SRG
    // name used at runtime) so the BlockStateContainer can be rebuilt after
    // components contribute extra properties (see buildBlockState()).
    private static Field blockStateField = AsmUtils.changeFieldAccess(Block.class, "blockState", "field_176227_L");
    protected AxisAlignedBB boundingBox;
    // Components that contribute block state/behavior; most overrides below
    // delegate to these in order, returning the first non-null/true answer.
    protected final List<IBlockComponent> blockComponents = Lists.newArrayList();
    // Non-block components (e.g. icon providers) attached to this block.
    protected final List<IComponent> components = Lists.newArrayList();

    protected MalisisBlock(Material material)
    {
        super(material);
    }

    /**
     * Extra properties contributed by the block itself (components add theirs in
     * {@link #buildBlockState()}). Default: none.
     */
    protected List<IProperty<?>> getProperties()
    {
        return Lists.newArrayList();
    }

    /**
     * Rebuilds this block's {@link BlockStateContainer} from the block's own properties
     * plus every registered component's properties, and injects it via reflection
     * (the vanilla field is effectively final after the Block constructor runs).
     */
    protected void buildBlockState()
    {
        List<IProperty<?>> properties = getProperties();
        for (IBlockComponent component : getBlockComponents())
            properties.addAll(Arrays.asList(component.getProperties()));
        try
        {
            blockStateField.set(this, new BlockStateContainer(this, properties.toArray(new IProperty[0])));
        }
        catch (ReflectiveOperationException e)
        {
            MalisisCore.log.error("[MalisisBlock] Failed to set the new BlockState for {}.", this.getClass().getSimpleName(), e);
        }
    }

    /**
     * Recomputes the default state by letting each component set its default
     * property values on the freshly built base state.
     */
    private void buildDefaultState()
    {
        IBlockState state = blockState.getBaseState();
        for (IBlockComponent component : getBlockComponents())
            state = component.setDefaultState(this, state);
        setDefaultState(state);
    }

    public List<IBlockComponent> getBlockComponents()
    {
        return blockComponents;
    }

    /**
     * All components attached to this block: block components first, then plain components.
     */
    @Override
    public List<IComponent> getComponents()
    {
        return Stream.concat(blockComponents.stream(), components.stream()).collect(Collectors.toList());
    }

    /**
     * Registers a component. Block components trigger a rebuild of the block state
     * and default state, and recursively register their dependencies.
     *
     * @throws IllegalStateException if a client-only component is added on the server
     */
    @Override
    public void addComponent(IComponent component)
    {
        if (component.isClientComponent() && !MalisisCore.isClient())
            throw new IllegalStateException("Trying to add component " + component.getClass().getSimpleName() + " on server.");
        if (component instanceof IBlockComponent)
        {
            blockComponents.add((IBlockComponent) component);
            for (IComponent dep : ((IBlockComponent) component).getDependencies())
                addComponent(dep);
            buildBlockState();
            buildDefaultState();
        }
        else
            components.add(component);
        component.onComponentAdded(this);
        // Keep light opacity in sync with the (possibly changed) default state's opacity.
        lightOpacity = getDefaultState().isOpaqueCube() ? 255 : 0;
    }

    /**
     * Sets both the registry name (via IRegisterable) and the unlocalized name.
     */
    @Override
    public MalisisBlock setName(String name)
    {
        IRegisterable.super.setName(name);
        setUnlocalizedName(name);
        return this;
    }

    /**
     * State-sensitive unlocalized name: first non-null name offered by a component,
     * falling back to the block's own unlocalized name.
     */
    public String getUnlocalizedName(IBlockState state)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            String name = component.getUnlocalizedName(this, state);
            if (name != null)
                return name;
        }
        return getUnlocalizedName();
    }

    /**
     * Item form of this block. A component answer of null or of a non-default item
     * class is returned immediately (null meaning "no item") — only a plain
     * MalisisItemBlock answer falls through to the next component.
     */
    @Override
    public Item getItem(Block block)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            Item item = component.getItem(this);
            if (item == null || item.getClass() != MalisisItemBlock.class)
                return item;
        }
        return IRegisterable.super.getItem(this);
    }

    /** True if any component declares item subtypes for the given item. */
    public boolean hasItemSubtypes(Item item)
    {
        for (IBlockComponent component : getBlockComponents())
            if (component.getHasSubtypes(this, item))
                return true;
        return false;
    }

    /** Client-side only: attaches an icon provider for the named texture. */
    public void setTexture(String textureName)
    {
        if (!StringUtils.isEmpty(textureName) && MalisisCore.isClient())
        {
            Icon icon = Icon.from(textureName);
            addComponent((IIconProvider) () -> icon);
        }
    }

    /** Client-side only: attaches an icon provider using the item's icon. */
    public void setTexture(Item item)
    {
        if (item != null && MalisisCore.isClient())
        {
            Icon icon = Icon.from(item);
            addComponent((IIconProvider) () -> icon);
        }
    }

    /** Convenience overload: uses the block's default state texture. */
    public void setTexture(Block block)
    {
        if (block != null)
            setTexture(block.getDefaultState());
    }

    /** Client-side only: attaches an icon provider using the state's icon. */
    public void setTexture(IBlockState state)
    {
        if (state != null && MalisisCore.isClient())
        {
            Icon icon = Icon.from(state);
            addComponent((IIconProvider) () -> icon);
        }
    }

    /** Derives a block state from an item stack's metadata. */
    public IBlockState getStateFromItemStack(ItemStack itemStack)
    {
        return getStateFromMeta(itemStack.getItem().getMetadata(itemStack.getMetadata()));
    }

    //EVENTS
    @Override
    public void onBlockAdded(World world, BlockPos pos, IBlockState state)
    {
        getBlockComponents().forEach(c -> c.onBlockAdded(this, world, pos, state));
    }

    /**
     * Placement state: starts from vanilla's answer, then lets every component
     * refine it in registration order.
     */
    @Override
    public IBlockState getStateForPlacement(World world, BlockPos pos, EnumFacing facing, float hitX, float hitY, float hitZ, int meta, EntityLivingBase placer, EnumHand hand)
    {
        IBlockState state = super.getStateForPlacement(world, pos, facing, hitX, hitY, hitZ, meta, placer, hand);
        for (IBlockComponent component : getBlockComponents())
            state = component.getStateForPlacement(this, world, pos, state, facing, hitX, hitY, hitZ, meta, placer, hand);
        return state;
    }

    @Override
    public void onBlockPlacedBy(World world, BlockPos pos, IBlockState state, EntityLivingBase placer, ItemStack stack)
    {
        getBlockComponents().forEach(c -> c.onBlockPlacedBy(this, world, pos, state, placer, stack));
    }

    /**
     * Activated if ANY component handles the activation. Note that every component
     * is always invoked (no short-circuit), so each gets a chance to react.
     */
    @Override
    public boolean onBlockActivated(World world, BlockPos pos, IBlockState state, EntityPlayer player, EnumHand hand, EnumFacing side, float hitX, float hitY, float hitZ)
    {
        boolean b = false;
        for (IBlockComponent component : getBlockComponents())
            b |= component.onBlockActivated(this, world, pos, state, player, hand, side, hitX, hitY, hitZ);
        return b;
    }

    @Override
    public void neighborChanged(IBlockState state, World world, BlockPos pos, Block neighborBlock, BlockPos neighborPos)
    {
        getBlockComponents().forEach(c -> c.onNeighborBlockChange(this, world, pos, state, neighborBlock, neighborPos));
    }

    /** Provides power if any component does (all components are queried). */
    @Override
    public boolean canProvidePower(IBlockState state)
    {
        boolean b = false;
        for (IBlockComponent component : getBlockComponents())
            b |= component.canProvidePower(this, state);
        return b;
    }

    @Override
    public void breakBlock(World world, BlockPos pos, IBlockState state)
    {
        getBlockComponents().forEach(c -> c.breakBlock(this, world, pos, state));
        super.breakBlock(world, pos, state);
    }

    //UPDATE
    /** Random ticks are delegated to the RandomTickableComponent, if present. */
    @Override
    public void randomTick(World world, BlockPos pos, IBlockState state, Random rand)
    {
        RandomTickableComponent rtc = IComponent.getComponent(RandomTickableComponent.class, this);
        if (rtc != null)
            rtc.update(this, world, pos, state, rand);
    }

    /**
     * Scheduled ticks are delegated to the PeriodicTickableComponent, if present.
     * A positive return value from the component schedules the next tick that many
     * ticks later; zero or negative stops the cycle.
     */
    @Override
    public void updateTick(World world, BlockPos pos, IBlockState state, Random rand)
    {
        PeriodicTickableComponent ptc = IComponent.getComponent(PeriodicTickableComponent.class, this);
        if (ptc == null)
            return;
        int nextTick = ptc.update(this, world, pos, state, rand);
        if (nextTick > 0)
            world.scheduleBlockUpdate(pos, this, nextTick, nextTick);
    }

    //BOUNDING BOX
    /** First non-null bounding box offered by a component, else the full block. */
    @Override
    public AxisAlignedBB getBoundingBox(IBlockAccess world, BlockPos pos, IBlockState state, BoundingBoxType type)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            AxisAlignedBB aabb = component.getBoundingBox(this, world, pos, state, type);
            if (aabb != null)
                return aabb;
        }
        return FULL_BLOCK_AABB;
    }

    /**
     * Union of all component-supplied boxes; falls back to the IBoundingBox default
     * when no component supplies any.
     */
    @Override
    public AxisAlignedBB[] getBoundingBoxes(IBlockAccess world, BlockPos pos, IBlockState state, BoundingBoxType type)
    {
        List<AxisAlignedBB> list = Lists.newArrayList();
        for (IBlockComponent component : getBlockComponents())
        {
            AxisAlignedBB[] aabbs = component.getBoundingBoxes(this, world, pos, state, type);
            if (aabbs != null)
                Collections.addAll(list, aabbs);
        }
        return list.size() != 0 ? list.toArray(new AxisAlignedBB[0]) : IBoundingBox.super.getBoundingBoxes(world, pos, state, type);
    }

    @Override
    public void addCollisionBoxToList(IBlockState state, World world, BlockPos pos, AxisAlignedBB mask, List<AxisAlignedBB> list, @Nullable Entity collidingEntity, boolean useActualState)
    {
        // NOTE(review): the useActualState parameter is ignored and false is always
        // forwarded to the default implementation — TODO confirm this is intended.
        IBoundingBox.super.addCollisionBoxToList(state, world, pos, mask, list, collidingEntity, false);
    }

    @Override
    public AxisAlignedBB getSelectedBoundingBox(IBlockState state, World world, BlockPos pos)
    {
        return IBoundingBox.super.getSelectedBoundingBox(state, world, pos);
    }

    @Override
    public RayTraceResult collisionRayTrace(IBlockState state, World world, BlockPos pos, Vec3d src, Vec3d dest)
    {
        return IBoundingBox.super.collisionRayTrace(state, world, pos, src, dest);
    }

    /** Placement on a side is vetoed if any component refuses it. */
    @Override
    public boolean canPlaceBlockOnSide(World world, BlockPos pos, EnumFacing side)
    {
        for (IBlockComponent component : getBlockComponents())
            if (!component.canPlaceBlockOnSide(this, world, pos, side))
                return false;
        return super.canPlaceBlockOnSide(world, pos, side);
    }

    /** Placement at a position is vetoed if any component refuses it. */
    @Override
    public boolean canPlaceBlockAt(World world, BlockPos pos)
    {
        for (IBlockComponent component : getBlockComponents())
            if (!component.canPlaceBlockAt(this, world, pos))
                return false;
        return super.canPlaceBlockAt(world, pos);
    }

    //SUB BLOCKS
    /**
     * First non-zero damage value offered by a component, else the state's metadata.
     */
    @Override
    public int damageDropped(IBlockState state)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            int damage = component.damageDropped(this, state);
            if (damage != 0)
                return damage;
        }
        return getMetaFromState(state);
    }

    /**
     * Creative-tab entries: components fill a scratch list first; the vanilla
     * default stack is only added when no component contributed any.
     */
    @Override
    public void getSubBlocks(CreativeTabs tab, NonNullList<ItemStack> list)
    {
        NonNullList<ItemStack> l = NonNullList.<ItemStack> create();
        for (IBlockComponent component : getBlockComponents())
            component.getSubBlocks(this, tab, l);
        //only add default itemStack if components don't add itemStacks
        if (l.isEmpty())
            super.getSubBlocks(tab, list);
        else
            list.addAll(l);
    }

    //COLORS
    /** First non-null map color offered by a component, else vanilla's. */
    @Override
    public MapColor getMapColor(IBlockState state, IBlockAccess world, BlockPos pos)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            MapColor color = component.getMapColor(this, state, world, pos);
            if (color != null)
                return color;
        }
        return super.getMapColor(state, world, pos);
    }

    /** Each component decodes its own properties from the shared metadata value. */
    @Override
    public IBlockState getStateFromMeta(int meta)
    {
        IBlockState state = getDefaultState();
        for (IBlockComponent component : getBlockComponents())
            state = component.getStateFromMeta(this, state, meta);
        return state;
    }

    /**
     * Metadata is the SUM of each component's contribution — components are
     * expected to encode into disjoint bit ranges for this to round-trip.
     */
    @Override
    public int getMetaFromState(IBlockState state)
    {
        int meta = 0;
        for (IBlockComponent component : getBlockComponents())
            meta += component.getMetaFromState(this, state);
        return meta;
    }

    //FULLNESS
    /** First non-null component answer wins; else vanilla's. */
    @Override
    public boolean shouldSideBeRendered(IBlockState state, IBlockAccess world, BlockPos pos, EnumFacing side)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            Boolean render = component.shouldSideBeRendered(this, world, pos, state, side);
            if (render != null)
                return render;
        }
        return super.shouldSideBeRendered(state, world, pos, side);
    }

    /** First non-null component answer wins; else vanilla's. */
    @Override
    public boolean isFullBlock(IBlockState state)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            Boolean full = component.isFullBlock(this, state);
            if (full != null)
                return full;
        }
        return super.isFullBlock(state);
    }

    /** First non-null component answer wins; else vanilla's. */
    @Override
    public boolean isFullCube(IBlockState state)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            Boolean full = component.isFullCube(this, state);
            if (full != null)
                return full;
        }
        return super.isFullCube(state);
    }

    @Override
    public boolean isOpaqueCube(IBlockState state)
    {
        //parent constructor call
        // (this override runs before our fields are initialized when the Block
        // constructor queries opacity, hence the null guard)
        if (getBlockComponents() == null)
            return super.isOpaqueCube(state);
        for (IBlockComponent component : getBlockComponents())
        {
            Boolean opaque = component.isOpaqueCube(this, state);
            if (opaque != null)
                return opaque;
        }
        return super.isOpaqueCube(state);
    }

    //OTHER
    @Override
    @SideOnly(Side.CLIENT)
    public int getPackedLightmapCoords(IBlockState state, IBlockAccess world, BlockPos pos)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            //TODO: use max light value
            Integer light = component.getPackedLightmapCoords(this, world, pos, state);
            if (light != null)
                return light;
        }
        return super.getPackedLightmapCoords(state, world, pos);
    }

    /** First non-null component answer wins; else vanilla's. */
    @Override
    public Item getItemDropped(IBlockState state, Random rand, int fortune)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            Item item = component.getItemDropped(this, state, rand, fortune);
            if (item != null)
                return item;
        }
        return super.getItemDropped(state, rand, fortune);
    }

    /** First non-null component answer wins; else vanilla's. */
    @Override
    public int quantityDropped(IBlockState state, int fortune, Random random)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            Integer quantity = component.quantityDropped(this, state, fortune, random);
            if (quantity != null)
                return quantity;
        }
        return super.quantityDropped(state, fortune, random);
    }

    /** First non-null component answer wins; else vanilla's. */
    @Override
    public int getLightOpacity(IBlockState state, IBlockAccess world, BlockPos pos)
    {
        for (IBlockComponent component : getBlockComponents())
        {
            Integer quantity = component.getLightOpacity(this, world, pos, state);
            if (quantity != null)
                return quantity;
        }
        return super.getLightOpacity(state, world, pos);
    }

    /** A MalisisBlock behaves as a ladder iff a LadderComponent is attached. */
    @Override
    public boolean isLadder(IBlockState state, IBlockAccess world, BlockPos pos, EntityLivingBase entity)
    {
        return IComponent.getComponent(LadderComponent.class, this) != null;
    }

    /**
     * Always ENTITYBLOCK_ANIMATED — presumably so vanilla rendering is bypassed in
     * favor of the renderer declared by @MalisisRendered; TODO confirm.
     */
    @Override
    public EnumBlockRenderType getRenderType(IBlockState state)
    {
        return EnumBlockRenderType.ENTITYBLOCK_ANIMATED;
    }

    /** Also registers the block with MalisisTab instances for display. */
    @Override
    public MalisisBlock setCreativeTab(CreativeTabs tab)
    {
        super.setCreativeTab(tab);
        if (tab instanceof MalisisTab)
            ((MalisisTab) tab).addItem(this);
        return this;
    }
}
| |
package org.drools.compiler.lang.dsl;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import org.drools.compiler.lang.dsl.DSLMappingEntry;
import org.drools.compiler.lang.dsl.DSLMappingFile;
import org.drools.compiler.lang.dsl.DSLTokenizedMappingFile;
import org.drools.compiler.lang.dsl.DefaultExpander;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for {@link DSLTokenizedMappingFile}: parsing of DSL mapping entries
 * (sections, metadata, key/value patterns, escape handling) and expansion of DSLR
 * text through {@link DefaultExpander}.
 */
public class DSLMappingFileTest {
    // Parsed mapping file under test; re-assigned by each test method.
    private DSLMappingFile file = null;
    // Classpath resource used by testParseFile().
    private final String filename = "test_metainfo.dsl";

    /**
     * Parses the bundled test_metainfo.dsl resource and checks it loads without
     * errors and yields the expected number of entries (31).
     */
    @Test
    public void testParseFile() {
        try {
            final Reader reader = new InputStreamReader( this.getClass().getResourceAsStream( this.filename ) );
            this.file = new DSLTokenizedMappingFile();
            final boolean parsingResult = this.file.parseAndLoad( reader );
            reader.close();
            assertTrue( this.file.getErrors().toString(),
                        parsingResult );
            assertTrue( this.file.getErrors().isEmpty() );
            assertEquals( 31,
                          this.file.getMapping().getEntries().size() );
        } catch ( final IOException e ) {
            e.printStackTrace();
            fail( "Should not raise exception " );
        }
    }

    /**
     * Escaped square brackets (\[ ... \]) in the key must survive parsing:
     * the mapping key keeps the backslash escapes verbatim.
     */
    @Test
    public void testParseFileWithEscaptedBrackets() {
        String file = "[when][]ATTRIBUTE \"{attr}\" IS IN \\[{list}\\]=Attribute( {attr} in ({list}) )";
        try {
            final Reader reader = new StringReader( file );
            this.file = new DSLTokenizedMappingFile();
            final boolean parsingResult = this.file.parseAndLoad( reader );
            reader.close();
            assertTrue( this.file.getErrors().toString(),
                        parsingResult );
            assertTrue( this.file.getErrors().isEmpty() );
            assertEquals( 1,
                          this.file.getMapping().getEntries().size() );
            DSLMappingEntry entry = (DSLMappingEntry) this.file.getMapping().getEntries().get( 0 );
            assertEquals( DSLMappingEntry.CONDITION,
                          entry.getSection() );
            assertEquals( DSLMappingEntry.EMPTY_METADATA,
                          entry.getMetaData() );
            assertEquals( "ATTRIBUTE \"{attr}\" IS IN \\[{list}\\]",
                          entry.getMappingKey() );
            assertEquals( "Attribute( {attr} in ({list}) )",
                          entry.getMappingValue() );
        } catch ( final IOException e ) {
            e.printStackTrace();
            fail( "Should not raise exception " );
        }
    }

    /**
     * Escaped curly brackets (\{ ... \}) in the value: kept escaped in the stored
     * mapping value, but rendered as literal braces once the entry is expanded.
     */
    @Test
    public void testParseFileWithEscaptedCurlyBrackets() {
        String file = "[consequence][$policy]Add surcharge {surcharge} to Policy=modify(policy) \\{price = {surcharge}\\}";
        try {
            final Reader reader = new StringReader( file );
            this.file = new DSLTokenizedMappingFile();
            final boolean parsingResult = this.file.parseAndLoad( reader );
            reader.close();
            assertTrue( this.file.getErrors().toString(),
                        parsingResult );
            assertTrue( this.file.getErrors().isEmpty() );
            assertEquals( 1,
                          this.file.getMapping().getEntries().size() );
            DSLMappingEntry entry = (DSLMappingEntry) this.file.getMapping().getEntries().get( 0 );
            assertEquals( DSLMappingEntry.CONSEQUENCE,
                          entry.getSection() );
            assertEquals( "$policy",
                          entry.getMetaData().toString() );
            assertEquals( "Add surcharge {surcharge} to Policy",
                          entry.getMappingKey() );
            assertEquals( "modify(policy) \\{price = {surcharge}\\}",
                          entry.getMappingValue() );
            // End-to-end: expanding a rule through the mapping must substitute the
            // variable and unescape the braces.
            String input = "rule x\nwhen\nthen\nAdd surcharge 300 to Policy\nend\n";
            String expected = "rule x\nwhen\nthen\nmodify(policy) {price = 300}\nend\n";
            DefaultExpander de = new DefaultExpander();
            de.addDSLMapping( this.file.getMapping() );
            final String result = de.expand( input );
            // String result = entry.getKeyPattern().matcher( input ).replaceAll( entry.getValuePattern() );
            assertEquals( expected,
                          result );
        } catch ( final IOException e ) {
            e.printStackTrace();
            fail( "Should not raise exception " );
        }
    }

    /**
     * Expansion of a rule whose RHS is empty: the "then" and "end" keywords must
     * remain intact (historically the expander joined them into "thenend").
     */
    @Test
    public void testNoRHS() {
        String file = "[then]TEST=System.out.println(\"DO_SOMETHING\");\n" +
                      "[when]code {code1} occurs and sum of all digit not equal \\( {code2} \\+ {code3} \\)=AAAA( cd1 == {code1}, cd2 != ( {code2} + {code3} ))\n"
                      + "[when]code {code1} occurs=BBBB\n";
        try {
            final Reader reader = new StringReader( file );
            this.file = new DSLTokenizedMappingFile();
            final boolean parsingResult = this.file.parseAndLoad( reader );
            reader.close();
            assertTrue( this.file.getErrors().toString(),
                        parsingResult );
            assertTrue( this.file.getErrors().isEmpty() );
            final String LHS = "code 1041 occurs and sum of all digit not equal ( 1034 + 1035 )";
            final String rule = "rule \"x\"\nwhen\n" + LHS + "\nthen\nend";
            DefaultExpander de = new DefaultExpander();
            de.addDSLMapping( this.file.getMapping() );
            final String ruleAfterExpansion = de.expand( rule );
            final String expected = "rule \"x\"\nwhen\nAAAA( cd1 == 1041, cd2 != ( 1034 + 1035 ))\nthen\nend";
            assertEquals( expected,
                          ruleAfterExpansion );
        } catch ( final IOException e ) {
            e.printStackTrace();
            fail( "Should not raise exception " );
        }
    }

    /**
     * Escaped parentheses and '+' in the LHS key, plus an expanded RHS: the longer
     * (escaped) "when" entry must win over the shorter "code {code1} occurs" one.
     */
    @Test
    public void testParseFileWithEscapes() {
        String file = "[then]TEST=System.out.println(\"DO_SOMETHING\");\n" + "[when]code {code1} occurs and sum of all digit not equal \\( {code2} \\+ {code3} \\)=AAAA( cd1 == {code1}, cd2 != ( {code2} + {code3} ))\n"
                      + "[when]code {code1} occurs=BBBB\n";
        try {
            final Reader reader = new StringReader( file );
            this.file = new DSLTokenizedMappingFile();
            final boolean parsingResult = this.file.parseAndLoad( reader );
            reader.close();
            assertTrue( this.file.getErrors().toString(),
                        parsingResult );
            assertTrue( this.file.getErrors().isEmpty() );
            final String LHS = "code 1041 occurs and sum of all digit not equal ( 1034 + 1035 )";
            final String rule = "rule \"x\"\nwhen\n" + LHS + "\nthen\nTEST\nend";
            DefaultExpander de = new DefaultExpander();
            de.addDSLMapping( this.file.getMapping() );
            final String ruleAfterExpansion = de.expand( rule );
            final String expected = "rule \"x\"\nwhen\nAAAA( cd1 == 1041, cd2 != ( 1034 + 1035 ))\nthen\nSystem.out.println(\"DO_SOMETHING\");\nend";
            assertEquals( expected,
                          ruleAfterExpansion );
        } catch ( final IOException e ) {
            e.printStackTrace();
            fail( "Should not raise exception " );
        }
    }

    /**
     * Escaped '=' in the key (\=) should be unescaped in the stored mapping key.
     * Currently @Ignore'd — presumably this behavior is not yet implemented.
     */
    @Test @Ignore
    public void testParseFileWithEscaptedEquals() {
        String file = "[when][]something:\\={value}=Attribute( something == \"{value}\" )";
        try {
            final Reader reader = new StringReader( file );
            this.file = new DSLTokenizedMappingFile();
            final boolean parsingResult = this.file.parseAndLoad( reader );
            reader.close();
            assertTrue( this.file.getErrors().toString(),
                        parsingResult );
            assertTrue( this.file.getErrors().isEmpty() );
            assertEquals( 1,
                          this.file.getMapping().getEntries().size() );
            DSLMappingEntry entry = (DSLMappingEntry) this.file.getMapping().getEntries().get( 0 );
            assertEquals( DSLMappingEntry.CONDITION,
                          entry.getSection() );
            assertEquals( DSLMappingEntry.EMPTY_METADATA,
                          entry.getMetaData() );
            assertEquals( "something:={value}",
                          entry.getMappingKey() );
            assertEquals( "Attribute( something == \"{value}\" )",
                          entry.getMappingValue() );
        } catch ( final IOException e ) {
            e.printStackTrace();
            fail( "Should not raise exception " );
        }
    }

    /**
     * A variable with an ENUM type declaration ({attr:ENUM:Attribute.value}) must
     * still produce a generic capture group in the compiled key pattern.
     */
    @Test
    public void testEnum() {
        String file = "[when][]ATTRIBUTE {attr:ENUM:Attribute.value} in {list}=Attribute( {attr} in ({list}) )";
        try {
            final Reader reader = new StringReader( file );
            this.file = new DSLTokenizedMappingFile();
            final boolean parsingResult = this.file.parseAndLoad( reader );
            reader.close();
            assertTrue( this.file.getErrors().toString(),
                        parsingResult );
            assertTrue( this.file.getErrors().isEmpty() );
            assertEquals( 1,
                          this.file.getMapping().getEntries().size() );
            DSLMappingEntry entry = (DSLMappingEntry) this.file.getMapping().getEntries().get( 0 );
            assertEquals( DSLMappingEntry.CONDITION,
                          entry.getSection() );
            assertEquals( DSLMappingEntry.EMPTY_METADATA,
                          entry.getMetaData() );
            System.out.println( entry.getValuePattern() );
            System.out.println( entry.getVariables() );
            assertEquals( "ATTRIBUTE {attr:ENUM:Attribute.value} in {list}",
                          entry.getMappingKey() );
            assertEquals( "Attribute( {attr} in ({list}) )",
                          entry.getMappingValue() );
            assertEquals( "(?:(?<=^)|(?<=\\W))ATTRIBUTE\\s+(.*?)\\s+in\\s+(.*?)$",
                          entry.getKeyPattern().toString() );
        } catch ( final IOException e ) {
            e.printStackTrace();
            fail( "Should not raise exception " );
        }
    }
}
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2014 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.apache.hive.jdbc;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Properties;
import java.util.logging.Logger;
import org.pentaho.hadoop.hive.jdbc.HadoopConfigurationUtil;
import org.pentaho.hadoop.hive.jdbc.JDBCDriverCallable;
/**
* <p>
 * This is a proxy driver for the Hive Server 2 JDBC Driver available through the currently
 * active Hadoop configuration.
* </p>
* <p>
* This driver is named exactly the same as the official Apache Hive driver
* so no further modifications are required by calling code to swap in this
* proxy.
* </p>
* <p>
* This class uses reflection to attempt to find the Big Data Plugin and load
* the HadoopConfigurationBootstrap so we have access to the Hive JDBC driver
* that is compatible with the currently selected Hadoop configuration. All
* operations are delegated to the current active Hadoop configuration's Hive
* JDBC driver via HadoopConfiguration#getHiveJdbcDriver.
* </p>
* <p>
* All calls to the loaded HiveDriver will have the current Thread's context
* class loader set to the class that loaded the driver so subsequent resource
* lookups are successful.
* </p>
*/
public class HiveDriver implements java.sql.Driver {
  /**
   * Method name of {@link org.pentaho.hadoop.shim.spi.HadoopShim#getJdbcDriver(String)}
   */
  protected static final String METHOD_GET_JDBC_DRIVER = "getJdbcDriver";

  /** URL parameter identifying the Simba driver; such URLs are not handled here (BAD-215). */
  protected static final String SIMBA_SPECIFIC_URL_PARAMETER = "AuthMech=";

  // Register ourself with the JDBC Driver Manager
  static {
    try {
      DriverManager.registerDriver( new HiveDriver() );
    } catch ( Exception ex ) {
      throw new RuntimeException( "Unable to register Hive Server 2 JDBC driver", ex );
    }
  }

  // Human-readable driver description used in error messages; overridden by subclasses.
  protected String ERROR_SELF_DESCRIPTION = "Hive Server 2";

  /**
   * Driver type = "hive2"
   */
  protected String METHOD_JDBC_PARAM = "hive2";

  /**
   * Utility for resolving Hadoop configurations dynamically.
   */
  protected HadoopConfigurationUtil util;

  /**
   * Create a new Hive driver with the default configuration utility.
   */
  public HiveDriver() {
    this( new HadoopConfigurationUtil() );
  }

  /**
   * Create a new Hive driver with the given configuration utility.
   *
   * @param util utility used to locate the active Hadoop shim; must not be null
   * @throws NullPointerException if {@code util} is null
   */
  public HiveDriver( HadoopConfigurationUtil util ) {
    if ( util == null ) {
      throw new NullPointerException();
    }
    this.util = util;
  }

  /**
   * Resolves the real Hive JDBC driver from the currently active Hadoop shim via
   * reflection ({@link #METHOD_GET_JDBC_DRIVER} with {@link #METHOD_JDBC_PARAM}).
   *
   * @return the shim's driver, or {@code null} if the shim handed back this proxy itself
   * @throws SQLException if the shim or its driver could not be loaded
   */
  protected Driver getActiveDriver() throws SQLException {
    Driver driver = null;
    try {
      Object shim = util.getActiveHadoopShim();
      Method getHiveJdbcDriver = shim.getClass().getMethod( METHOD_GET_JDBC_DRIVER, String.class );
      driver = (Driver) getHiveJdbcDriver.invoke( shim, METHOD_JDBC_PARAM );
    } catch ( Exception ex ) {
      throw new SQLException( String
          .format( "Unable to load %s JDBC driver for the currently active Hadoop configuration",
              ERROR_SELF_DESCRIPTION ), ex );
    }
    // Check if the Shim contains a Hive driver. It may return this driver if it
    // doesn't contain one since it'll be found in one of the parent class loaders
    // so we also need to make sure we didn't return ourself... :)
    if ( driver != null && driver.getClass() == this.getClass() ) {
      driver = null;
    }
    return driver;
  }

  /**
   * Runs the callback against the active shim's driver.
   *
   * @throws SQLException if no driver is available for the active Hadoop configuration
   */
  protected <T> T callWithActiveDriver( JDBCDriverCallable<T> callback ) throws SQLException {
    Driver drv = getActiveDriver();
    if ( drv != null ) {
      return callback.callWithDriver( drv );
    } else {
      throw new SQLException( String.format( "The active Hadoop configuration does not contain a %s JDBC driver",
          ERROR_SELF_DESCRIPTION ) );
    }
  }

  /**
   * Opens a connection by delegating to the active Hadoop configuration's Hive driver.
   *
   * @param url  JDBC URL to connect to
   * @param info connection properties passed through to the delegate driver
   * @return the delegate's connection, or {@code null} when no delegate driver is
   *         available or the URL targets the Simba driver (per JDBC convention,
   *         {@code null} means "this driver does not handle the URL")
   * @throws SQLException if the delegate driver fails to connect
   */
  @Override
  public Connection connect( final String url, final Properties info ) throws SQLException {
    if ( checkBeforeCallActiveDriver( url ) ) {
      return null;
    }
    return callWithActiveDriver( new JDBCDriverCallable<Connection>() {
      @Override
      public Connection call() throws Exception {
        return driver.connect( url, info );
      }
    } );
  }

  /**
   * Delegates URL acceptance to the active driver; answers {@code false} when no
   * delegate is available, the URL is Simba-specific, or the delegate errors out.
   */
  @Override
  public boolean acceptsURL( final String url ) throws SQLException {
    if ( checkBeforeCallActiveDriver( url ) ) {
      return false;
    }
    try {
      boolean accepts = callWithActiveDriver( new JDBCDriverCallable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
          try {
            return driver.acceptsURL( url );
          } catch ( Throwable t ) {
            // This should not have happened. If there was an error during processing, assume this driver can't
            // handle the URL and thus return false
            return false;
          }
        }
      } );
      return Boolean.TRUE.equals( accepts );
    } catch ( Throwable t ) {
      return false;
    }
  }

  /**
   * Guard used before delegating: {@code true} means "do not delegate".
   *
   * @return {@code true} when the shim provides no driver, or when the URL is
   *         Simba-specific (BAD-215) and must be handled by the Simba driver instead
   */
  protected boolean checkBeforeCallActiveDriver( String url ) throws SQLException {
    if ( getActiveDriver() == null ) {
      // Ignore connection attempt in case corresponding driver is not provided by the shim
      return true;
    } else if ( url.contains( SIMBA_SPECIFIC_URL_PARAMETER ) ) {
      // BAD-215 check required to distinguish Simba driver
      return true;
    }
    return false;
  }

  @Override
  public DriverPropertyInfo[] getPropertyInfo( final String url, final Properties info ) throws SQLException {
    if ( checkBeforeCallActiveDriver( url ) ) {
      return null;
    }
    return callWithActiveDriver( new JDBCDriverCallable<DriverPropertyInfo[]>() {
      @Override
      public DriverPropertyInfo[] call() throws Exception {
        return driver.getPropertyInfo( url, info );
      }
    } );
  }

  @Override
  public int getMajorVersion() {
    try {
      return (int) callWithActiveDriver( new JDBCDriverCallable<Integer>() {
        @Override
        public Integer call() throws Exception {
          return driver.getMajorVersion();
        }
      } );
    } catch ( SQLException ex ) {
      // No idea what the driver version is without a driver
      return -1;
    }
  }

  @Override
  public int getMinorVersion() {
    try {
      return (int) callWithActiveDriver( new JDBCDriverCallable<Integer>() {
        @Override
        public Integer call() throws Exception {
          return driver.getMinorVersion();
        }
      } );
    } catch ( SQLException ex ) {
      // No idea what the driver version is without a driver
      return -1;
    }
  }

  @Override
  public boolean jdbcCompliant() {
    try {
      return Boolean.TRUE.equals( callWithActiveDriver( new JDBCDriverCallable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
          return driver.jdbcCompliant();
        }
      } ) );
    } catch ( SQLException ex ) {
      // The HiveDriver is not JDBC compliant as of Hive 0.9.0. If the driver
      // cannot return it's actual compliancy we'll default to false
      return false;
    }
  }

  /**
   * This proxy does not use java.util.logging, so per the JDBC 4.1 contract for
   * {@link java.sql.Driver#getParentLogger()} it must throw rather than return
   * {@code null} (the previous behavior, which violated the spec).
   *
   * @throws SQLFeatureNotSupportedException always
   */
  @Override
  public Logger getParentLogger() throws SQLFeatureNotSupportedException {
    throw new SQLFeatureNotSupportedException(
        String.format( "%s proxy driver does not use java.util.logging", ERROR_SELF_DESCRIPTION ) );
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.TableLayoutResult;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.GroupingProperty;
import com.facebook.presto.spi.LocalProperty;
import com.facebook.presto.spi.SortingProperty;
import com.facebook.presto.spi.predicate.NullableValue;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.DependencyExtractor;
import com.facebook.presto.sql.planner.DomainTranslator;
import com.facebook.presto.sql.planner.ExpressionInterpreter;
import com.facebook.presto.sql.planner.LookupSymbolResolver;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.ApplyNode;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.ChildReplacer;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.GroupIdNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.IndexSourceNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.BooleanLiteral;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.NodeRef;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.SetMultimap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import static com.facebook.presto.SystemSessionProperties.isColocatedJoinEnabled;
import static com.facebook.presto.sql.ExpressionUtils.combineConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.extractConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.stripDeterministicConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.stripNonDeterministicConjuncts;
import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.getExpressionTypes;
import static com.facebook.presto.sql.planner.FragmentTableScanCounter.countSources;
import static com.facebook.presto.sql.planner.FragmentTableScanCounter.hasMultipleSources;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_ARBITRARY_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_HASH_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.optimizations.ActualProperties.Global.partitionedOn;
import static com.facebook.presto.sql.planner.optimizations.ActualProperties.Global.singleStreamPartition;
import static com.facebook.presto.sql.planner.optimizations.LocalProperties.grouped;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Scope.REMOTE;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.GATHER;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.REPARTITION;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.gatheringExchange;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.partitionedExchange;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.replicatedExchange;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.lang.String.format;
import static java.util.Collections.emptyList;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
public class AddExchanges
implements PlanOptimizer
{
private final SqlParser parser;
private final Metadata metadata;
public AddExchanges(Metadata metadata, SqlParser parser)
{
this.metadata = metadata;
this.parser = parser;
}
@Override
public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
{
Context context = new Context(PreferredProperties.any(), ImmutableList.of());
PlanWithProperties result = plan.accept(new Rewriter(idAllocator, symbolAllocator, session), context);
return result.getNode();
}
private static class Context
{
private final PreferredProperties preferredProperties;
private final List<Symbol> correlations;
Context(PreferredProperties preferredProperties, List<Symbol> correlations)
{
this.preferredProperties = preferredProperties;
this.correlations = ImmutableList.copyOf(requireNonNull(correlations, "correlations is null"));
}
Context withPreferredProperties(PreferredProperties preferredProperties)
{
return new Context(preferredProperties, correlations);
}
Context withCorrelations(List<Symbol> correlations)
{
return new Context(preferredProperties, correlations);
}
PreferredProperties getPreferredProperties()
{
return preferredProperties;
}
List<Symbol> getCorrelations()
{
return correlations;
}
}
private class Rewriter
extends PlanVisitor<PlanWithProperties, Context>
{
private final PlanNodeIdAllocator idAllocator;
private final SymbolAllocator symbolAllocator;
private final Map<Symbol, Type> types;
private final Session session;
private final boolean distributedIndexJoins;
private final boolean preferStreamingOperators;
private final boolean redistributeWrites;
public Rewriter(PlanNodeIdAllocator idAllocator, SymbolAllocator symbolAllocator, Session session)
{
this.idAllocator = idAllocator;
this.symbolAllocator = symbolAllocator;
this.types = ImmutableMap.copyOf(symbolAllocator.getTypes());
this.session = session;
this.distributedIndexJoins = SystemSessionProperties.isDistributedIndexJoinEnabled(session);
this.redistributeWrites = SystemSessionProperties.isRedistributeWrites(session);
this.preferStreamingOperators = SystemSessionProperties.preferStreamingOperators(session);
}
@Override
protected PlanWithProperties visitPlan(PlanNode node, Context context)
{
return rebaseAndDeriveProperties(node, planChild(node, context));
}
@Override
public PlanWithProperties visitProject(ProjectNode node, Context context)
{
Map<Symbol, Symbol> identities = computeIdentityTranslations(node.getAssignments());
PreferredProperties translatedPreferred = context.getPreferredProperties().translate(symbol -> Optional.ofNullable(identities.get(symbol)));
return rebaseAndDeriveProperties(node, planChild(node, context.withPreferredProperties(translatedPreferred)));
}
@Override
public PlanWithProperties visitOutput(OutputNode node, Context context)
{
PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.undistributed()));
if (!child.getProperties().isSingleNode()) {
child = withDerivedProperties(
gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
child.getProperties());
}
return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitEnforceSingleRow(EnforceSingleRowNode node, Context context)
{
PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.any()));
if (!child.getProperties().isSingleNode()) {
child = withDerivedProperties(
gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
child.getProperties());
}
return rebaseAndDeriveProperties(node, child);
}
        @Override
        public PlanWithProperties visitAggregation(AggregationNode node, Context context)
        {
            // Partitioning on the grouping keys lets each node aggregate its own groups independently.
            Set<Symbol> partitioningRequirement = ImmutableSet.copyOf(node.getGroupingKeys());
            // Prefer single-node execution when there is only the global (empty) grouping set,
            // or when a default output is required from a non-decomposable aggregation.
            boolean preferSingleNode = (node.hasEmptyGroupingSet() && !node.hasNonEmptyGroupingSet()) ||
                    (node.hasDefaultOutput() && !node.isDecomposable(metadata.getFunctionRegistry()));
            PreferredProperties preferredProperties = preferSingleNode ? PreferredProperties.undistributed() : PreferredProperties.any();
            if (!node.getGroupingKeys().isEmpty()) {
                preferredProperties = PreferredProperties.partitionedWithLocal(partitioningRequirement, grouped(node.getGroupingKeys()))
                        .mergeWithParent(context.getPreferredProperties());
            }
            PlanWithProperties child = planChild(node, context.withPreferredProperties(preferredProperties));
            if (child.getProperties().isSingleNode()) {
                // If already unpartitioned, just drop the single aggregation back on
                return rebaseAndDeriveProperties(node, child);
            }
            if (preferSingleNode) {
                // For queries with only empty grouping sets like
                //
                // SELECT count(*) FROM lineitem;
                //
                // there is no need for distributed aggregation. Single node FINAL aggregation will suffice,
                // since all input have to be aggregated into one line output.
                //
                // If aggregation must produce default output and it is not decomposable, we can not distribute it
                child = withDerivedProperties(
                        gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                        child.getProperties());
            }
            else if (!child.getProperties().isStreamPartitionedOn(partitioningRequirement) && !child.getProperties().isNodePartitionedOn(partitioningRequirement)) {
                // Not partitioned usefully on the grouping keys: repartition so equal keys co-locate.
                child = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, child.getNode(), node.getGroupingKeys(), node.getHashSymbol()),
                        child.getProperties());
            }
            return rebaseAndDeriveProperties(node, child);
        }
@Override
public PlanWithProperties visitGroupId(GroupIdNode node, Context context)
{
PreferredProperties childPreference = context.getPreferredProperties().translate(translateGroupIdSymbols(node));
PlanWithProperties child = planChild(node, context.withPreferredProperties(childPreference));
return rebaseAndDeriveProperties(node, child);
}
private Function<Symbol, Optional<Symbol>> translateGroupIdSymbols(GroupIdNode node)
{
return symbol -> {
if (node.getArgumentMappings().containsKey(symbol)) {
return Optional.of(node.getArgumentMappings().get(symbol));
}
if (node.getCommonGroupingColumns().contains(symbol)) {
return Optional.of(node.getGroupingSetMappings().get(symbol));
}
return Optional.empty();
};
}
@Override
public PlanWithProperties visitMarkDistinct(MarkDistinctNode node, Context context)
{
PreferredProperties preferredChildProperties = PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getDistinctSymbols()), grouped(node.getDistinctSymbols()))
.mergeWithParent(context.getPreferredProperties());
PlanWithProperties child = node.getSource().accept(this, context.withPreferredProperties(preferredChildProperties));
if (child.getProperties().isSingleNode() ||
!child.getProperties().isStreamPartitionedOn(node.getDistinctSymbols())) {
child = withDerivedProperties(
partitionedExchange(
idAllocator.getNextId(),
REMOTE,
child.getNode(),
node.getDistinctSymbols(),
node.getHashSymbol()),
child.getProperties());
}
return rebaseAndDeriveProperties(node, child);
}
        @Override
        public PlanWithProperties visitWindow(WindowNode node, Context context)
        {
            // Desired child layout: grouped on PARTITION BY, then sorted per ORDER BY.
            List<LocalProperty<Symbol>> desiredProperties = new ArrayList<>();
            if (!node.getPartitionBy().isEmpty()) {
                desiredProperties.add(new GroupingProperty<>(node.getPartitionBy()));
            }
            for (Symbol symbol : node.getOrderBy()) {
                desiredProperties.add(new SortingProperty<>(symbol, node.getOrderings().get(symbol)));
            }
            PlanWithProperties child = planChild(
                    node,
                    context.withPreferredProperties(
                            PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getPartitionBy()), desiredProperties)
                                    .mergeWithParent(context.getPreferredProperties())));
            if (!child.getProperties().isStreamPartitionedOn(node.getPartitionBy())) {
                // Without PARTITION BY all rows form one window partition, so gather;
                // otherwise repartition on the PARTITION BY keys.
                if (node.getPartitionBy().isEmpty()) {
                    child = withDerivedProperties(
                            gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                            child.getProperties());
                }
                else {
                    child = withDerivedProperties(
                            partitionedExchange(idAllocator.getNextId(), REMOTE, child.getNode(), node.getPartitionBy(), node.getHashSymbol()),
                            child.getProperties());
                }
            }
            return rebaseAndDeriveProperties(node, child);
        }
        @Override
        public PlanWithProperties visitRowNumber(RowNumberNode node, Context context)
        {
            // Without PARTITION BY the numbering is global: force single-node input.
            if (node.getPartitionBy().isEmpty()) {
                PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.undistributed()));
                if (!child.getProperties().isSingleNode()) {
                    child = withDerivedProperties(
                            gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                            child.getProperties());
                }
                return rebaseAndDeriveProperties(node, child);
            }
            // With PARTITION BY each partition is numbered independently, so prefer
            // the child partitioned (and locally grouped) on those keys.
            PlanWithProperties child = planChild(node, context.withPreferredProperties(
                    PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getPartitionBy()), grouped(node.getPartitionBy()))
                            .mergeWithParent(context.getPreferredProperties())));
            // TODO: add config option/session property to force parallel plan if child is unpartitioned and window has a PARTITION BY clause
            if (!child.getProperties().isStreamPartitionedOn(node.getPartitionBy())) {
                child = withDerivedProperties(
                        partitionedExchange(
                                idAllocator.getNextId(),
                                REMOTE,
                                child.getNode(),
                                node.getPartitionBy(),
                                node.getHashSymbol()),
                        child.getProperties());
            }
            // TODO: streaming
            return rebaseAndDeriveProperties(node, child);
        }
        @Override
        public PlanWithProperties visitTopNRowNumber(TopNRowNumberNode node, Context context)
        {
            PreferredProperties preferredChildProperties;
            Function<PlanNode, PlanNode> addExchange;
            // Unpartitioned: gather everything; partitioned: repartition on PARTITION BY.
            if (node.getPartitionBy().isEmpty()) {
                preferredChildProperties = PreferredProperties.any();
                addExchange = partial -> gatheringExchange(idAllocator.getNextId(), REMOTE, partial);
            }
            else {
                preferredChildProperties = PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getPartitionBy()), grouped(node.getPartitionBy()))
                        .mergeWithParent(context.getPreferredProperties());
                addExchange = partial -> partitionedExchange(idAllocator.getNextId(), REMOTE, partial, node.getPartitionBy(), node.getHashSymbol());
            }
            PlanWithProperties child = planChild(node, context.withPreferredProperties(preferredChildProperties));
            if (!child.getProperties().isStreamPartitionedOn(node.getPartitionBy())) {
                // add exchange + push function to child
                // (a partial TopNRowNumber below the exchange lets each source trim rows early)
                child = withDerivedProperties(
                        new TopNRowNumberNode(
                                idAllocator.getNextId(),
                                child.getNode(),
                                node.getSpecification(),
                                node.getRowNumberSymbol(),
                                node.getMaxRowCountPerPartition(),
                                true,
                                node.getHashSymbol()),
                        child.getProperties());
                child = withDerivedProperties(addExchange.apply(child.getNode()), child.getProperties());
            }
            return rebaseAndDeriveProperties(node, child);
        }
        @Override
        public PlanWithProperties visitTopN(TopNNode node, Context context)
        {
            PlanWithProperties child;
            switch (node.getStep()) {
                // SINGLE/FINAL TopN must observe all rows, so it needs single-node input.
                case SINGLE:
                case FINAL:
                    child = planChild(node, context.withPreferredProperties(PreferredProperties.undistributed()));
                    if (!child.getProperties().isSingleNode()) {
                        child = withDerivedProperties(
                                gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                                child.getProperties());
                    }
                    break;
                // PARTIAL TopN is distributable: each node trims its own rows locally.
                case PARTIAL:
                    child = planChild(node, context.withPreferredProperties(PreferredProperties.any()));
                    break;
                default:
                    throw new UnsupportedOperationException(format("Unsupported step for TopN [%s]", node.getStep()));
            }
            return rebaseAndDeriveProperties(node, child);
        }
        @Override
        public PlanWithProperties visitSort(SortNode node, Context context)
        {
            // A full sort needs every row on a single node.
            PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.undistributed()));
            if (!child.getProperties().isSingleNode()) {
                child = withDerivedProperties(
                        gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                        child.getProperties());
            }
            else {
                // current plan so far is single node, so local properties are effectively global properties
                // skip the SortNode if the local properties guarantee ordering on Sort keys
                // TODO: This should be extracted as a separate optimizer once the planner is able to reason about the ordering of each operator
                List<LocalProperty<Symbol>> desiredProperties = new ArrayList<>();
                for (Symbol symbol : node.getOrderBy()) {
                    desiredProperties.add(new SortingProperty<>(symbol, node.getOrderings().get(symbol)));
                }
                // LocalProperties.match yields one Optional per desired property; an empty
                // Optional means the child already satisfies it, so all-empty => skip the sort.
                if (LocalProperties.match(child.getProperties().getLocalProperties(), desiredProperties).stream()
                        .noneMatch(Optional::isPresent)) {
                    return child;
                }
            }
            return rebaseAndDeriveProperties(node, child);
        }
@Override
public PlanWithProperties visitLimit(LimitNode node, Context context)
{
PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.any()));
if (!child.getProperties().isSingleNode()) {
child = withDerivedProperties(
new LimitNode(idAllocator.getNextId(), child.getNode(), node.getCount(), true),
child.getProperties());
child = withDerivedProperties(
gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
child.getProperties());
}
return rebaseAndDeriveProperties(node, child);
}
        @Override
        public PlanWithProperties visitDistinctLimit(DistinctLimitNode node, Context context)
        {
            PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.any()));
            if (!child.getProperties().isSingleNode()) {
                // Insert a partial DistinctLimit below a gathering exchange so each source
                // trims duplicates and excess rows before shipping data to the single node.
                child = withDerivedProperties(
                        gatheringExchange(
                                idAllocator.getNextId(),
                                REMOTE,
                                new DistinctLimitNode(idAllocator.getNextId(), child.getNode(), node.getLimit(), true, node.getDistinctSymbols(), node.getHashSymbol())),
                        child.getProperties());
            }
            return rebaseAndDeriveProperties(node, child);
        }
@Override
public PlanWithProperties visitFilter(FilterNode node, Context context)
{
if (node.getSource() instanceof TableScanNode) {
return planTableScan((TableScanNode) node.getSource(), node.getPredicate(), context);
}
return rebaseAndDeriveProperties(node, planChild(node, context));
}
        @Override
        public PlanWithProperties visitTableScan(TableScanNode node, Context context)
        {
            // Bare scan (no enclosing filter): plan with a trivially-true predicate.
            return planTableScan(node, BooleanLiteral.TRUE_LITERAL, context);
        }
        @Override
        public PlanWithProperties visitTableWriter(TableWriterNode node, Context context)
        {
            PlanWithProperties source = node.getSource().accept(this, context);
            Optional<PartitioningScheme> partitioningScheme = node.getPartitioningScheme();
            if (!partitioningScheme.isPresent() && redistributeWrites) {
                // No connector-mandated scheme: spread writes arbitrarily across nodes
                // when the redistribute-writes session property is enabled.
                partitioningScheme = Optional.of(new PartitioningScheme(Partitioning.create(FIXED_ARBITRARY_DISTRIBUTION, ImmutableList.of()), source.getNode().getOutputSymbols()));
            }
            if (partitioningScheme.isPresent()) {
                // Route rows to writers according to the (given or synthesized) scheme.
                source = withDerivedProperties(
                        partitionedExchange(
                                idAllocator.getNextId(),
                                REMOTE,
                                source.getNode(),
                                partitioningScheme.get()),
                        source.getProperties()
                );
            }
            return rebaseAndDeriveProperties(node, source);
        }
        /**
         * Plans a table scan under the given predicate: decomposes the predicate into a
         * tuple domain for layout selection, asks the connector for candidate layouts,
         * and wraps each candidate with a FilterNode re-applying whatever the layout
         * could not enforce. The best candidate is chosen by pickPlan().
         */
        private PlanWithProperties planTableScan(TableScanNode node, Expression predicate, Context context)
        {
            // don't include non-deterministic predicates
            Expression deterministicPredicate = stripNonDeterministicConjuncts(predicate);
            DomainTranslator.ExtractionResult decomposedPredicate = DomainTranslator.fromPredicate(
                    metadata,
                    session,
                    deterministicPredicate,
                    types);
            TupleDomain<ColumnHandle> simplifiedConstraint = decomposedPredicate.getTupleDomain()
                    .transform(node.getAssignments()::get)
                    .intersect(node.getCurrentConstraint());
            Map<ColumnHandle, Symbol> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse();
            Expression constraint = combineConjuncts(
                    deterministicPredicate,
                    DomainTranslator.toPredicate(node.getCurrentConstraint().transform(assignments::get)));
            // Layouts will be returned in order of the connector's preference
            List<TableLayoutResult> layouts = metadata.getLayouts(
                    session, node.getTable(),
                    new Constraint<>(simplifiedConstraint, bindings -> !shouldPrune(constraint, node.getAssignments(), bindings, context.getCorrelations())),
                    Optional.of(node.getOutputSymbols().stream()
                            .map(node.getAssignments()::get)
                            .collect(toImmutableSet())));
            if (layouts.isEmpty()) {
                // No layout can satisfy the constraint: the scan produces no rows at all.
                return new PlanWithProperties(
                        new ValuesNode(idAllocator.getNextId(), node.getOutputSymbols(), ImmutableList.of()),
                        ActualProperties.builder()
                                .global(singleStreamPartition())
                                .build());
            }
            // Filter out layouts that cannot supply all the required columns
            layouts = layouts.stream()
                    .filter(layoutHasAllNeededOutputs(node))
                    .collect(toList());
            checkState(!layouts.isEmpty(), "No usable layouts for %s", node);
            List<PlanWithProperties> possiblePlans = layouts.stream()
                    .map(layout -> {
                        TableScanNode tableScan = new TableScanNode(
                                node.getId(),
                                node.getTable(),
                                node.getOutputSymbols(),
                                node.getAssignments(),
                                Optional.of(layout.getLayout().getHandle()),
                                simplifiedConstraint.intersect(layout.getLayout().getPredicate()),
                                Optional.ofNullable(node.getOriginalConstraint()).orElse(predicate));
                        PlanWithProperties result = new PlanWithProperties(tableScan, deriveProperties(tableScan, ImmutableList.of()));
                        // Re-apply what the layout could not enforce, plus non-deterministic
                        // conjuncts and the remainder of the decomposed predicate.
                        Expression resultingPredicate = combineConjuncts(
                                DomainTranslator.toPredicate(layout.getUnenforcedConstraint().transform(assignments::get)),
                                stripDeterministicConjuncts(predicate),
                                decomposedPredicate.getRemainingExpression());
                        if (!BooleanLiteral.TRUE_LITERAL.equals(resultingPredicate)) {
                            return withDerivedProperties(
                                    new FilterNode(idAllocator.getNextId(), result.getNode(), resultingPredicate),
                                    deriveProperties(tableScan, ImmutableList.of()));
                        }
                        return result;
                    })
                    .collect(toList());
            return pickPlan(possiblePlans, context);
        }
private Predicate<TableLayoutResult> layoutHasAllNeededOutputs(TableScanNode node)
{
return layout -> !layout.getLayout().getColumns().isPresent()
|| layout.getLayout().getColumns().get().containsAll(Lists.transform(node.getOutputSymbols(), node.getAssignments()::get));
}
/**
* possiblePlans should be provided in layout preference order
*/
private PlanWithProperties pickPlan(List<PlanWithProperties> possiblePlans, Context context)
{
checkArgument(!possiblePlans.isEmpty());
if (preferStreamingOperators) {
possiblePlans = new ArrayList<>(possiblePlans);
Collections.sort(possiblePlans, Comparator.comparing(PlanWithProperties::getProperties, streamingExecutionPreference(context.getPreferredProperties()))); // stable sort; is Collections.min() guaranteed to be stable?
}
return possiblePlans.get(0);
}
        /**
         * Returns true when the given partition bindings make the predicate statically
         * false or null, meaning the corresponding partition can be pruned. Conjuncts
         * referencing outer-query correlations are skipped (cannot be evaluated here).
         */
        private boolean shouldPrune(Expression predicate, Map<Symbol, ColumnHandle> assignments, Map<ColumnHandle, NullableValue> bindings, List<Symbol> correlations)
        {
            List<Expression> conjuncts = extractConjuncts(predicate);
            Map<NodeRef<Expression>, Type> expressionTypes = getExpressionTypes(
                    session,
                    metadata,
                    parser,
                    types,
                    predicate,
                    emptyList() /* parameters already replaced */);
            LookupSymbolResolver inputs = new LookupSymbolResolver(assignments, bindings);
            // If any conjuncts evaluate to FALSE or null, then the whole predicate will never be true and so the partition should be pruned
            for (Expression expression : conjuncts) {
                if (DependencyExtractor.extractUnique(expression).stream().anyMatch(correlations::contains)) {
                    // expression contains correlated symbol with outer query
                    continue;
                }
                ExpressionInterpreter optimizer = ExpressionInterpreter.expressionOptimizer(expression, metadata, session, expressionTypes);
                Object optimized = optimizer.optimize(inputs);
                if (Boolean.FALSE.equals(optimized) || optimized == null || optimized instanceof NullLiteral) {
                    return true;
                }
            }
            return false;
        }
@Override
public PlanWithProperties visitValues(ValuesNode node, Context context)
{
return new PlanWithProperties(
node,
ActualProperties.builder()
.global(singleStreamPartition())
.build());
}
@Override
public PlanWithProperties visitExplainAnalyze(ExplainAnalyzeNode node, Context context)
{
PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.any()));
// if the child is already a gathering exchange, don't add another
if ((child.getNode() instanceof ExchangeNode) && ((ExchangeNode) child.getNode()).getType() == ExchangeNode.Type.GATHER) {
return rebaseAndDeriveProperties(node, child);
}
// Always add an exchange because ExplainAnalyze should be in its own stage
child = withDerivedProperties(
gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
child.getProperties());
return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitTableFinish(TableFinishNode node, Context context)
{
PlanWithProperties child = planChild(node, context.withPreferredProperties(PreferredProperties.any()));
// if the child is already a gathering exchange, don't add another
if ((child.getNode() instanceof ExchangeNode) && ((ExchangeNode) child.getNode()).getType().equals(GATHER)) {
return rebaseAndDeriveProperties(node, child);
}
if (!child.getProperties().isSingleNode() || !child.getProperties().isCoordinatorOnly()) {
child = withDerivedProperties(
gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
child.getProperties());
}
return rebaseAndDeriveProperties(node, child);
}
private <T> SetMultimap<T, T> createMapping(List<T> keys, List<T> values)
{
checkArgument(keys.size() == values.size(), "Inputs must have the same size");
ImmutableSetMultimap.Builder<T, T> builder = ImmutableSetMultimap.builder();
for (int i = 0; i < keys.size(); i++) {
builder.put(keys.get(i), values.get(i));
}
return builder.build();
}
        /**
         * Builds a translator mapping an input to any one of its outputs in the
         * multimap; inputs with no mapping translate to {@link Optional#empty()}.
         */
        private <T> Function<T, Optional<T>> createTranslator(SetMultimap<T, T> inputToOutput)
        {
            return input -> inputToOutput.get(input).stream().findAny();
        }
        /**
         * Like {@link #createTranslator}, but assumes every input has at least one
         * mapping; an unmapped input throws NoSuchElementException from the iterator.
         */
        private <T> Function<T, T> createDirectTranslator(SetMultimap<T, T> inputToOutput)
        {
            return input -> inputToOutput.get(input).iterator().next();
        }
        @Override
        public PlanWithProperties visitJoin(JoinNode node, Context context)
        {
            List<Symbol> leftSymbols = Lists.transform(node.getCriteria(), JoinNode.EquiJoinClause::getLeft);
            List<Symbol> rightSymbols = Lists.transform(node.getCriteria(), JoinNode.EquiJoinClause::getRight);
            JoinNode.Type type = node.getType();
            PlanWithProperties left;
            PlanWithProperties right;
            JoinNode.DistributionType distributionType = node.getDistributionType().orElseThrow(() -> new IllegalArgumentException("distributionType not yet set"));
            if (distributionType == JoinNode.DistributionType.PARTITIONED) {
                SetMultimap<Symbol, Symbol> rightToLeft = createMapping(rightSymbols, leftSymbols);
                SetMultimap<Symbol, Symbol> leftToRight = createMapping(leftSymbols, rightSymbols);
                left = node.getLeft().accept(this, context.withPreferredProperties(PreferredProperties.partitioned(ImmutableSet.copyOf(leftSymbols))));
                if (left.getProperties().isNodePartitionedOn(leftSymbols) && !left.getProperties().isSingleNode()) {
                    // Left side already usefully partitioned: mirror its partitioning onto the right.
                    Partitioning rightPartitioning = left.getProperties().translate(createTranslator(leftToRight)).getNodePartitioning().get();
                    right = node.getRight().accept(this, context.withPreferredProperties(PreferredProperties.partitioned(rightPartitioning)));
                    if (!right.getProperties().isNodePartitionedWith(left.getProperties(), rightToLeft::get)) {
                        right = withDerivedProperties(
                                partitionedExchange(idAllocator.getNextId(), REMOTE, right.getNode(), new PartitioningScheme(rightPartitioning, right.getNode().getOutputSymbols())),
                                right.getProperties());
                    }
                }
                else {
                    right = node.getRight().accept(this, context.withPreferredProperties(PreferredProperties.partitioned(ImmutableSet.copyOf(rightSymbols))));
                    if (right.getProperties().isNodePartitionedOn(rightSymbols) && !right.getProperties().isSingleNode()) {
                        // Right side usefully partitioned: repartition the left to match it.
                        Partitioning leftPartitioning = right.getProperties().translate(createTranslator(rightToLeft)).getNodePartitioning().get();
                        left = withDerivedProperties(
                                partitionedExchange(idAllocator.getNextId(), REMOTE, left.getNode(), new PartitioningScheme(leftPartitioning, left.getNode().getOutputSymbols())),
                                left.getProperties());
                    }
                    else {
                        // Neither side is usefully partitioned: hash-repartition both on the join keys.
                        left = withDerivedProperties(
                                partitionedExchange(idAllocator.getNextId(), REMOTE, left.getNode(), leftSymbols, Optional.empty()),
                                left.getProperties());
                        right = withDerivedProperties(
                                partitionedExchange(idAllocator.getNextId(), REMOTE, right.getNode(), rightSymbols, Optional.empty()),
                                right.getProperties());
                    }
                }
                verify(left.getProperties().isNodePartitionedWith(right.getProperties(), leftToRight::get));
                // if colocated joins are disabled, force redistribute when using a custom partitioning
                if (!isColocatedJoinEnabled(session) && hasMultipleSources(left.getNode(), right.getNode())) {
                    Partitioning rightPartitioning = left.getProperties().translate(createTranslator(leftToRight)).getNodePartitioning().get();
                    right = withDerivedProperties(
                            partitionedExchange(idAllocator.getNextId(), REMOTE, right.getNode(), new PartitioningScheme(rightPartitioning, right.getNode().getOutputSymbols())),
                            right.getProperties());
                }
            }
            else {
                // Broadcast Join
                left = node.getLeft().accept(this, context.withPreferredProperties(PreferredProperties.any()));
                right = node.getRight().accept(this, context.withPreferredProperties(PreferredProperties.any()));
                if (left.getProperties().isSingleNode()) {
                    // Single-node probe side: only gather the build side when needed.
                    if (!right.getProperties().isSingleNode() ||
                            (!isColocatedJoinEnabled(session) && hasMultipleSources(left.getNode(), right.getNode()))) {
                        right = withDerivedProperties(
                                gatheringExchange(idAllocator.getNextId(), REMOTE, right.getNode()),
                                right.getProperties());
                    }
                }
                else {
                    // Replicate the build (right) side to every node holding probe-side data.
                    right = withDerivedProperties(
                            replicatedExchange(idAllocator.getNextId(), REMOTE, right.getNode()),
                            right.getProperties());
                }
            }
            JoinNode result = new JoinNode(node.getId(),
                    type,
                    left.getNode(),
                    right.getNode(),
                    node.getCriteria(),
                    node.getOutputSymbols(),
                    node.getFilter(),
                    node.getLeftHashSymbol(),
                    node.getRightHashSymbol(),
                    node.getDistributionType());
            return new PlanWithProperties(result, deriveProperties(result, ImmutableList.of(left.getProperties(), right.getProperties())));
        }
@Override
public PlanWithProperties visitUnnest(UnnestNode node, Context context)
{
PreferredProperties translatedPreferred = context.getPreferredProperties().translate(symbol -> node.getReplicateSymbols().contains(symbol) ? Optional.of(symbol) : Optional.empty());
return rebaseAndDeriveProperties(node, planChild(node, context.withPreferredProperties(translatedPreferred)));
}
/**
 * Adds exchanges around a semi join's source and filtering source.
 * <p>
 * For PARTITIONED distribution, the two sides are aligned on compatible partitionings
 * of the join symbols; the filtering side always replicates nulls and "any" rows so
 * every probe partition can observe them. For the replicated case, the filtering side
 * is gathered (single-node plans) or replicated to every node of the source.
 */
@Override
public PlanWithProperties visitSemiJoin(SemiJoinNode node, Context context)
{
    PlanWithProperties source;
    PlanWithProperties filteringSource;
    // The distribution type must have been decided by an earlier optimizer pass.
    SemiJoinNode.DistributionType distributionType = node.getDistributionType().orElseThrow(() -> new IllegalArgumentException("distributionType not yet set"));
    if (distributionType == SemiJoinNode.DistributionType.PARTITIONED) {
        List<Symbol> sourceSymbols = ImmutableList.of(node.getSourceJoinSymbol());
        List<Symbol> filteringSourceSymbols = ImmutableList.of(node.getFilteringSourceJoinSymbol());
        SetMultimap<Symbol, Symbol> sourceToFiltering = createMapping(sourceSymbols, filteringSourceSymbols);
        SetMultimap<Symbol, Symbol> filteringToSource = createMapping(filteringSourceSymbols, sourceSymbols);
        source = node.getSource().accept(this, context.withPreferredProperties(PreferredProperties.partitioned(ImmutableSet.copyOf(sourceSymbols))));
        if (source.getProperties().isNodePartitionedOn(sourceSymbols) && !source.getProperties().isSingleNode()) {
            // Source came back usefully partitioned: ask the filtering side to match it,
            // with nulls/any replicated, and force an exchange if it cannot.
            Partitioning filteringPartitioning = source.getProperties().translate(createTranslator(sourceToFiltering)).getNodePartitioning().get();
            filteringSource = node.getFilteringSource().accept(this, context.withPreferredProperties(PreferredProperties.partitionedWithNullsAndAnyReplicated(filteringPartitioning)));
            if (!source.getProperties().withReplicatedNulls(true).isNodePartitionedWith(filteringSource.getProperties(), sourceToFiltering::get)) {
                filteringSource = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode(), new PartitioningScheme(
                                filteringPartitioning,
                                filteringSource.getNode().getOutputSymbols(),
                                Optional.empty(),
                                true,
                                Optional.empty())),
                        filteringSource.getProperties());
            }
        }
        else {
            // Otherwise try to reuse the filtering side's natural partitioning for the source.
            filteringSource = node.getFilteringSource().accept(this, context.withPreferredProperties(PreferredProperties.partitionedWithNullsAndAnyReplicated(ImmutableSet.copyOf(filteringSourceSymbols))));
            if (filteringSource.getProperties().isNodePartitionedOn(filteringSourceSymbols, true) && !filteringSource.getProperties().isSingleNode()) {
                Partitioning sourcePartitioning = filteringSource.getProperties().translate(createTranslator(filteringToSource)).getNodePartitioning().get();
                source = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, source.getNode(), new PartitioningScheme(sourcePartitioning, source.getNode().getOutputSymbols())),
                        source.getProperties());
            }
            else {
                // Neither side is usefully partitioned: hash-partition both on the join symbols.
                source = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, source.getNode(), sourceSymbols, Optional.empty()),
                        source.getProperties());
                filteringSource = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode(), filteringSourceSymbols, Optional.empty(), true),
                        filteringSource.getProperties());
            }
        }
        verify(source.getProperties().withReplicatedNulls(true).isNodePartitionedWith(filteringSource.getProperties(), sourceToFiltering::get));
        // if colocated joins are disabled, force redistribute when using a custom partitioning
        if (!isColocatedJoinEnabled(session) && hasMultipleSources(source.getNode(), filteringSource.getNode())) {
            Partitioning filteringPartitioning = source.getProperties().translate(createTranslator(sourceToFiltering)).getNodePartitioning().get();
            filteringSource = withDerivedProperties(
                    partitionedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode(), new PartitioningScheme(
                            filteringPartitioning,
                            filteringSource.getNode().getOutputSymbols(),
                            Optional.empty(),
                            true,
                            Optional.empty())),
                    filteringSource.getProperties());
        }
    }
    else {
        // Replicated semi join: plan both sides without partitioning preferences.
        source = node.getSource().accept(this, context.withPreferredProperties(PreferredProperties.any()));
        // Delete operator works fine even if TableScans on the filtering (right) side is not co-located with itself. It only cares about the corresponding TableScan,
        // which is always on the source (left) side. Therefore, hash-partitioned semi-join is always allowed on the filtering side.
        filteringSource = node.getFilteringSource().accept(this, context.withPreferredProperties(PreferredProperties.any()));
        // make filtering source match requirements of source
        if (source.getProperties().isSingleNode()) {
            if (!filteringSource.getProperties().isSingleNode() ||
                    (!isColocatedJoinEnabled(session) && hasMultipleSources(source.getNode(), filteringSource.getNode()))) {
                filteringSource = withDerivedProperties(
                        gatheringExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode()),
                        filteringSource.getProperties());
            }
        }
        else {
            filteringSource = withDerivedProperties(
                    replicatedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode()),
                    filteringSource.getProperties());
        }
    }
    return rebaseAndDeriveProperties(node, ImmutableList.of(source, filteringSource));
}
/**
 * Plans an index join. Only the probe side ever receives exchanges; the index side is
 * a nested-loops lookup that must stay colocated with the probe. The probe may be
 * repartitioned on the join columns to enable a distributed index join.
 */
@Override
public PlanWithProperties visitIndexJoin(IndexJoinNode node, Context context)
{
    List<Symbol> joinColumns = Lists.transform(node.getCriteria(), IndexJoinNode.EquiJoinClause::getProbe);
    // Only prefer grouping on join columns if no parent local property preferences
    List<LocalProperty<Symbol>> desiredLocalProperties = context.getPreferredProperties().getLocalProperties().isEmpty() ? grouped(joinColumns) : ImmutableList.of();
    PlanWithProperties probeSource = node.getProbeSource().accept(this, context.withPreferredProperties(
            PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(joinColumns), desiredLocalProperties)
                    .mergeWithParent(context.getPreferredProperties())));
    ActualProperties probeProperties = probeSource.getProperties();
    PlanWithProperties indexSource = node.getIndexSource().accept(this, context.withPreferredProperties(PreferredProperties.any()));
    // TODO: allow repartitioning if unpartitioned to increase parallelism
    if (shouldRepartitionForIndexJoin(joinColumns, context.getPreferredProperties(), probeProperties)) {
        probeSource = withDerivedProperties(
                partitionedExchange(idAllocator.getNextId(), REMOTE, probeSource.getNode(), joinColumns, node.getProbeHashSymbol()),
                probeProperties);
    }
    // TODO: if input is grouped, create streaming join
    // index side is really a nested-loops plan, so don't add exchanges
    PlanNode result = ChildReplacer.replaceChildren(node, ImmutableList.of(probeSource.getNode(), node.getIndexSource()));
    return new PlanWithProperties(result, deriveProperties(result, ImmutableList.of(probeSource.getProperties(), indexSource.getProperties())));
}
/**
 * Decides whether the probe side of an index join should be repartitioned on the
 * join columns to enable a distributed index join.
 */
private boolean shouldRepartitionForIndexJoin(List<Symbol> joinColumns, PreferredProperties parentPreferredProperties, ActualProperties probeProperties)
{
    // Feature disabled, or a single-node plan has nothing to gain from repartitioning.
    if (!distributedIndexJoins || probeProperties.isSingleNode()) {
        return false;
    }
    // Do not disrupt a parent's partitioning preference when streaming is enabled.
    Optional<PreferredProperties.PartitioningProperties> parentPartitioning = parentPreferredProperties.getGlobalProperties()
            .flatMap(PreferredProperties.Global::getPartitioningProperties);
    boolean alreadyMatchesParent = parentPartitioning
            .map(partitioning -> probeProperties.isStreamPartitionedOn(partitioning.getPartitioningColumns()))
            .orElse(false);
    if (preferStreamingOperators && alreadyMatchesParent) {
        return false;
    }
    // Repartition whenever the probe is not yet aligned with the join columns.
    if (!probeProperties.isStreamPartitionedOn(joinColumns)) {
        return true;
    }
    // The probe may only be "partitioned" because everything collapsed into one stream;
    // repartition if doing so would actually spread the data out again.
    return probeProperties.isEffectivelySingleStream() && probeProperties.isStreamRepartitionEffective(joinColumns);
}
/**
 * An index source always executes as a single stream on a single node;
 * report exactly that and leave the node untouched.
 */
@Override
public PlanWithProperties visitIndexSource(IndexSourceNode node, Context context)
{
    ActualProperties properties = ActualProperties.builder()
            .global(singleStreamPartition())
            .build();
    return new PlanWithProperties(node, properties);
}
/**
 * Returns a translator mapping each union output symbol to the corresponding
 * input symbol of the source at {@code sourceIndex}.
 */
private Function<Symbol, Optional<Symbol>> outputToInputTranslator(UnionNode node, int sourceIndex)
{
    return output -> {
        Symbol input = node.getSymbolMapping().get(output).get(sourceIndex);
        return Optional.of(input);
    };
}
/**
 * Chooses the common partitioning that all children of a UNION will be planned to:
 * the parent's explicit request if there is one, otherwise the first partitioning a
 * child naturally produces, and as a last resort a hash partitioning over the
 * preferred columns.
 */
private Partitioning selectUnionPartitioning(UnionNode node, Context context, PreferredProperties.PartitioningProperties parentPreference)
{
    // Use the parent's requested partitioning if available
    if (parentPreference.getPartitioning().isPresent()) {
        return parentPreference.getPartitioning().get();
    }
    // Try planning the children to see if any of them naturally produce a partitioning (for now, just select the first)
    boolean nullsAndAnyReplicated = parentPreference.isNullsAndAnyReplicated();
    for (int sourceIndex = 0; sourceIndex < node.getSources().size(); sourceIndex++) {
        PreferredProperties.PartitioningProperties childPartitioning = parentPreference.translate(outputToInputTranslator(node, sourceIndex)).get();
        PreferredProperties childPreferred = PreferredProperties.builder()
                .global(PreferredProperties.Global.distributed(childPartitioning.withNullsAndAnyReplicated(nullsAndAnyReplicated)))
                .build();
        PlanWithProperties child = node.getSources().get(sourceIndex).accept(this, context.withPreferredProperties(childPreferred));
        if (child.getProperties().isNodePartitionedOn(childPartitioning.getPartitioningColumns(), nullsAndAnyReplicated)) {
            // Translate the child's partitioning back into the union's output symbols.
            Function<Symbol, Optional<Symbol>> childToParent = createTranslator(createMapping(node.sourceOutputLayout(sourceIndex), node.getOutputSymbols()));
            return child.getProperties().translate(childToParent).getNodePartitioning().get();
        }
    }
    // Otherwise, choose an arbitrary partitioning over the columns
    return Partitioning.create(FIXED_HASH_DISTRIBUTION, ImmutableList.copyOf(parentPreference.getPartitioningColumns()));
}
/**
 * Adds exchanges for a UNION ALL.
 * <p>
 * When the parent prefers a specific distributed partitioning, every child is planned
 * toward (and, if necessary, repartitioned to) one common partitioning so the union is
 * a purely logical concatenation of compatible partitions. Otherwise, children are
 * classified into single-node and distributed groups: distributed inputs are either
 * left as-is (when the parent accepts any distributed result) or gathered, and all
 * single-node inputs are combined with a local union.
 */
@Override
public PlanWithProperties visitUnion(UnionNode node, Context context)
{
    PreferredProperties parentPreference = context.getPreferredProperties();
    Optional<PreferredProperties.Global> parentGlobal = parentPreference.getGlobalProperties();
    if (parentGlobal.isPresent() && parentGlobal.get().isDistributed() && parentGlobal.get().getPartitioningProperties().isPresent()) {
        PreferredProperties.PartitioningProperties parentPartitioningPreference = parentGlobal.get().getPartitioningProperties().get();
        boolean nullsAndAnyReplicated = parentPartitioningPreference.isNullsAndAnyReplicated();
        Partitioning desiredParentPartitioning = selectUnionPartitioning(node, context, parentPartitioningPreference);
        ImmutableList.Builder<PlanNode> partitionedSources = ImmutableList.builder();
        ImmutableListMultimap.Builder<Symbol, Symbol> outputToSourcesMapping = ImmutableListMultimap.builder();
        for (int sourceIndex = 0; sourceIndex < node.getSources().size(); sourceIndex++) {
            // Restate the desired partitioning in terms of this child's own symbols.
            Partitioning childPartitioning = desiredParentPartitioning.translate(createDirectTranslator(createMapping(node.getOutputSymbols(), node.sourceOutputLayout(sourceIndex))));
            PreferredProperties childPreferred = PreferredProperties.builder()
                    .global(PreferredProperties.Global.distributed(PreferredProperties.PartitioningProperties.partitioned(childPartitioning)
                            .withNullsAndAnyReplicated(nullsAndAnyReplicated)))
                    .build();
            PlanWithProperties source = node.getSources().get(sourceIndex).accept(this, context.withPreferredProperties(childPreferred));
            if (!source.getProperties().isNodePartitionedOn(childPartitioning, nullsAndAnyReplicated)) {
                // The child could not satisfy the request on its own; force an exchange.
                source = withDerivedProperties(
                        partitionedExchange(
                                idAllocator.getNextId(),
                                REMOTE,
                                source.getNode(),
                                new PartitioningScheme(
                                        childPartitioning,
                                        source.getNode().getOutputSymbols(),
                                        Optional.empty(),
                                        nullsAndAnyReplicated,
                                        Optional.empty())),
                        source.getProperties());
            }
            partitionedSources.add(source.getNode());
            for (int column = 0; column < node.getOutputSymbols().size(); column++) {
                outputToSourcesMapping.put(node.getOutputSymbols().get(column), node.sourceOutputLayout(sourceIndex).get(column));
            }
        }
        UnionNode newNode = new UnionNode(
                node.getId(),
                partitionedSources.build(),
                outputToSourcesMapping.build(),
                ImmutableList.copyOf(outputToSourcesMapping.build().keySet()));
        return new PlanWithProperties(
                newNode,
                ActualProperties.builder()
                        .global(partitionedOn(desiredParentPartitioning, Optional.of(desiredParentPartitioning)))
                        .build()
                        .withReplicatedNulls(parentPartitioningPreference.isNullsAndAnyReplicated()));
    }
    // first, classify children into partitioned and unpartitioned
    List<PlanNode> unpartitionedChildren = new ArrayList<>();
    List<List<Symbol>> unpartitionedOutputLayouts = new ArrayList<>();
    List<PlanNode> partitionedChildren = new ArrayList<>();
    List<List<Symbol>> partitionedOutputLayouts = new ArrayList<>();
    List<PlanWithProperties> plannedChildren = new ArrayList<>();
    for (int i = 0; i < node.getSources().size(); i++) {
        PlanWithProperties child = node.getSources().get(i).accept(this, context.withPreferredProperties(PreferredProperties.any()));
        plannedChildren.add(child);
        if (child.getProperties().isSingleNode()) {
            unpartitionedChildren.add(child.getNode());
            unpartitionedOutputLayouts.add(node.sourceOutputLayout(i));
        }
        else {
            partitionedChildren.add(child.getNode());
            // union may drop or duplicate symbols from the input so we must provide an exact mapping
            partitionedOutputLayouts.add(node.sourceOutputLayout(i));
        }
    }
    PlanNode result;
    if (!partitionedChildren.isEmpty() && unpartitionedChildren.isEmpty()) {
        // parent does not have preference or prefers some partitioning without any explicit partitioning - just use
        // children partitioning and don't GATHER partitioned inputs
        // TODO: add FIXED_ARBITRARY_DISTRIBUTION support on non empty unpartitionedChildren
        if (!parentGlobal.isPresent() || parentGlobal.get().isDistributed()) {
            return arbitraryDistributeUnion(node, plannedChildren, partitionedChildren, partitionedOutputLayouts);
        }
        // add a gathering exchange above partitioned inputs
        result = new ExchangeNode(
                idAllocator.getNextId(),
                GATHER,
                REMOTE,
                new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), node.getOutputSymbols()),
                partitionedChildren,
                partitionedOutputLayouts);
    }
    else if (!unpartitionedChildren.isEmpty()) {
        if (!partitionedChildren.isEmpty()) {
            // add a gathering exchange above partitioned inputs and fold it into the set of unpartitioned inputs
            // NOTE: new symbols for ExchangeNode output are required in order to keep plan logically correct with new local union below
            List<Symbol> exchangeOutputLayout = node.getOutputSymbols().stream()
                    .map(outputSymbol -> symbolAllocator.newSymbol(outputSymbol.getName(), types.get(outputSymbol)))
                    .collect(toImmutableList());
            result = new ExchangeNode(
                    idAllocator.getNextId(),
                    GATHER,
                    REMOTE,
                    new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), exchangeOutputLayout),
                    partitionedChildren,
                    partitionedOutputLayouts);
            unpartitionedChildren.add(result);
            unpartitionedOutputLayouts.add(result.getOutputSymbols());
        }
        // Every output symbol maps positionally to one symbol of each unpartitioned input.
        ImmutableListMultimap.Builder<Symbol, Symbol> mappings = ImmutableListMultimap.builder();
        for (int i = 0; i < node.getOutputSymbols().size(); i++) {
            for (List<Symbol> outputLayout : unpartitionedOutputLayouts) {
                mappings.put(node.getOutputSymbols().get(i), outputLayout.get(i));
            }
        }
        // add local union for all unpartitioned inputs
        result = new UnionNode(node.getId(), unpartitionedChildren, mappings.build(), ImmutableList.copyOf(mappings.build().keySet()));
    }
    else {
        throw new IllegalStateException("both unpartitionedChildren partitionedChildren are empty");
    }
    return new PlanWithProperties(
            result,
            ActualProperties.builder()
                    .global(singleStreamPartition())
                    .build());
}
/**
 * Handles the case where all union children are distributed and the parent accepts any
 * distributed output: either keep the children as-is (when none is a table-scan source
 * stage) or insert a REMOTE arbitrary repartitioning exchange.
 */
private PlanWithProperties arbitraryDistributeUnion(
        UnionNode node,
        List<PlanWithProperties> plannedChildren,
        List<PlanNode> partitionedChildren,
        List<List<Symbol>> partitionedOutputLayouts)
{
    // TODO: can we insert LOCAL exchange for one child SOURCE distributed and another HASH distributed?
    if (countSources(partitionedChildren) == 0) {
        // No source distributed child, we can use insert LOCAL exchange
        // TODO: if all children have the same partitioning, pass this partitioning to the parent
        // instead of "arbitraryPartition".
        return new PlanWithProperties(node.replaceChildren(
                plannedChildren.stream()
                        .map(PlanWithProperties::getNode)
                        .collect(toList())));
    }
    else {
        // Presto currently can not execute stage that has multiple table scans, so in that case
        // we have to insert REMOTE exchange with FIXED_ARBITRARY_DISTRIBUTION instead of local exchange
        return new PlanWithProperties(
                new ExchangeNode(
                        idAllocator.getNextId(),
                        REPARTITION,
                        REMOTE,
                        new PartitioningScheme(Partitioning.create(FIXED_ARBITRARY_DISTRIBUTION, ImmutableList.of()), node.getOutputSymbols()),
                        partitionedChildren,
                        partitionedOutputLayouts));
    }
}
/**
 * Plans both sides of an APPLY node. The subquery side is planned with the
 * correlation symbols recorded in the context; no exchanges are added here.
 */
@Override
public PlanWithProperties visitApply(ApplyNode node, Context context)
{
    PlanWithProperties input = node.getInput().accept(this, context);
    PlanWithProperties subquery = node.getSubquery().accept(this, context.withCorrelations(node.getCorrelation()));
    ApplyNode rewritten = new ApplyNode(
            node.getId(),
            input.getNode(),
            subquery.getNode(),
            node.getSubqueryAssignments(),
            node.getCorrelation());
    List<ActualProperties> childProperties = ImmutableList.of(input.getProperties(), subquery.getProperties());
    return new PlanWithProperties(rewritten, deriveProperties(rewritten, childProperties));
}
/**
 * Plans the single child of {@code node} (asserts there is exactly one source).
 */
private PlanWithProperties planChild(PlanNode node, Context context)
{
    PlanNode onlySource = getOnlyElement(node.getSources());
    return onlySource.accept(this, context);
}
/**
 * Replaces the node's single child with the planned child and derives the
 * resulting properties.
 */
private PlanWithProperties rebaseAndDeriveProperties(PlanNode node, PlanWithProperties child)
{
    PlanNode rebased = ChildReplacer.replaceChildren(node, ImmutableList.of(child.getNode()));
    return withDerivedProperties(rebased, child.getProperties());
}
/**
 * Replaces all of the node's children with the planned children and derives the
 * resulting properties from theirs.
 */
private PlanWithProperties rebaseAndDeriveProperties(PlanNode node, List<PlanWithProperties> children)
{
    List<PlanNode> childNodes = children.stream()
            .map(PlanWithProperties::getNode)
            .collect(toList());
    List<ActualProperties> childProperties = children.stream()
            .map(PlanWithProperties::getProperties)
            .collect(toList());
    PlanNode rebased = node.replaceChildren(childNodes);
    return new PlanWithProperties(rebased, deriveProperties(rebased, childProperties));
}
/**
 * Pairs {@code node} with the properties derived from the given input properties.
 */
private PlanWithProperties withDerivedProperties(PlanNode node, ActualProperties inputProperties)
{
    ActualProperties derived = deriveProperties(node, inputProperties);
    return new PlanWithProperties(node, derived);
}
/**
 * Derives the physical properties of {@code result} from a single input's properties.
 */
private ActualProperties deriveProperties(PlanNode result, ActualProperties inputProperties)
{
    // Delegate to the shared property-derivation machinery.
    ActualProperties derived = PropertyDerivations.deriveProperties(result, inputProperties, metadata, session, types, parser);
    return derived;
}
/**
 * Derives the physical properties of {@code result} from the properties of all of
 * its inputs.
 */
private ActualProperties deriveProperties(PlanNode result, List<ActualProperties> inputProperties)
{
    // Delegate to the shared property-derivation machinery.
    ActualProperties derived = PropertyDerivations.deriveProperties(result, inputProperties, metadata, session, types, parser);
    return derived;
}
}
/**
 * Collects the assignments that are pure renames ("output := inputSymbol"),
 * mapping each output symbol to the input symbol it mirrors.
 */
private static Map<Symbol, Symbol> computeIdentityTranslations(Assignments assignments)
{
    Map<Symbol, Symbol> identities = new HashMap<>();
    assignments.getMap().forEach((output, expression) -> {
        if (expression instanceof SymbolReference) {
            identities.put(output, Symbol.from(expression));
        }
    });
    return identities;
}
/**
 * Returns a comparator ranking candidate plan properties by how well they support
 * streaming execution of the given preferences: first whether any local-property
 * optimization is possible, then whether partitioning requirements are met, then the
 * quality of the local-property match.
 */
@VisibleForTesting
static Comparator<ActualProperties> streamingExecutionPreference(PreferredProperties preferred)
{
    // Calculating the matches can be a bit expensive, so cache the results between comparisons
    LoadingCache<List<LocalProperty<Symbol>>, List<Optional<LocalProperty<Symbol>>>> matchCache = CacheBuilder.newBuilder()
            .build(new CacheLoader<List<LocalProperty<Symbol>>, List<Optional<LocalProperty<Symbol>>>>()
            {
                @Override
                public List<Optional<LocalProperty<Symbol>>> load(List<LocalProperty<Symbol>> actualProperties)
                {
                    return LocalProperties.match(actualProperties, preferred.getLocalProperties());
                }
            });
    return (actual1, actual2) -> {
        List<Optional<LocalProperty<Symbol>>> matchLayout1 = matchCache.getUnchecked(actual1.getLocalProperties());
        List<Optional<LocalProperty<Symbol>>> matchLayout2 = matchCache.getUnchecked(actual2.getLocalProperties());
        return ComparisonChain.start()
                .compareTrueFirst(hasLocalOptimization(preferred.getLocalProperties(), matchLayout1), hasLocalOptimization(preferred.getLocalProperties(), matchLayout2))
                .compareTrueFirst(meetsPartitioningRequirements(preferred, actual1), meetsPartitioningRequirements(preferred, actual2))
                .compare(matchLayout1, matchLayout2, matchedLayoutPreference())
                .result();
    };
}
/**
 * Returns whether matching the desired local properties allowed any optimization at
 * all: i.e. the first desired property was modified in some way by the match.
 */
private static <T> boolean hasLocalOptimization(List<LocalProperty<T>> desiredLayout, List<Optional<LocalProperty<T>>> matchResult)
{
    checkArgument(desiredLayout.size() == matchResult.size());
    if (matchResult.isEmpty()) {
        return false;
    }
    Optional<LocalProperty<T>> firstMatch = matchResult.get(0);
    return !firstMatch.equals(Optional.of(desiredLayout.get(0)));
}
/**
 * Checks whether the actual properties satisfy the preferred global (distribution and
 * partitioning) requirements.
 */
private static boolean meetsPartitioningRequirements(PreferredProperties preferred, ActualProperties actual)
{
    Optional<PreferredProperties.Global> global = preferred.getGlobalProperties();
    if (!global.isPresent()) {
        // No global preference expressed: anything is acceptable.
        return true;
    }
    PreferredProperties.Global preferredGlobal = global.get();
    if (!preferredGlobal.isDistributed()) {
        return actual.isSingleNode();
    }
    Optional<PreferredProperties.PartitioningProperties> partitioning = preferredGlobal.getPartitioningProperties();
    if (!partitioning.isPresent()) {
        // Distributed-but-unspecified preference: any non-single-node plan works.
        return !actual.isSingleNode();
    }
    return actual.isStreamPartitionedOn(partitioning.get().getPartitioningColumns());
}
/**
 * Returns a comparator preferring the match result that satisfied the most
 * requirements: on the first position where the results differ, a satisfied match
 * wins; when both are satisfied, the one covering more columns wins.
 */
private static <T> Comparator<List<Optional<LocalProperty<T>>>> matchedLayoutPreference()
{
    return (layout1, layout2) -> {
        int common = Math.min(layout1.size(), layout2.size());
        for (int i = 0; i < common; i++) {
            Optional<LocalProperty<T>> first = layout1.get(i);
            Optional<LocalProperty<T>> second = layout2.get(i);
            if (first.isPresent() && second.isPresent()) {
                return Integer.compare(first.get().getColumns().size(), second.get().getColumns().size());
            }
            if (first.isPresent()) {
                return 1;
            }
            if (second.isPresent()) {
                return -1;
            }
        }
        checkState(layout1.size() == layout2.size()); // should always be the same size
        return 0;
    };
}
/**
 * A plan node paired with the physical properties derived for it during
 * exchange planning.
 */
@VisibleForTesting
static class PlanWithProperties
{
    private final PlanNode planNode;
    private final ActualProperties actualProperties;

    public PlanWithProperties(PlanNode node)
    {
        // No known properties yet: start from an empty property set.
        this(node, ActualProperties.builder().build());
    }

    public PlanWithProperties(PlanNode node, ActualProperties properties)
    {
        this.planNode = node;
        this.actualProperties = properties;
    }

    public PlanNode getNode()
    {
        return planNode;
    }

    public ActualProperties getProperties()
    {
        return actualProperties;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal.membership;
import static org.apache.geode.distributed.ConfigurationProperties.DISABLE_TCP;
import static org.apache.geode.distributed.ConfigurationProperties.GROUPS;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_LEVEL;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.MEMBER_TIMEOUT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
import java.net.InetAddress;
import java.util.List;
import java.util.Properties;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.GemFireConfigException;
import org.apache.geode.distributed.ConfigurationProperties;
import org.apache.geode.distributed.Locator;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.DMStats;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.distributed.internal.DistributionConfigImpl;
import org.apache.geode.distributed.internal.InternalLocator;
import org.apache.geode.distributed.internal.SerialAckedMessage;
import org.apache.geode.distributed.internal.membership.gms.GMSUtil;
import org.apache.geode.distributed.internal.membership.gms.ServiceConfig;
import org.apache.geode.distributed.internal.membership.gms.Services;
import org.apache.geode.distributed.internal.membership.gms.interfaces.JoinLeave;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave;
import org.apache.geode.distributed.internal.membership.gms.messages.HeartbeatMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.HeartbeatRequestMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.InstallViewMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.JoinRequestMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.JoinResponseMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.LeaveRequestMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.RemoveMemberMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.SuspectMembersMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.ViewAckMessage;
import org.apache.geode.distributed.internal.membership.gms.mgr.GMSMembershipManager;
import org.apache.geode.internal.AvailablePortHelper;
import org.apache.geode.internal.admin.remote.RemoteTransportConfig;
import org.apache.geode.internal.net.SocketCreator;
import org.apache.geode.internal.security.SecurityServiceFactory;
import org.apache.geode.test.junit.categories.IntegrationTest;
@Category({IntegrationTest.class, MembershipJUnitTest.class})
public class MembershipJUnitTest {
/**
 * This test creates a locator with a colocated membership manager and then creates a second
 * manager that joins the system of the first, using two ordinary member groups.
 *
 * It then makes assertions about the state of the membership view, closes one of the managers and
 * makes more assertions. It also ensures that a cache message can be sent from one manager to the
 * other.
 */
@Test
public void testMultipleManagersInSameProcess() throws Exception {
  doTestMultipleManagersInSameProcessWithGroups("red, blue");
}
/**
 * Ensures that a very large member-group configuration (8000 names, roughly 80KB of
 * comma-separated group data) does not cause communication issues when joining.
 */
@Test
public void testManagersWithLargeGroups() throws Exception {
  // Build 8000 ten-character group names: two digits of "thousands" followed by
  // eight digits of the group number, separated by commas.
  StringBuilder groupNames = new StringBuilder(80000);
  for (int thousands = 1; thousands < 9; thousands++) {
    for (int group = 0; group < 1000; group++) {
      if (groupNames.length() > 0) {
        groupNames.append(',');
      }
      groupNames.append(String.format("%1$02d%2$08d", thousands, group));
    }
  }
  List<String> result = doTestMultipleManagersInSameProcessWithGroups(groupNames.toString());
  // Every group name must survive the round trip through the membership view.
  assertEquals(8000, result.size());
  for (String group : result) {
    assertEquals(10, group.length());
  }
}
/**
 * Runs the two-managers-in-one-process scenario with the given comma-separated member
 * groups: boots a locator, joins two membership managers, waits for their views to
 * converge, sends a SerialAckedMessage between them, then disconnects one member.
 *
 * @param groups comma-separated group names configured on both members
 * @return the groups of the member that was not the view creator, for verification that
 *         they were correctly transmitted
 * @throws Exception on locator/manager startup failure or if the views never stabilize
 */
private List<String> doTestMultipleManagersInSameProcessWithGroups(String groups)
    throws Exception {
  MembershipManager m1 = null, m2 = null;
  Locator l = null;
  // int mcastPort = AvailablePortHelper.getRandomAvailableUDPPort();
  try {
    // boot up a locator
    int port = AvailablePortHelper.getRandomAvailableTCPPort();
    InetAddress localHost = SocketCreator.getLocalHost();
    // this locator will hook itself up with the first MembershipManager
    // to be created
    l = InternalLocator.startLocator(port, new File(""), null, null, localHost, false,
        new Properties(), null);
    // create configuration objects
    Properties nonDefault = new Properties();
    nonDefault.put(DISABLE_TCP, "true");
    nonDefault.put(MCAST_PORT, "0");
    nonDefault.put(LOG_FILE, "");
    nonDefault.put(LOG_LEVEL, "fine");
    nonDefault.put(GROUPS, groups);
    nonDefault.put(MEMBER_TIMEOUT, "2000");
    nonDefault.put(LOCATORS, localHost.getHostName() + '[' + port + ']');
    DistributionConfigImpl config = new DistributionConfigImpl(nonDefault);
    RemoteTransportConfig transport =
        new RemoteTransportConfig(config, ClusterDistributionManager.NORMAL_DM_TYPE);
    // start the first membership manager
    try {
      System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
      DistributedMembershipListener listener1 = mock(DistributedMembershipListener.class);
      DMStats stats1 = mock(DMStats.class);
      System.out.println("creating 1st membership manager");
      m1 = MemberFactory.newMembershipManager(listener1, config, transport, stats1,
          SecurityServiceFactory.create());
      m1.startEventProcessing();
    } finally {
      System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
    }
    // start the second membership manager
    DistributedMembershipListener listener2 = mock(DistributedMembershipListener.class);
    DMStats stats2 = mock(DMStats.class);
    System.out.println("creating 2nd membership manager");
    m2 = MemberFactory.newMembershipManager(listener2, config, transport, stats2,
        SecurityServiceFactory.create());
    m2.startEventProcessing();
    // we have to check the views with JoinLeave because the membership
    // manager queues new views for processing through the DM listener,
    // which is a mock object in this test
    System.out.println("waiting for views to stabilize");
    JoinLeave jl1 = ((GMSMembershipManager) m1).getServices().getJoinLeave();
    JoinLeave jl2 = ((GMSMembershipManager) m2).getServices().getJoinLeave();
    long giveUp = System.currentTimeMillis() + 15000;
    for (;;) {
      try {
        assertTrue("view = " + jl2.getView(), jl2.getView().size() == 2);
        assertTrue("view = " + jl1.getView(), jl1.getView().size() == 2);
        assertTrue(jl1.getView().getCreator().equals(jl2.getView().getCreator()));
        assertTrue(jl1.getView().getViewId() == jl2.getView().getViewId());
        break;
      } catch (AssertionError e) {
        // views have not converged yet; retry until the deadline passes
        if (System.currentTimeMillis() > giveUp) {
          throw e;
        }
      }
    }
    NetView view = jl1.getView();
    InternalDistributedMember notCreator;
    if (view.getCreator().equals(jl1.getMemberID())) {
      notCreator = view.getMembers().get(1);
    } else {
      notCreator = view.getMembers().get(0);
    }
    List<String> result = notCreator.getGroups();
    System.out.println("sending SerialAckedMessage from m1 to m2");
    SerialAckedMessage msg = new SerialAckedMessage();
    msg.setRecipient(m2.getLocalMember());
    msg.setMulticast(false);
    m1.send(new InternalDistributedMember[] {m2.getLocalMember()}, msg, null);
    giveUp = System.currentTimeMillis() + 15000;
    boolean verified = false;
    Throwable problem = null;
    while (giveUp > System.currentTimeMillis()) {
      try {
        verify(listener2).messageReceived(isA(SerialAckedMessage.class));
        verified = true;
        break;
      } catch (Error e) {
        // message may not have arrived yet; remember the failure and retry
        problem = e;
        Thread.sleep(500);
      }
    }
    if (!verified) {
      AssertionError error = new AssertionError("Expected a message to be received");
      if (problem != null) {
        // BUGFIX: chain the last verification failure as the cause. The previous code
        // passed "error" to its own initCause(), which Throwable rejects with
        // IllegalArgumentException ("Self-causation not permitted") and which also
        // discarded the real failure.
        error.initCause(problem);
      }
      throw error;
    }
    // let the managers idle for a while and get used to each other
    // Thread.sleep(4000l);
    m2.disconnect(false);
    assertTrue(!m2.isConnected());
    assertTrue(m1.getView().size() == 1);
    return result;
  } finally {
    if (m2 != null) {
      m2.shutdown();
    }
    if (m1 != null) {
      m1.shutdown();
    }
    if (l != null) {
      l.stop();
    }
  }
}
/**
 * This test ensures that secure communications are enabled.
 *
 * This test creates a locator with a colocated membership manager and then creates a second
 * manager that joins the system of the first.
 *
 * It then makes assertions about the state of the membership view, closes one of the managers and
 * makes more assertions.
 */
@Test
public void testLocatorAndTwoServersJoinUsingDiffeHellman() throws Exception {
  MembershipManager m1 = null, m2 = null;
  Locator l = null;
  int mcastPort = AvailablePortHelper.getRandomAvailableUDPPort();
  try {
    // boot up a locator
    int port = AvailablePortHelper.getRandomAvailableTCPPort();
    InetAddress localHost = SocketCreator.getLocalHost();
    Properties p = new Properties();
    // enable Diffie-Hellman encryption of UDP traffic; both the locator and the
    // member configs below must use the same algorithm string
    p.setProperty(ConfigurationProperties.SECURITY_UDP_DHALGO, "AES:128");
    // this locator will hook itself up with the first MembershipManager
    // to be created
    l = InternalLocator.startLocator(port, new File(""), null, null, localHost, false, p, null);
    // create configuration objects
    Properties nonDefault = new Properties();
    nonDefault.put(DistributionConfig.DISABLE_TCP_NAME, "true");
    nonDefault.put(DistributionConfig.MCAST_PORT_NAME, String.valueOf(mcastPort));
    nonDefault.put(DistributionConfig.LOG_FILE_NAME, "");
    nonDefault.put(DistributionConfig.LOG_LEVEL_NAME, "fine");
    nonDefault.put(DistributionConfig.GROUPS_NAME, "red, blue");
    nonDefault.put(DistributionConfig.MEMBER_TIMEOUT_NAME, "2000");
    nonDefault.put(DistributionConfig.LOCATORS_NAME, localHost.getHostName() + '[' + port + ']');
    nonDefault.put(ConfigurationProperties.SECURITY_UDP_DHALGO, "AES:128");
    DistributionConfigImpl config = new DistributionConfigImpl(nonDefault);
    RemoteTransportConfig transport =
        new RemoteTransportConfig(config, ClusterDistributionManager.NORMAL_DM_TYPE);
    // start the first membership manager
    try {
      // bypass discovery so the first manager can become the coordinator immediately
      System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
      DistributedMembershipListener listener1 = mock(DistributedMembershipListener.class);
      DMStats stats1 = mock(DMStats.class);
      System.out.println("creating 1st membership manager");
      m1 = MemberFactory.newMembershipManager(listener1, config, transport, stats1,
          SecurityServiceFactory.create());
      m1.startEventProcessing();
    } finally {
      System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
    }
    // start the second membership manager
    DistributedMembershipListener listener2 = mock(DistributedMembershipListener.class);
    DMStats stats2 = mock(DMStats.class);
    System.out.println("creating 2nd membership manager");
    m2 = MemberFactory.newMembershipManager(listener2, config, transport, stats2,
        SecurityServiceFactory.create());
    m2.startEventProcessing();
    // we have to check the views with JoinLeave because the membership
    // manager queues new views for processing through the DM listener,
    // which is a mock object in this test
    System.out.println("waiting for views to stabilize");
    JoinLeave jl1 = ((GMSMembershipManager) m1).getServices().getJoinLeave();
    JoinLeave jl2 = ((GMSMembershipManager) m2).getServices().getJoinLeave();
    // poll for up to 15 seconds until both members agree on a 2-member view with
    // the same creator and view id; rethrow the last assertion failure on timeout
    long giveUp = System.currentTimeMillis() + 15000;
    for (;;) {
      try {
        assertTrue("view = " + jl2.getView(), jl2.getView().size() == 2);
        assertTrue("view = " + jl1.getView(), jl1.getView().size() == 2);
        assertTrue(jl1.getView().getCreator().equals(jl2.getView().getCreator()));
        assertTrue(jl1.getView().getViewId() == jl2.getView().getViewId());
        break;
      } catch (AssertionError e) {
        if (System.currentTimeMillis() > giveUp) {
          throw e;
        }
      }
    }
    System.out.println("testing multicast availability");
    assertTrue(m1.testMulticast());
    System.out.println("multicasting SerialAckedMessage from m1 to m2");
    SerialAckedMessage msg = new SerialAckedMessage();
    msg.setRecipient(m2.getLocalMember());
    msg.setMulticast(true);
    m1.send(new InternalDistributedMember[] {m2.getLocalMember()}, msg, null);
    // poll for up to 5 seconds for the mocked listener to observe the message;
    // delivery is asynchronous so a single verify() call could race
    giveUp = System.currentTimeMillis() + 5000;
    boolean verified = false;
    Throwable problem = null;
    while (giveUp > System.currentTimeMillis()) {
      try {
        verify(listener2).messageReceived(isA(SerialAckedMessage.class));
        verified = true;
        break;
      } catch (Error e) {
        problem = e;
        Thread.sleep(500);
      }
    }
    if (!verified) {
      if (problem != null) {
        problem.printStackTrace();
      }
      fail("Expected a multicast message to be received");
    }
    // let the managers idle for a while and get used to each other
    // NOTE(review): lowercase 'l' literal; 4000L would be clearer
    Thread.sleep(4000l);
    m2.disconnect(false);
    assertTrue(!m2.isConnected());
    assertTrue(m1.getView().size() == 1);
  } finally {
    // best-effort teardown so a failure here doesn't leak members into other tests
    if (m2 != null) {
      m2.disconnect(false);
    }
    if (m1 != null) {
      m1.disconnect(false);
    }
    if (l != null) {
      l.stop();
    }
  }
}
/**
 * Verifies how ServiceConfig derives the membership join timeout:
 * from an explicit member-timeout, from defaults (multicast vs. locator
 * discovery), and from the "p2p.joinTimeout" system property override.
 */
@Test
public void testJoinTimeoutSetting() throws Exception {
  // explicit member timeout: join timeout is derived from it
  long memberTimeout = 30000;
  Properties props = new Properties();
  props.put(MEMBER_TIMEOUT, String.valueOf(memberTimeout));
  ServiceConfig serviceConfig = buildServiceConfig(props);
  assertEquals(2 * memberTimeout + ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL,
      serviceConfig.getJoinTimeout());

  // all defaults (multicast discovery): fixed 24-second join timeout
  props.clear();
  serviceConfig = buildServiceConfig(props);
  assertEquals(24000, serviceConfig.getJoinTimeout());

  // locator-based discovery: longer, 60-second join timeout
  props.clear();
  props.put(LOCATORS, SocketCreator.getLocalHost().getHostAddress() + "[" + 12345 + "]");
  serviceConfig = buildServiceConfig(props);
  assertEquals(60000, serviceConfig.getJoinTimeout());

  // the p2p.joinTimeout system property overrides everything else
  long overrideTimeout = 2000;
  System.setProperty("p2p.joinTimeout", String.valueOf(overrideTimeout));
  try {
    serviceConfig = buildServiceConfig(props);
    assertEquals(overrideTimeout, serviceConfig.getJoinTimeout());
  } finally {
    System.getProperties().remove("p2p.joinTimeout");
  }
}

/** Builds a ServiceConfig (via a DistributionConfigImpl and transport) from the given properties. */
private ServiceConfig buildServiceConfig(Properties props) {
  DistributionConfigImpl config = new DistributionConfigImpl(props);
  RemoteTransportConfig transport =
      new RemoteTransportConfig(config, ClusterDistributionManager.NORMAL_DM_TYPE);
  return new ServiceConfig(transport, config);
}
/**
 * A configuration with multicast enabled, TCP disabled and an empty locator list
 * must be rejected: GMSJoinLeave.init() is expected to throw a
 * GemFireConfigException because there is no discovery mechanism.
 */
@Test
public void testMulticastDiscoveryNotAllowed() {
  // build a multicast-only configuration with no locators
  Properties props = new Properties();
  props.put(DISABLE_TCP, "true");
  props.put(MCAST_PORT, "12345");
  props.put(LOG_FILE, "");
  props.put(LOG_LEVEL, "fine");
  props.put(LOCATORS, "");
  DistributionConfigImpl distConfig = new DistributionConfigImpl(props);
  RemoteTransportConfig transportConfig =
      new RemoteTransportConfig(distConfig, ClusterDistributionManager.NORMAL_DM_TYPE);

  // wire the config into a mocked Services object for GMSJoinLeave
  ServiceConfig serviceConfig = mock(ServiceConfig.class);
  when(serviceConfig.getDistributionConfig()).thenReturn(distConfig);
  when(serviceConfig.getTransport()).thenReturn(transportConfig);
  Services services = mock(Services.class);
  when(services.getConfig()).thenReturn(serviceConfig);

  GMSJoinLeave joinLeave = new GMSJoinLeave();
  try {
    joinLeave.init(services);
    throw new Error(
        "expected a GemFireConfigException to be thrown because no locators are configured");
  } catch (GemFireConfigException e) {
    // expected
  }
}
/**
 * test the GMSUtil.formatBytes() method
 *
 * Formats a 200-byte buffer and checks the length of the resulting string.
 */
@Test
public void testFormatBytes() throws Exception {
  final int count = 200;
  byte[] data = new byte[count];
  for (int index = 0; index < count; index++) {
    data[index] = (byte) (index % 255);
  }
  String formatted = GMSUtil.formatBytes(data, 0, data.length);
  System.out.println(formatted);
  // 604 characters expected -- presumably 3 characters per byte (600) plus
  // 4 characters of framing; the exact layout is defined by GMSUtil.formatBytes
  assertEquals(600 + 4, formatted.length());
}
/**
 * GMS membership messages are handled inside the membership services, not by a
 * ClusterDistributionManager, so invoking process() directly on any of them is
 * expected to throw. The original body repeated the same try/fail/catch pattern
 * nine times; it is factored into a single helper here.
 */
@Test
public void testMessagesThrowExceptionIfProcessed() throws Exception {
  final ClusterDistributionManager dm = null;
  assertProcessFails(() -> new HeartbeatMessage().process(dm));
  assertProcessFails(() -> new HeartbeatRequestMessage().process(dm));
  assertProcessFails(() -> new InstallViewMessage().process(dm));
  assertProcessFails(() -> new JoinRequestMessage().process(dm));
  assertProcessFails(() -> new JoinResponseMessage().process(dm));
  assertProcessFails(() -> new LeaveRequestMessage().process(dm));
  assertProcessFails(() -> new RemoveMemberMessage().process(dm));
  assertProcessFails(() -> new SuspectMembersMessage().process(dm));
  assertProcessFails(() -> new ViewAckMessage().process(dm));
}

/** A message-processing invocation that may throw any exception. */
@FunctionalInterface
private interface MessageProcessing {
  void run() throws Exception;
}

/**
 * Runs the given message-processing call and fails the test unless it throws.
 * fail() raises an AssertionError (an Error, not an Exception), so it is not
 * swallowed by the catch block.
 *
 * @param processing the call expected to throw
 */
private static void assertProcessFails(MessageProcessing processing) {
  try {
    processing.run();
    fail("expected an exception to be thrown");
  } catch (Exception e) {
    // okay -- this is the expected outcome
  }
}
}
| |
/*
* Copyright 2001-2011 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sop4j.base.joda.time.base;
import com.sop4j.base.joda.time.Chronology;
import com.sop4j.base.joda.time.DateTime;
import com.sop4j.base.joda.time.DateTimeField;
import com.sop4j.base.joda.time.DateTimeFieldType;
import com.sop4j.base.joda.time.DateTimeUtils;
import com.sop4j.base.joda.time.DurationFieldType;
import com.sop4j.base.joda.time.ReadableInstant;
import com.sop4j.base.joda.time.ReadablePartial;
import com.sop4j.base.joda.time.field.FieldUtils;
import com.sop4j.base.joda.time.format.DateTimeFormatter;
/**
 * AbstractPartial provides a standard base implementation of most methods
 * in the ReadablePartial interface.
 * <p>
 * Calculations are performed using a {@link Chronology}.
 * This chronology is set to be in the UTC time zone for all calculations.
 * <p>
 * The methods on this class use {@link ReadablePartial#size()},
 * {@link AbstractPartial#getField(int, Chronology)} and
 * {@link ReadablePartial#getValue(int)} to calculate their results.
 * Subclasses may have a better implementation.
 * <p>
 * AbstractPartial allows subclasses to be mutable and not thread-safe.
 *
 * @author Stephen Colebourne
 * @since 1.0
 */
public abstract class AbstractPartial
        implements ReadablePartial, Comparable<ReadablePartial> {

    //-----------------------------------------------------------------------
    /**
     * Constructor.
     */
    protected AbstractPartial() {
        super();
    }

    //-----------------------------------------------------------------------
    /**
     * Gets the field for a specific index in the chronology specified.
     * <p>
     * This method must not use any instance variables.
     *
     * @param index  the index to retrieve
     * @param chrono  the chronology to use
     * @return the field
     * @throws IndexOutOfBoundsException if the index is invalid
     */
    protected abstract DateTimeField getField(int index, Chronology chrono);

    //-----------------------------------------------------------------------
    /**
     * Gets the field type at the specified index.
     *
     * @param index  the index
     * @return the field type
     * @throws IndexOutOfBoundsException if the index is invalid
     */
    public DateTimeFieldType getFieldType(int index) {
        return getField(index, getChronology()).getType();
    }

    /**
     * Gets an array of the field types that this partial supports.
     * <p>
     * The fields are returned largest to smallest, for example Hour, Minute, Second.
     *
     * @return the fields supported in an array that may be altered, largest to smallest
     */
    public DateTimeFieldType[] getFieldTypes() {
        DateTimeFieldType[] result = new DateTimeFieldType[size()];
        for (int i = 0; i < result.length; i++) {
            result[i] = getFieldType(i);
        }
        return result;
    }

    /**
     * Gets the field at the specified index.
     *
     * @param index  the index
     * @return the field
     * @throws IndexOutOfBoundsException if the index is invalid
     */
    public DateTimeField getField(int index) {
        return getField(index, getChronology());
    }

    /**
     * Gets an array of the fields that this partial supports.
     * <p>
     * The fields are returned largest to smallest, for example Hour, Minute, Second.
     *
     * @return the fields supported in an array that may be altered, largest to smallest
     */
    public DateTimeField[] getFields() {
        DateTimeField[] result = new DateTimeField[size()];
        for (int i = 0; i < result.length; i++) {
            result[i] = getField(i);
        }
        return result;
    }

    /**
     * Gets an array of the value of each of the fields that this partial supports.
     * <p>
     * The fields are returned largest to smallest, for example Hour, Minute, Second.
     * Each value corresponds to the same array index as <code>getFields()</code>
     *
     * @return the current values of each field in an array that may be altered, largest to smallest
     */
    public int[] getValues() {
        int[] result = new int[size()];
        for (int i = 0; i < result.length; i++) {
            result[i] = getValue(i);
        }
        return result;
    }

    //-----------------------------------------------------------------------
    /**
     * Get the value of one of the fields of a datetime.
     * <p>
     * The field specified must be one of those that is supported by the partial.
     *
     * @param type  a DateTimeFieldType instance that is supported by this partial
     * @return the value of that field
     * @throws IllegalArgumentException if the field is null or not supported
     */
    public int get(DateTimeFieldType type) {
        return getValue(indexOfSupported(type));
    }

    /**
     * Checks whether the field specified is supported by this partial.
     *
     * @param type  the type to check, may be null which returns false
     * @return true if the field is supported
     */
    public boolean isSupported(DateTimeFieldType type) {
        return (indexOf(type) != -1);
    }

    /**
     * Gets the index of the specified field, or -1 if the field is unsupported.
     *
     * @param type  the type to check, may be null which returns -1
     * @return the index of the field, -1 if unsupported
     */
    public int indexOf(DateTimeFieldType type) {
        for (int i = 0, isize = size(); i < isize; i++) {
            if (getFieldType(i) == type) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Gets the index of the specified field, throwing an exception if the
     * field is unsupported.
     *
     * @param type  the type to check, not null
     * @return the index of the field
     * @throws IllegalArgumentException if the field is null or not supported
     */
    protected int indexOfSupported(DateTimeFieldType type) {
        int index = indexOf(type);
        if (index == -1) {
            throw new IllegalArgumentException("Field '" + type + "' is not supported");
        }
        return index;
    }

    /**
     * Gets the index of the first fields to have the specified duration,
     * or -1 if the field is unsupported.
     *
     * @param type  the type to check, may be null which returns -1
     * @return the index of the field, -1 if unsupported
     */
    protected int indexOf(DurationFieldType type) {
        for (int i = 0, isize = size(); i < isize; i++) {
            if (getFieldType(i).getDurationType() == type) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Gets the index of the first fields to have the specified duration,
     * throwing an exception if the field is unsupported.
     *
     * @param type  the type to check, not null
     * @return the index of the field
     * @throws IllegalArgumentException if the field is null or not supported
     */
    protected int indexOfSupported(DurationFieldType type) {
        int index = indexOf(type);
        if (index == -1) {
            throw new IllegalArgumentException("Field '" + type + "' is not supported");
        }
        return index;
    }

    //-----------------------------------------------------------------------
    /**
     * Resolves this partial against another complete instant to create a new
     * full instant. The combination is performed using the chronology of the
     * specified instant.
     * <p>
     * For example, if this partial represents a time, then the result of this
     * method will be the datetime from the specified base instant plus the
     * time from this partial.
     *
     * @param baseInstant  the instant that provides the missing fields, null means now
     * @return the combined datetime
     */
    public DateTime toDateTime(ReadableInstant baseInstant) {
        Chronology chrono = DateTimeUtils.getInstantChronology(baseInstant);
        long instantMillis = DateTimeUtils.getInstantMillis(baseInstant);
        long resolved = chrono.set(this, instantMillis);
        return new DateTime(resolved, chrono);
    }

    //-----------------------------------------------------------------------
    /**
     * Compares this ReadablePartial with another returning true if the chronology,
     * field types and values are equal.
     *
     * @param partial  an object to check against
     * @return true if fields and values are equal
     */
    public boolean equals(Object partial) {
        if (this == partial) {
            return true;
        }
        // idiomatic negated-instanceof (was "instanceof ... == false")
        if (!(partial instanceof ReadablePartial)) {
            return false;
        }
        ReadablePartial other = (ReadablePartial) partial;
        if (size() != other.size()) {
            return false;
        }
        for (int i = 0, isize = size(); i < isize; i++) {
            if (getValue(i) != other.getValue(i) || getFieldType(i) != other.getFieldType(i)) {
                return false;
            }
        }
        return FieldUtils.equals(getChronology(), other.getChronology());
    }

    /**
     * Gets a hash code for the ReadablePartial that is compatible with the
     * equals method.
     *
     * @return a suitable hash code
     */
    public int hashCode() {
        int total = 157;
        for (int i = 0, isize = size(); i < isize; i++) {
            total = 23 * total + getValue(i);
            total = 23 * total + getFieldType(i).hashCode();
        }
        total += getChronology().hashCode();
        return total;
    }

    //-----------------------------------------------------------------------
    /**
     * Compares this partial with another returning an integer
     * indicating the order.
     * <p>
     * The fields are compared in order, from largest to smallest.
     * The first field that is non-equal is used to determine the result.
     * <p>
     * The specified object must be a partial instance whose field types
     * match those of this partial.
     * <p>
     * NOTE: Prior to v2.0, the {@code Comparable} interface was only implemented
     * in this class and not in the {@code ReadablePartial} interface.
     *
     * @param other  an object to check against
     * @return negative if this is less, zero if equal, positive if greater
     * @throws ClassCastException if the partial is the wrong class
     *  or if it has field types that don't match
     * @throws NullPointerException if the partial is null
     * @since 1.1
     */
    public int compareTo(ReadablePartial other) {
        if (this == other) {
            return 0;
        }
        if (size() != other.size()) {
            throw new ClassCastException("ReadablePartial objects must have matching field types");
        }
        for (int i = 0, isize = size(); i < isize; i++) {
            if (getFieldType(i) != other.getFieldType(i)) {
                throw new ClassCastException("ReadablePartial objects must have matching field types");
            }
        }
        // fields are ordered largest first
        for (int i = 0, isize = size(); i < isize; i++) {
            if (getValue(i) > other.getValue(i)) {
                return 1;
            }
            if (getValue(i) < other.getValue(i)) {
                return -1;
            }
        }
        return 0;
    }

    /**
     * Is this partial later than the specified partial.
     * <p>
     * The fields are compared in order, from largest to smallest.
     * The first field that is non-equal is used to determine the result.
     * <p>
     * You may not pass null into this method. This is because you need
     * a time zone to accurately determine the current date.
     *
     * @param partial  a partial to check against, must not be null
     * @return true if this date is after the date passed in
     * @throws IllegalArgumentException if the specified partial is null
     * @throws ClassCastException if the partial has field types that don't match
     * @since 1.1
     */
    public boolean isAfter(ReadablePartial partial) {
        if (partial == null) {
            throw new IllegalArgumentException("Partial cannot be null");
        }
        return compareTo(partial) > 0;
    }

    /**
     * Is this partial earlier than the specified partial.
     * <p>
     * The fields are compared in order, from largest to smallest.
     * The first field that is non-equal is used to determine the result.
     * <p>
     * You may not pass null into this method. This is because you need
     * a time zone to accurately determine the current date.
     *
     * @param partial  a partial to check against, must not be null
     * @return true if this date is before the date passed in
     * @throws IllegalArgumentException if the specified partial is null
     * @throws ClassCastException if the partial has field types that don't match
     * @since 1.1
     */
    public boolean isBefore(ReadablePartial partial) {
        if (partial == null) {
            throw new IllegalArgumentException("Partial cannot be null");
        }
        return compareTo(partial) < 0;
    }

    /**
     * Is this partial the same as the specified partial.
     * <p>
     * The fields are compared in order, from largest to smallest.
     * If all fields are equal, the result is true.
     * <p>
     * You may not pass null into this method. This is because you need
     * a time zone to accurately determine the current date.
     *
     * @param partial  a partial to check against, must not be null
     * @return true if this date is the same as the date passed in
     * @throws IllegalArgumentException if the specified partial is null
     * @throws ClassCastException if the partial has field types that don't match
     * @since 1.1
     */
    public boolean isEqual(ReadablePartial partial) {
        if (partial == null) {
            throw new IllegalArgumentException("Partial cannot be null");
        }
        return compareTo(partial) == 0;
    }

    //-----------------------------------------------------------------------
    /**
     * Uses the specified formatter to convert this partial to a String.
     *
     * @param formatter  the formatter to use, null means use <code>toString()</code>.
     * @return the formatted string
     * @since 1.1
     */
    public String toString(DateTimeFormatter formatter) {
        if (formatter == null) {
            return toString();
        }
        return formatter.print(this);
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.lockmgr;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
import org.apache.hadoop.hive.metastore.api.TxnType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.common.ValidReadTxnList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.DriverState;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.util.ReflectionUtils;
import java.util.*;
/**
* An implementation of {@link HiveTxnManager} that does not support
* transactions. This provides default Hive behavior.
*/
class DummyTxnManager extends HiveTxnManagerImpl {
static final private Logger LOG =
LoggerFactory.getLogger(DummyTxnManager.class.getName());
private HiveLockManager lockMgr;
private HiveLockManagerCtx lockManagerCtx;
@Override
public long openTxn(Context ctx, String user, TxnType txnType) throws LockException {
// No-op
return 0L;
}
@Override
public long openTxn(Context ctx, String user) throws LockException {
// No-op
return 0L;
}
@Override
public List<Long> replOpenTxn(String replPolicy, List<Long> srcTxnIds, String user) throws LockException {
return null;
}
@Override
public boolean isTxnOpen() {
return false;
}
@Override
public long getCurrentTxnId() {
return 0L;
}
@Override
public int getStmtIdAndIncrement() {
return 0;
}
@Override
public int getCurrentStmtId() {
return 0;
}
@Override
public long getLatestTxnIdInConflict() throws LockException {
return 0;
}
@Override
public long getTableWriteId(String dbName, String tableName) throws LockException {
return 0L;
}
@Override
public long getAllocatedTableWriteId(String dbName, String tableName) throws LockException {
return 0L;
}
@Override
public void replAllocateTableWriteIdsBatch(String dbName, String tableName, String replPolicy,
List<TxnToWriteId> srcTxnToWriteIdList) throws LockException {
return;
}
@Override
public HiveLockManager getLockManager() throws LockException {
if (lockMgr == null) {
boolean supportConcurrency =
conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
if (supportConcurrency) {
String lockMgrName =
conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
if ((lockMgrName == null) || (lockMgrName.isEmpty())) {
throw new LockException(ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
}
try {
LOG.info("Creating lock manager of type " + lockMgrName);
lockMgr = (HiveLockManager)ReflectionUtils.newInstance(
conf.getClassByName(lockMgrName), conf);
lockManagerCtx = new HiveLockManagerCtx(conf);
lockMgr.setContext(lockManagerCtx);
} catch (Exception e) {
// set hiveLockMgr to null just in case this invalid manager got set to
// next query's ctx.
if (lockMgr != null) {
try {
lockMgr.close();
} catch (LockException e1) {
//nothing can do here
}
lockMgr = null;
}
throw new LockException(ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg() +
e.getMessage());
}
} else {
LOG.info("Concurrency mode is disabled, not creating a lock manager");
return null;
}
}
// Force a re-read of the configuration file. This is done because
// different queries in the session may be using the same lock manager.
lockManagerCtx.setConf(conf);
lockMgr.refresh();
return lockMgr;
}
@Override
public void acquireLocks(QueryPlan plan, Context ctx, String username) throws LockException {
acquireLocks(plan,ctx,username,null);
}
@Override
public void acquireLocks(QueryPlan plan, Context ctx, String username, DriverState driverState) throws LockException {
// Make sure we've built the lock manager
getLockManager();
// If the lock manager is still null, then it means we aren't using a
// lock manager
if (lockMgr == null) {
return;
}
List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();
// Sort all the inputs, outputs.
// If a lock needs to be acquired on any partition, a read lock needs to be acquired on all
// its parents also
for (ReadEntity input : plan.getInputs()) {
if (!input.needsLock()) {
continue;
}
LOG.debug("Adding " + input.getName() + " to list of lock inputs");
if (input.getType() == ReadEntity.Type.DATABASE) {
lockObjects.addAll(getLockObjects(plan, input.getDatabase(), null,
null, HiveLockMode.SHARED));
} else if (input.getType() == ReadEntity.Type.TABLE) {
lockObjects.addAll(getLockObjects(plan, null, input.getTable(), null,
HiveLockMode.SHARED));
} else {
lockObjects.addAll(getLockObjects(plan, null, null,
input.getPartition(),
HiveLockMode.SHARED));
}
}
for (WriteEntity output : plan.getOutputs()) {
HiveLockMode lockMode = getWriteEntityLockMode(output);
if (lockMode == null) {
continue;
}
LOG.debug("Adding " + output.getName() + " to list of lock outputs");
List<HiveLockObj> lockObj = null;
if (output.getType() == WriteEntity.Type.DATABASE) {
lockObjects.addAll(getLockObjects(plan, output.getDatabase(), null, null, lockMode));
} else if (output.getTyp() == WriteEntity.Type.TABLE) {
lockObj = getLockObjects(plan, null, output.getTable(), null,lockMode);
} else if (output.getTyp() == WriteEntity.Type.PARTITION) {
lockObj = getLockObjects(plan, null, null, output.getPartition(), lockMode);
}
// In case of dynamic queries, it is possible to have incomplete dummy partitions
else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
lockObj = getLockObjects(plan, null, null, output.getPartition(),
HiveLockMode.SHARED);
}
if(lockObj != null) {
lockObjects.addAll(lockObj);
ctx.getOutputLockObjects().put(output, lockObj);
}
}
if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
return;
}
dedupLockObjects(lockObjects);
List<HiveLock> hiveLocks = lockMgr.lock(lockObjects, false, driverState);
if (hiveLocks == null) {
throw new LockException(ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
} else {
ctx.setHiveLocks(hiveLocks);
}
}
@Override
public void releaseLocks(List<HiveLock> hiveLocks) throws LockException {
// If there's no lock manager, it essentially means we didn't acquire locks in the first place,
// thus no need to release locks
if (lockMgr != null) {
lockMgr.releaseLocks(hiveLocks);
}
}
@Override
public void commitTxn() throws LockException {
// No-op
}
@Override
public void replCommitTxn(CommitTxnRequest rqst) throws LockException {
// No-op
}
@Override
public void rollbackTxn() throws LockException {
// No-op
}
@Override
public void replRollbackTxn(String replPolicy, long srcTxnId) throws LockException {
// No-op
}
@Override
public void replTableWriteIdState(String validWriteIdList, String dbName, String tableName, List<String> partNames)
throws LockException {
// No-op
}
@Override
public void heartbeat() throws LockException {
// No-op
}
@Override
public GetOpenTxnsResponse getOpenTxns() throws LockException {
return new GetOpenTxnsResponse();
}
@Override
public ValidTxnList getValidTxns() throws LockException {
return new ValidReadTxnList();
}
@Override
public ValidTxnList getValidTxns(List<TxnType> excludeTxnTypes) throws LockException {
return new ValidReadTxnList();
}
@Override
public ValidTxnWriteIdList getValidWriteIds(List<String> tableList,
String validTxnList) throws LockException {
return new ValidTxnWriteIdList(getCurrentTxnId());
}
@Override
public String getTxnManagerName() {
return DummyTxnManager.class.getName();
}
@Override
public boolean supportsExplicitLock() {
return true;
}
@Override
public boolean useNewShowLocksFormat() {
return false;
}
@Override
public boolean supportsAcid() {
return false;
}
@Override
protected void destruct() {
if (lockMgr != null) {
try {
lockMgr.close();
} catch (LockException e) {
// Not much I can do about it.
LOG.warn("Got exception when closing lock manager " + e.getMessage());
}
}
}
/**
* Dedup the list of lock objects so that there is only one lock per table/partition.
* If there is both a shared and exclusive lock for the same object, this will deduped
* to just a single exclusive lock. Package level so that the unit tests
* can access it. Not intended for use outside this class.
* @param lockObjects
*/
static void dedupLockObjects(List<HiveLockObj> lockObjects) {
Map<String, HiveLockObj> lockMap = new HashMap<String, HiveLockObj>();
for (HiveLockObj lockObj : lockObjects) {
String lockName = lockObj.getName();
HiveLockObj foundLock = lockMap.get(lockName);
if (foundLock == null || lockObj.getMode() == HiveLockMode.EXCLUSIVE) {
lockMap.put(lockName, lockObj);
}
}
// copy set of deduped locks back to original list
lockObjects.clear();
for (HiveLockObj lockObj : lockMap.values()) {
lockObjects.add(lockObj);
}
}
private HiveLockMode getWriteEntityLockMode (WriteEntity we) {
HiveLockMode lockMode = we.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED;
//but the writeEntity is complete in DDL operations, instead DDL sets the writeType, so
//we use it to determine its lockMode, and first we check if the writeType was set
WriteEntity.WriteType writeType = we.getWriteType();
if (writeType == null) {
return lockMode;
}
switch (writeType) {
case DDL_EXCLUSIVE:
return HiveLockMode.EXCLUSIVE;
case DDL_SHARED:
return HiveLockMode.SHARED;
case DDL_NO_LOCK:
return null;
default: //other writeTypes related to DMLs
return lockMode;
}
}
/**
 * Builds the list of implicit lock objects for the given database, table or
 * partition. Exactly one of {@code db}/{@code t}/{@code p} is expected to be
 * non-null; they are checked in that order and the first match wins.
 *
 * @param plan query plan supplying the query id and query string recorded in
 *             the lock data
 * @param db   database to lock, or null
 * @param t    table to lock, or null
 * @param p    partition to lock, or null
 * @param mode lock mode for the primary object; parent objects (table,
 *             database, partial partition specs) are always locked SHARED
 * @return the lock objects to acquire; empty if db, t and p are all null
 * @throws LockException if a dummy parent partition cannot be constructed
 */
private List<HiveLockObj> getLockObjects(QueryPlan plan, Database db,
    Table t, Partition p,
    HiveLockMode mode)
    throws LockException {
  List<HiveLockObj> locks = new LinkedList<HiveLockObj>();
  // Lock metadata shared by every lock created in this call.
  HiveLockObject.HiveLockObjectData lockData =
      new HiveLockObject.HiveLockObjectData(plan.getQueryId(),
          String.valueOf(System.currentTimeMillis()),
          "IMPLICIT",
          plan.getQueryStr(),
          conf);
  // Database-level request: a single lock on the database name.
  if (db != null) {
    locks.add(new HiveLockObj(new HiveLockObject(db.getName(), lockData),
        mode));
    return locks;
  }
  // Table-level request: the table in the requested mode, plus a SHARED
  // lock on its parent database.
  if (t != null) {
    locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
    mode = HiveLockMode.SHARED;
    locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
    return locks;
  }
  if (p != null) {
    // Real partitions are locked directly in the requested mode; dummy
    // partitions only name a partial spec and get no direct lock.
    if (!(p instanceof DummyPartition)) {
      locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
    }
    // All the parents are locked in shared mode
    mode = HiveLockMode.SHARED;
    // For dummy partitions, only partition name is needed
    String name = p.getName();
    if (p instanceof DummyPartition) {
      // assumes the dummy partition name is encoded as <a>@<b>@<partname>
      // — TODO confirm against DummyPartition naming
      name = p.getName().split("@")[2];
    }
    String partialName = "";
    String[] partns = name.split("/");
    // For a real partition the last path segment is the partition itself
    // (locked above, or covered by its own name for dummies), so only the
    // ancestor specs are walked here.
    int len = p instanceof DummyPartition ? partns.length : partns.length - 1;
    Map<String, String> partialSpec = new LinkedHashMap<String, String>();
    for (int idx = 0; idx < len; idx++) {
      String partn = partns[idx];
      partialName += partn;
      // Each segment is expected to be a "key=value" partition column pair.
      String[] nameValue = partn.split("=");
      assert(nameValue.length == 2);
      partialSpec.put(nameValue[0], nameValue[1]);
      try {
        // SHARED lock on each partial partition spec, modeled as a dummy
        // partition named db/tablename/partialName.
        locks.add(new HiveLockObj(
            new HiveLockObject(new DummyPartition(p.getTable(), p.getTable().getDbName()
                + "/" + FileUtils.escapePathName(p.getTable().getTableName()).toLowerCase()
                + "/" + partialName,
                partialSpec), lockData), mode));
        partialName += "/";
      } catch (HiveException e) {
        // NOTE(review): only the message is preserved; the original cause
        // is dropped here — confirm whether LockException can carry a cause.
        throw new LockException(e.getMessage());
      }
    }
    // Finally, SHARED locks on the owning table and database.
    locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
    locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
  }
  return locks;
}
// Query id tracking is not implemented by this lock manager; callers always
// receive null. NOTE(review): confirm callers tolerate a null query id.
@Override
public String getQueryid() {
  return null;
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.datapipeline.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Contains the parameters for ReportTaskRunnerHeartbeat.
* </p>
*/
public class ReportTaskRunnerHeartbeatRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /**
     * Unique identifier of the task runner within the AWS account. For
     * runners launched on resources managed by AWS Data Pipeline the web
     * service assigns this value; custom task runners must choose their own
     * unique identifier.
     */
    private String taskrunnerId;

    /**
     * The worker group this runner accepts and processes tasks for. Matching
     * against the worker group set on pipeline objects is exact and case
     * sensitive; wildcards are not permitted.
     */
    private String workerGroup;

    /** The public DNS name of the task runner. */
    private String hostname;

    /**
     * Sets the task runner identifier.
     *
     * @param taskrunnerId unique id of the task runner within the AWS account
     */
    public void setTaskrunnerId(String taskrunnerId) {
        this.taskrunnerId = taskrunnerId;
    }

    /**
     * Returns the task runner identifier.
     *
     * @return unique id of the task runner within the AWS account
     */
    public String getTaskrunnerId() {
        return this.taskrunnerId;
    }

    /**
     * Fluent variant of {@link #setTaskrunnerId(String)}.
     *
     * @param taskrunnerId unique id of the task runner within the AWS account
     * @return this request, so that calls can be chained
     */
    public ReportTaskRunnerHeartbeatRequest withTaskrunnerId(String taskrunnerId) {
        setTaskrunnerId(taskrunnerId);
        return this;
    }

    /**
     * Sets the worker group this runner accepts tasks for.
     *
     * @param workerGroup exact, case-sensitive worker group name
     */
    public void setWorkerGroup(String workerGroup) {
        this.workerGroup = workerGroup;
    }

    /**
     * Returns the worker group this runner accepts tasks for.
     *
     * @return exact, case-sensitive worker group name
     */
    public String getWorkerGroup() {
        return this.workerGroup;
    }

    /**
     * Fluent variant of {@link #setWorkerGroup(String)}.
     *
     * @param workerGroup exact, case-sensitive worker group name
     * @return this request, so that calls can be chained
     */
    public ReportTaskRunnerHeartbeatRequest withWorkerGroup(String workerGroup) {
        setWorkerGroup(workerGroup);
        return this;
    }

    /**
     * Sets the public DNS name of the task runner.
     *
     * @param hostname public DNS name of the task runner
     */
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    /**
     * Returns the public DNS name of the task runner.
     *
     * @return public DNS name of the task runner
     */
    public String getHostname() {
        return this.hostname;
    }

    /**
     * Fluent variant of {@link #setHostname(String)}.
     *
     * @param hostname public DNS name of the task runner
     * @return this request, so that calls can be chained
     */
    public ReportTaskRunnerHeartbeatRequest withHostname(String hostname) {
        setHostname(hostname);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only the fields that are set are included.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getTaskrunnerId() != null) {
            text.append("TaskrunnerId: " + getTaskrunnerId() + ",");
        }
        if (getWorkerGroup() != null) {
            text.append("WorkerGroup: " + getWorkerGroup() + ",");
        }
        if (getHostname() != null) {
            text.append("Hostname: " + getHostname());
        }
        return text.append("}").toString();
    }

    /** Compares requests field by field, treating two nulls as equal. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ReportTaskRunnerHeartbeatRequest)) {
            return false;
        }
        ReportTaskRunnerHeartbeatRequest other = (ReportTaskRunnerHeartbeatRequest) obj;
        return bothNullOrEqual(getTaskrunnerId(), other.getTaskrunnerId())
                && bothNullOrEqual(getWorkerGroup(), other.getWorkerGroup())
                && bothNullOrEqual(getHostname(), other.getHostname());
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean bothNullOrEqual(Object left, Object right) {
        return left == null ? right == null : left.equals(right);
    }

    /** 31-based hash of the three fields; a null field contributes zero. */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + nullSafeHash(getTaskrunnerId());
        result = prime * result + nullSafeHash(getWorkerGroup());
        result = prime * result + nullSafeHash(getHostname());
        return result;
    }

    /** Null-safe hash used by {@link #hashCode()}. */
    private static int nullSafeHash(Object value) {
        return value == null ? 0 : value.hashCode();
    }

    @Override
    public ReportTaskRunnerHeartbeatRequest clone() {
        return (ReportTaskRunnerHeartbeatRequest) super.clone();
    }
}
| |
/*
* Copyright 2006-2021 Prowide
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.prowidesoftware.swift.model.field;
import com.prowidesoftware.swift.model.Tag;
import com.prowidesoftware.Generated;
import com.prowidesoftware.deprecation.ProwideDeprecated;
import com.prowidesoftware.deprecation.TargetYear;
import java.io.Serializable;
import java.util.Locale;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Currency;
import com.prowidesoftware.swift.model.field.CurrencyContainer;
import com.prowidesoftware.swift.model.field.CurrencyResolver;
import org.apache.commons.lang3.StringUtils;
import com.prowidesoftware.swift.model.field.SwiftParseUtils;
import com.prowidesoftware.swift.model.field.Field;
import com.prowidesoftware.swift.model.*;
import com.prowidesoftware.swift.utils.SwiftFormatUtils;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
/**
* SWIFT MT Field 32E.
* <p>
* Model and parser for field 32E of a SWIFT MT message.
*
* <p>Subfields (components) Data types
* <ol>
* <li><code>Currency</code></li>
* </ol>
*
* <p>Structure definition
* <ul>
* <li>validation pattern: <code><CUR></code></li>
* <li>parser pattern: <code>S</code></li>
* <li>components pattern: <code>C</code></li>
* </ul>
*
* <p>
* This class complies with standard release <strong>SRU2021</strong>
*/
@SuppressWarnings("unused")
@Generated
public class Field32E extends Field implements Serializable, CurrencyContainer {

    /** Constant identifying the SRU (standard release) this class complies with. */
    public static final int SRU = 2021;

    private static final long serialVersionUID = 1L;

    /** Constant with the field name 32E. */
    public static final String NAME = "32E";

    /** Same as NAME, intended to be clear when using static imports. */
    public static final String F_32E = "32E";

    /**
     * @deprecated use {@link #parserPattern()} method instead.
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final String PARSER_PATTERN = "S";

    /**
     * @deprecated use {@link #typesPattern()} method instead.
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final String COMPONENTS_PATTERN = "C";

    /**
     * @deprecated use {@link #typesPattern()} method instead.
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final String TYPES_PATTERN = "C";

    /** Component number for the Currency subfield. */
    public static final Integer CURRENCY = 1;

    /** Creates a new 32E field with its single component set to null. */
    public Field32E() {
        super(1);
    }

    /**
     * Creates a new 32E field parsed from the given literal value.
     * @param value complete field value including separators and CRLF
     */
    public Field32E(final String value) {
        super(value);
    }

    /**
     * Creates a new 32E field initialized from the given tag's value.
     * @throws IllegalArgumentException if the parameter tag is null or its tagname does not match the field name
     * @since 7.8
     */
    public Field32E(final Tag tag) {
        this();
        if (tag == null) {
            throw new IllegalArgumentException("tag cannot be null.");
        }
        if (!StringUtils.equals(tag.getName(), "32E")) {
            throw new IllegalArgumentException("cannot create field 32E from tag "+tag.getName()+", tagname must match the name of the field.");
        }
        parse(tag.getValue());
    }

    /**
     * Returns a copy of the given field, backed by its own copy of the
     * source components list.
     * @param source a field instance to copy
     * @since 7.7
     */
    public static Field32E newInstance(Field32E source) {
        Field32E copy = new Field32E();
        copy.setComponents(new ArrayList<>(source.getComponents()));
        return copy;
    }

    /**
     * Shorthand for <code>new Tag(NAME, value)</code>.
     * @see #NAME
     * @since 7.5
     */
    public static Tag tag(final String value) {
        return new Tag(NAME, value);
    }

    /**
     * Shorthand for <code>new Tag(NAME, "")</code>.
     * @see #NAME
     * @since 7.5
     */
    public static Tag emptyTag() {
        return new Tag(NAME, "");
    }

    /**
     * Parses the complete field value into the internal components,
     * overwriting any previous component content.
     * @param value complete field value including separators and CRLF
     * @since 7.8
     */
    @Override
    public void parse(final String value) {
        init(1);
        setComponent1(value);
    }

    /** Serializes the components into the single SWIFT string value. */
    @Override
    public String getValue() {
        final StringBuilder value = new StringBuilder();
        append(value, 1);
        return value.toString();
    }

    /**
     * Returns a human-readable, localized rendering of a component value.
     * @param component number of the component to display (only 1 is valid)
     * @param locale optional locale for dates and amounts; null uses the default
     * @return formatted component value, or null if the component is not present
     * @throws IllegalArgumentException if component number is invalid for the field
     * @since 7.8
     */
    @Override
    public String getValueDisplay(int component, Locale locale) {
        if (component != 1) {
            throw new IllegalArgumentException("invalid component number " + component + " for field 32E");
        }
        //default format (as is)
        return getComponent(1);
    }

    /**
     * @deprecated use {@link #typesPattern()} instead.
     */
    @Override
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public String componentsPattern() {
        return "C";
    }

    /**
     * Returns the field component types pattern, which unlike the components
     * pattern distinguishes between N (Number) and I (BigDecimal).
     * @since 9.2.7
     */
    @Override
    public String typesPattern() {
        return "C";
    }

    /** Returns the field parser pattern. */
    @Override
    public String parserPattern() {
        return "S";
    }

    /** Returns the field validator pattern. */
    @Override
    public String validatorPattern() {
        return "<CUR>";
    }

    /**
     * Returns true if the given component is optional for this field,
     * regardless of the field being mandatory in a particular message.
     * @param component component number, first component of a field is referenced as 1
     */
    @Override
    public boolean isOptional(int component) {
        return false;
    }

    /** Returns true if the field is a GENERIC FIELD as specified by the standard. */
    @Override
    public boolean isGeneric() {
        return false;
    }

    /**
     * Returns the defined number of components (not the number currently
     * present in this instance).
     * @since 7.7
     */
    @Override
    public int componentsSize() {
        return 1;
    }

    /**
     * Returns the english labels for the components, indexed in sync with
     * the field component structure.
     * @see #getComponentLabel(int)
     * @since 7.8.4
     */
    @Override
    public List<String> getComponentLabels() {
        final List<String> labels = new ArrayList<>();
        labels.add("Currency");
        return labels;
    }

    /**
     * Returns a mapping between component numbers and their camel case labels.
     * @since 7.10.3
     */
    @Override
    protected Map<Integer, String> getComponentMap() {
        final Map<Integer, String> labels = new HashMap<>();
        labels.put(1, "currency");
        return labels;
    }

    /** Gets the component 1 (Currency). */
    public String getComponent1() {
        return getComponent(1);
    }

    /** Gets component 1 converted to Currency, or null if it cannot be converted. */
    public java.util.Currency getComponent1AsCurrency() {
        return SwiftFormatUtils.getCurrency(getComponent(1));
    }

    /** Gets the Currency (component 1). */
    public String getCurrency() {
        return getComponent1();
    }

    /** Gets the Currency (component 1) as a Currency object, or null if it cannot be converted. */
    public java.util.Currency getCurrencyAsCurrency() {
        return getComponent1AsCurrency();
    }

    /**
     * Sets the component 1 (Currency).
     * @param component1 the Currency to set
     * @return this field, to enable the build pattern
     */
    public Field32E setComponent1(String component1) {
        setComponent(1, component1);
        return this;
    }

    /**
     * Sets component 1 from a Currency object.
     * @param component1 the Currency content to set
     * @return this field, to enable the build pattern
     */
    public Field32E setComponent1(java.util.Currency component1) {
        setComponent(1, SwiftFormatUtils.getCurrency(component1));
        return this;
    }

    /**
     * Sets the Currency (component 1).
     * @param component1 the Currency to set
     * @return this field, to enable the build pattern
     */
    public Field32E setCurrency(String component1) {
        return setComponent1(component1);
    }

    /**
     * Sets the Currency (component 1) from a Currency object.
     * @see #setComponent1(java.util.Currency)
     * @param component1 the Currency content to set
     * @return this field, to enable the build pattern
     */
    public Field32E setCurrency(java.util.Currency component1) {
        return setComponent1(component1);
    }

    public List<String> currencyStrings() {
        return CurrencyResolver.currencyStrings(this);
    }

    public List<Currency> currencies() {
        return CurrencyResolver.currencies(this);
    }

    public Currency currency() {
        return CurrencyResolver.resolveCurrency(this);
    }

    public String currencyString() {
        return CurrencyResolver.resolveCurrencyString(this);
    }

    public void initializeCurrencies(String cur) {
        CurrencyResolver.resolveSetCurrency(this, cur);
    }

    public void initializeCurrencies(Currency cur) {
        CurrencyResolver.resolveSetCurrency(this, cur);
    }

    /** Returns the field's name: the static value of Field32E.NAME. */
    @Override
    public String getName() {
        return NAME;
    }

    /**
     * Gets the first occurrence of this field in the given block, or null
     * when the block is null, empty, or contains no 32E tag.
     * @param block may be null or empty
     */
    public static Field32E get(final SwiftTagListBlock block) {
        if (block == null || block.isEmpty()) {
            return null;
        }
        final Tag found = block.getTagByName(NAME);
        return found == null ? null : new Field32E(found);
    }

    /**
     * Gets the first instance of Field32E in block 4 of the given message,
     * or null when absent.
     * @param msg may be empty or null
     * @see #get(SwiftTagListBlock)
     */
    public static Field32E get(final SwiftMessage msg) {
        if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
            return null;
        }
        return get(msg.getBlock4());
    }

    /**
     * Gets every occurrence of Field32E in block 4 of the given message;
     * an empty list is returned when none are found.
     * @param msg may be empty or null in which case an empty list is returned
     * @see #getAll(SwiftTagListBlock)
     */
    public static List<Field32E> getAll(final SwiftMessage msg) {
        if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
            return java.util.Collections.emptyList();
        }
        return getAll(msg.getBlock4());
    }

    /**
     * Gets every occurrence of Field32E in the given block; an empty list is
     * returned when none are found.
     * @param block may be empty or null in which case an empty list is returned
     */
    public static List<Field32E> getAll(final SwiftTagListBlock block) {
        final List<Field32E> found = new ArrayList<>();
        if (block != null && !block.isEmpty()) {
            final Tag[] tags = block.getTagsByName(NAME);
            if (tags != null) {
                for (final Tag tag : tags) {
                    found.add(new Field32E(tag));
                }
            }
        }
        return found;
    }

    /**
     * Deserializes the given JSON into a Field32E; a missing "currency"
     * label leaves the component unset.
     * @param json JSON structure including tuples with label and value for all field components
     * @since 7.10.3
     * @see Field#fromJson(String)
     */
    public static Field32E fromJson(final String json) {
        final Field32E field = new Field32E();
        final JsonObject parsed = JsonParser.parseString(json).getAsJsonObject();
        // **** COMPONENT 1 - Currency
        if (parsed.get("currency") != null) {
            field.setComponent1(parsed.get("currency").getAsString());
        }
        return field;
    }
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.device.mgt.jaxrs.service.api;
import io.swagger.annotations.SwaggerDefinition;
import io.swagger.annotations.Info;
import io.swagger.annotations.ExtensionProperty;
import io.swagger.annotations.Extension;
import io.swagger.annotations.Tag;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.ResponseHeader;
import org.wso2.carbon.apimgt.annotations.api.Scope;
import org.wso2.carbon.apimgt.annotations.api.Scopes;
import org.wso2.carbon.device.mgt.common.notification.mgt.Notification;
import org.wso2.carbon.device.mgt.jaxrs.NotificationList;
import org.wso2.carbon.device.mgt.jaxrs.beans.ErrorResponse;
import org.wso2.carbon.device.mgt.jaxrs.util.Constants;
import javax.validation.constraints.Max;
import javax.validation.constraints.Size;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
* Notifications related REST-API.
*/
// Swagger/API metadata for the notification endpoints exposed under
// /api/device-mgt/v0.9/notifications.
@SwaggerDefinition(
        info = @Info(
                version = "0.9.0",
                title = "",
                extensions = {
                        @Extension(properties = {
                                @ExtensionProperty(name = "name", value = "DeviceNotificationManagement"),
                                @ExtensionProperty(name = "context", value = "/api/device-mgt/v0.9/notifications"),
                        })
                }
        ),
        tags = {
                @Tag(name = "device_management", description = "")
        }
)
@Scopes(
        scopes = {
                @Scope(
                        name = "Getting All Device Notification Details",
                        description = "Getting All Device Notification Details",
                        key = "perm:notifications:view",
                        permissions = {"/device-mgt/notifications/view"}
                ),
                @Scope(
                        name = "Updating the Device Notification Status",
                        description = "Updating the Device Notification Status",
                        key = "perm:notifications:mark-checked",
                        // NOTE(review): this mark-checked scope maps to the
                        // /view permission path — presumably a copy-paste from
                        // the scope above; confirm the intended permission.
                        permissions = {"/device-mgt/notifications/view"}
                )
        }
)
@Api(value = "Device Notification Management", description = "Device notification related operations can be found here.")
@Path("/notifications")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public interface NotificationManagementService {

    // Lists device notifications, optionally filtered by status and paginated
    // via offset/limit.
    @GET
    @ApiOperation(
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "GET",
            value = "Getting All Device Notification Details",
            notes = "Get the details of all the notifications that were pushed to the devices registered with WSO2 EMM using this REST API.",
            tags = "Device Notification Management",
            extensions = {
                    @Extension(properties = {
                            @ExtensionProperty(name = Constants.SCOPE, value = "perm:notifications:view")
                    })
            }
    )
    @ApiResponses(
            value = {
                    @ApiResponse(
                            code = 200,
                            message = "OK. \n Successfully fetched the list of notifications.",
                            response = NotificationList.class,
                            responseHeaders = {
                                    @ResponseHeader(
                                            name = "Content-Type",
                                            description = "The content type of the body"),
                                    @ResponseHeader(
                                            name = "ETag",
                                            description = "Entity Tag of the response resource.\n" +
                                                    "Used by caches, or in conditional requests."),
                                    @ResponseHeader(
                                            name = "Last-Modified",
                                            description = "Date and time the resource was last modified.\n" +
                                                    "Used by caches, or in conditional requests."),
                            }),
                    @ApiResponse(
                            code = 304,
                            message = "Not Modified. \n Empty body because the client already has the latest version " +
                                    "of the requested resource."),
                    @ApiResponse(
                            code = 400,
                            message = "Bad Request. \n Invalid notification status type received. \n" +
                                    "Valid status types are NEW | CHECKED",
                            response = ErrorResponse.class),
                    @ApiResponse(
                            code = 404,
                            message = "Not Found. \n There are no notification.",
                            response = ErrorResponse.class),
                    @ApiResponse(
                            code = 406,
                            message = "Not Acceptable.\n The requested media type is not supported"),
                    @ApiResponse(
                            code = 500,
                            message = "Internal Server Error. " +
                                    "\n Server error occurred while fetching the notification list.",
                            response = ErrorResponse.class)
            })
    Response getNotifications(
            @ApiParam(
                    name = "status",
                    value = "The status of the notification. Provide any of the following values: \n" +
                            " - NEW: Will keep the message in the unread state.\n" +
                            " - CHECKED: Will keep the message in the read state.",
                    allowableValues = "NEW, CHECKED",
                    required = false)
            @QueryParam("status") @Size(max = 45)
            String status,
            @ApiParam(
                    name = "If-Modified-Since",
                    value = "Checks if the requested variant was modified, since the specified date-time. \n" +
                            "Provide the value in the following format: EEE, d MMM yyyy HH:mm:ss Z.\n" +
                            "Example: Mon, 05 Jan 2014 15:10:00 +0200",
                    required = false)
            @HeaderParam("If-Modified-Since")
            String ifModifiedSince,
            @ApiParam(
                    name = "offset",
                    value = "The starting pagination index for the complete list of qualified items.",
                    required = false,
                    defaultValue = "0")
            @QueryParam("offset")
            int offset,
            @ApiParam(
                    name = "limit",
                    value = "Provide how many notification details you require from the starting pagination index/offset.",
                    required = false,
                    defaultValue = "5")
            @QueryParam("limit")
            int limit);

    // Marks a single notification as CHECKED (read).
    @PUT
    @Path("/{id}/mark-checked")
    @ApiOperation(
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "PUT",
            value = "Updating the Device Notification Status",
            notes = "When a user has read the the device notification the device notification status must "
                    + "change from NEW to CHECKED. This API is used to update device notification status.",
            tags = "Device Notification Management",
            extensions = {
                    @Extension(properties = {
                            @ExtensionProperty(name = Constants.SCOPE, value = "perm:notifications:mark-checked")
                    })
            }
    )
    @ApiResponses(
            value = {
                    @ApiResponse(
                            code = 200,
                            message = "OK",
                            response = Notification.class),
                    // NOTE(review): a second response with code 200 — appears
                    // to describe a partial-success case; confirm Swagger
                    // renders duplicate codes as intended.
                    @ApiResponse(
                            code = 200,
                            message = "Notification updated successfully. But the retrial of the updated "
                                    + "notification failed.",
                            response = Notification.class),
                    @ApiResponse(
                            code = 500,
                            message = "Error occurred while updating notification status.")
            }
    )
    Response updateNotificationStatus(
            @ApiParam(
                    name = "id",
                    value = "The notification ID.",
                    required = true,
                    defaultValue = "1")
            // NOTE(review): @Max(45) caps the notification ID value at 45 —
            // presumably a length limit was intended (cf. @Size(max = 45) on
            // the status parameter above); confirm.
            @PathParam("id") @Max(45)
            int id);
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.appservice.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.OffsetDateTime;
/** Triggered Web Job Run Information. */
@Fluent
public final class TriggeredJobRun {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(TriggeredJobRun.class);

    /** ID of the web job this run belongs to. */
    @JsonProperty(value = "web_job_id")
    private String webJobId;

    /** Name of the web job this run belongs to. */
    @JsonProperty(value = "web_job_name")
    private String webJobName;

    /** Status of this run. */
    @JsonProperty(value = "status")
    private TriggeredWebJobStatus status;

    /** Instant at which the run started. */
    @JsonProperty(value = "start_time")
    private OffsetDateTime startTime;

    /** Instant at which the run ended. */
    @JsonProperty(value = "end_time")
    private OffsetDateTime endTime;

    /** Duration of the run. */
    @JsonProperty(value = "duration")
    private String duration;

    /** URL of the run's output. */
    @JsonProperty(value = "output_url")
    private String outputUrl;

    /** URL of the run's error output. */
    @JsonProperty(value = "error_url")
    private String errorUrl;

    /** URL of the job. */
    @JsonProperty(value = "url")
    private String url;

    /** Name of the job. */
    @JsonProperty(value = "job_name")
    private String jobName;

    /** What triggered the run. */
    @JsonProperty(value = "trigger")
    private String trigger;

    /**
     * Returns the ID of the web job this run belongs to.
     *
     * @return the webJobId value.
     */
    public String webJobId() {
        return webJobId;
    }

    /**
     * Sets the ID of the web job this run belongs to.
     *
     * @param webJobId the webJobId value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withWebJobId(String webJobId) {
        this.webJobId = webJobId;
        return this;
    }

    /**
     * Returns the name of the web job this run belongs to.
     *
     * @return the webJobName value.
     */
    public String webJobName() {
        return webJobName;
    }

    /**
     * Sets the name of the web job this run belongs to.
     *
     * @param webJobName the webJobName value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withWebJobName(String webJobName) {
        this.webJobName = webJobName;
        return this;
    }

    /**
     * Returns the status of this run.
     *
     * @return the status value.
     */
    public TriggeredWebJobStatus status() {
        return status;
    }

    /**
     * Sets the status of this run.
     *
     * @param status the status value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withStatus(TriggeredWebJobStatus status) {
        this.status = status;
        return this;
    }

    /**
     * Returns the instant at which the run started.
     *
     * @return the startTime value.
     */
    public OffsetDateTime startTime() {
        return startTime;
    }

    /**
     * Sets the instant at which the run started.
     *
     * @param startTime the startTime value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withStartTime(OffsetDateTime startTime) {
        this.startTime = startTime;
        return this;
    }

    /**
     * Returns the instant at which the run ended.
     *
     * @return the endTime value.
     */
    public OffsetDateTime endTime() {
        return endTime;
    }

    /**
     * Sets the instant at which the run ended.
     *
     * @param endTime the endTime value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withEndTime(OffsetDateTime endTime) {
        this.endTime = endTime;
        return this;
    }

    /**
     * Returns the duration of the run.
     *
     * @return the duration value.
     */
    public String duration() {
        return duration;
    }

    /**
     * Sets the duration of the run.
     *
     * @param duration the duration value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withDuration(String duration) {
        this.duration = duration;
        return this;
    }

    /**
     * Returns the URL of the run's output.
     *
     * @return the outputUrl value.
     */
    public String outputUrl() {
        return outputUrl;
    }

    /**
     * Sets the URL of the run's output.
     *
     * @param outputUrl the outputUrl value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withOutputUrl(String outputUrl) {
        this.outputUrl = outputUrl;
        return this;
    }

    /**
     * Returns the URL of the run's error output.
     *
     * @return the errorUrl value.
     */
    public String errorUrl() {
        return errorUrl;
    }

    /**
     * Sets the URL of the run's error output.
     *
     * @param errorUrl the errorUrl value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withErrorUrl(String errorUrl) {
        this.errorUrl = errorUrl;
        return this;
    }

    /**
     * Returns the URL of the job.
     *
     * @return the url value.
     */
    public String url() {
        return url;
    }

    /**
     * Sets the URL of the job.
     *
     * @param url the url value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withUrl(String url) {
        this.url = url;
        return this;
    }

    /**
     * Returns the name of the job.
     *
     * @return the jobName value.
     */
    public String jobName() {
        return jobName;
    }

    /**
     * Sets the name of the job.
     *
     * @param jobName the jobName value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withJobName(String jobName) {
        this.jobName = jobName;
        return this;
    }

    /**
     * Returns what triggered the run.
     *
     * @return the trigger value.
     */
    public String trigger() {
        return trigger;
    }

    /**
     * Sets what triggered the run.
     *
     * @param trigger the trigger value to set.
     * @return this {@link TriggeredJobRun} instance, for chaining.
     */
    public TriggeredJobRun withTrigger(String trigger) {
        this.trigger = trigger;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // No required properties on this model; nothing to check.
    }
}
| |
package com.darkhouse.gdefence.Screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.InputListener;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.TextButton;
import com.badlogic.gdx.scenes.scene2d.utils.DragAndDrop;
import com.darkhouse.gdefence.GDefence;
import com.darkhouse.gdefence.InventorySystem.inventory.DoubleClickListener;
import com.darkhouse.gdefence.InventorySystem.inventory.Inventory;
import com.darkhouse.gdefence.InventorySystem.inventory.InventoryActor;
import com.darkhouse.gdefence.InventorySystem.inventory.OverallInventory;
import com.darkhouse.gdefence.Model.PreparationSpellInventoryActor;
import com.darkhouse.gdefence.Model.PreparationTowerInventoryActor;
import com.darkhouse.gdefence.User;
/**
 * Pre-level screen where the player moves towers and spells from the
 * persistent user inventories into the loadout used for the chosen level.
 * Working copies are used throughout, so cancelling preparation never
 * mutates the user's saved inventories.
 */
public class LevelPreparationScreen extends AbstractCampainScreen{
    /** Combined view over the three working-copy inventories (towers, spells, details). */
    private OverallInventory inventoryActor;
    private PreparationTowerInventoryActor preparationTowerInventoryActor;
    private PreparationSpellInventoryActor preparationSpellInventoryActor;
    /** Working copies of the user's inventories; index 0 = towers, 1 = spells, 2 = details. */
    private Inventory[] saveInventory;
    private TextButton startButton;

    public LevelPreparationScreen() {
        super("prepare_to_level");
    }

    /**
     * Updates the screen title with the level number and reloads the
     * preparation state for that level.
     *
     * @param level campaign level being prepared
     */
    public void setLevel(int level){
        setName("prepare_to_level", String.valueOf(level));
        load(level);
    }

    @Override
    public void show() {
        super.show();
    }

    /** Clears the temporary preparation inventories when leaving the screen. */
    public void flush(){
        preparationSpellInventoryActor.flush();
        preparationTowerInventoryActor.flush();
    }

    /**
     * One-time construction of the screen's actors: the combined inventory
     * view, both preparation drop targets, and the start button. Listener
     * wiring that depends on the level is done in {@link #load(int)}.
     */
    public void init(){
        // Snapshot the user's inventories so preparation edits are reversible.
        saveInventory = new Inventory[3];
        saveInventory[0] = new Inventory(User.getTowerInventory());
        saveInventory[1] = new Inventory(User.getSpellInventory());
        saveInventory[2] = new Inventory(User.getDetailInventory());
        inventoryActor = new OverallInventory(saveInventory);
        stage.addActor(inventoryActor);
        inventoryActor.init();

        preparationTowerInventoryActor = new PreparationTowerInventoryActor(new DragAndDrop(),
                GDefence.getInstance().assetLoader.getSkin());
        inventoryActor.getActor(0).addFastMoving(preparationTowerInventoryActor.getInventory());
        preparationTowerInventoryActor.setPosition(700, 250);
        inventoryActor.addTarget(preparationTowerInventoryActor);
        inventoryActor.addSlotAsTarget(preparationTowerInventoryActor.getDragAndDrop());
        stage.addActor(preparationTowerInventoryActor);
        preparationTowerInventoryActor.init();
        inventoryActor.setPosition(100, 50);

        preparationSpellInventoryActor = new PreparationSpellInventoryActor(new DragAndDrop(),
                GDefence.getInstance().assetLoader.getSkin());
        inventoryActor.getActor(1).addFastMoving(preparationSpellInventoryActor.getInventory());
        preparationSpellInventoryActor.setPosition(700, 100);
        inventoryActor.addTarget(preparationSpellInventoryActor);
        inventoryActor.addSlotAsTarget(preparationSpellInventoryActor.getDragAndDrop());
        stage.addActor(preparationSpellInventoryActor);
        preparationSpellInventoryActor.init();

        startButton = new TextButton(GDefence.getInstance().assetLoader.getWord("start"), GDefence.getInstance().assetLoader.getSkin());
        startButton.setSize(150, 70);
        startButton.setPosition(1080, 30); // pinned to the bottom-right of the fixed 1280-wide layout
        stage.addActor(startButton);
    }

    /**
     * Refreshes the working-copy inventories from the user's saved state and
     * rebinds the start button to launch the given level. Assumes
     * {@link #init()} has already built the actors.
     *
     * @param level campaign level to launch when the start button is pressed
     */
    public void load(final int level){
        saveInventory = new Inventory[3];
        saveInventory[0] = new Inventory(User.getTowerInventory());
        saveInventory[1] = new Inventory(User.getSpellInventory());
        saveInventory[2] = new Inventory(User.getDetailInventory());

        inventoryActor.getActor(0).update(saveInventory[0]);
        inventoryActor.getActor(1).update(saveInventory[1]);
        inventoryActor.getActor(2).update(saveInventory[2]);
        inventoryActor.getActor(0).addFastMoving(preparationTowerInventoryActor.getInventory());
        inventoryActor.getActor(1).addFastMoving(preparationSpellInventoryActor.getInventory());

        inventoryActor.notifyListeners();
        preparationTowerInventoryActor.notifyListeners();
        preparationSpellInventoryActor.notifyListeners();

        // Replace any listener left over from a previous level so only one
        // launch target is active at a time.
        startButton.clearListeners();
        startButton.addListener(new InputListener(){
            @Override
            public boolean touchDown (InputEvent event, float x, float y, int pointer, int button) {
                // Copy the chosen loadouts so the loading screen owns its own state.
                Inventory towers = new Inventory(preparationTowerInventoryActor.getInventory());
                Inventory spells = new Inventory(preparationSpellInventoryActor.getInventory());
                GDefence.getInstance().setScreen(new LevelLoadingScreen(level, towers, spells));
                return true;
            }
        });
    }

    @Override
    public void hide() {
        flush();
        super.hide();
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }
}
| |
/*
* Copyright (C) 2013 Evgeny Shishkin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.devspark.progressfragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AnimationUtils;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockFragment;
import pro.jariz.reisplanner.R;
import pro.jariz.reisplanner.api.NSTaskInvokable;
/**
* The implementation of the fragment to display content. Based on {@link android.support.v4.app.ListFragment}.
* If you are waiting for the initial data, you'll can displaying during this time an indeterminate progress indicator.
*
* @author Evgeny Shishkin
*/
/**
 * Fragment base class that shows an indeterminate progress indicator until a
 * content view is installed, then cross-fades between the two. The layout must
 * contain {@code R.id.progress_container} and {@code R.id.content_container};
 * an {@code android.R.id.empty} view is optional and, when present, can be
 * toggled via {@link #setContentEmpty(boolean)}.
 */
public class ProgressSherlockFragment extends NSTaskInvokable {
    private View mProgressContainer;
    private View mContentContainer;
    private View mContentView;
    private View mEmptyView;   // optional; null when the layout has no android.R.id.empty view
    private boolean mContentShown;
    private boolean mIsContentEmpty;

    /**
     * Provide default implementation to return a simple view. Subclasses
     * can override to replace with their own layout. If doing so, the
     * returned view hierarchy <em>must</em> have a progress container whose id
     * is {@link R.id#progress_container R.id.progress_container}, content container whose id
     * is {@link R.id#content_container R.id.content_container} and can optionally
     * have a sibling view id {@link android.R.id#empty android.R.id.empty}
     * that is to be shown when the content is empty.
     * <p/>
     * <p>If you are overriding this method with your own custom content,
     * consider including the standard layout {@link R.layout#fragment_progress}
     * in your layout file, so that you continue to retain all of the standard
     * behavior of ProgressFragment. In particular, this is currently the only
     * way to have the built-in indeterminate progress state be shown.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_progress, container, false);
    }

    /**
     * Attach to view once the view hierarchy has been created.
     */
    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        ensureContent();
    }

    /**
     * Detach from view. All view references are dropped to avoid leaking the
     * destroyed view hierarchy.
     */
    @Override
    public void onDestroyView() {
        mContentShown = false;
        mIsContentEmpty = false;
        mProgressContainer = mContentContainer = mContentView = mEmptyView = null;
        super.onDestroyView();
    }

    /**
     * Return content view or null if the content view has not been initialized.
     *
     * @return content view or null
     * @see #setContentView(android.view.View)
     * @see #setContentView(int)
     */
    public View getContentView() {
        return mContentView;
    }

    /**
     * Set the content view from a layout resource.
     *
     * @param layoutResId Resource ID to be inflated.
     * @see #setContentView(android.view.View)
     * @see #getContentView()
     */
    public void setContentView(int layoutResId) {
        LayoutInflater layoutInflater = LayoutInflater.from(getActivity());
        View contentView = layoutInflater.inflate(layoutResId, null);
        setContentView(contentView);
    }

    /**
     * Set the content view to an explicit view. If the content view was installed earlier,
     * the content will be replaced with a new view at the same position.
     *
     * @param view The desired content to display. Value can't be null.
     * @throws IllegalArgumentException if {@code view} is null
     * @throws IllegalStateException if the content container is not a ViewGroup
     * @see #setContentView(int)
     * @see #getContentView()
     */
    public void setContentView(View view) {
        ensureContent();
        if (view == null) {
            throw new IllegalArgumentException("Content view can't be null");
        }
        if (mContentContainer instanceof ViewGroup) {
            ViewGroup contentContainer = (ViewGroup) mContentContainer;
            if (mContentView == null) {
                contentContainer.addView(view);
            } else {
                // Replace the old content view in place so sibling order is preserved.
                int index = contentContainer.indexOfChild(mContentView);
                contentContainer.removeView(mContentView);
                contentContainer.addView(view, index);
            }
            mContentView = view;
        } else {
            throw new IllegalStateException("Can't be used with a custom content view");
        }
    }

    /**
     * The default content for a ProgressFragment has a TextView that can be shown when
     * the content is empty {@link #setContentEmpty(boolean)}.
     * If you would like to have it shown, call this method to supply the text it should use.
     *
     * @param resId Identification of string from a resources
     * @see #setEmptyText(CharSequence)
     */
    public void setEmptyText(int resId) {
        setEmptyText(getString(resId));
    }

    /**
     * The default content for a ProgressFragment has a TextView that can be shown when
     * the content is empty {@link #setContentEmpty(boolean)}.
     * If you would like to have it shown, call this method to supply the text it should use.
     *
     * @param text Text for empty view
     * @throws IllegalStateException if the layout has no TextView empty view
     * @see #setEmptyText(int)
     */
    public void setEmptyText(CharSequence text) {
        ensureContent();
        // instanceof is false for null, so no separate null check is needed.
        if (mEmptyView instanceof TextView) {
            ((TextView) mEmptyView).setText(text);
        } else {
            throw new IllegalStateException("Can't be used with a custom content view");
        }
    }

    /**
     * Control whether the content is being displayed. You can make it not
     * displayed if you are waiting for the initial data to show in it. During
     * this time an indeterminate progress indicator will be shown instead.
     *
     * @param shown If true, the content view is shown; if false, the progress
     *              indicator. The initial value is true.
     * @see #setContentShownNoAnimation(boolean)
     */
    public void setContentShown(boolean shown) {
        setContentShown(shown, true);
    }

    /**
     * Like {@link #setContentShown(boolean)}, but no animation is used when
     * transitioning from the previous state.
     *
     * @param shown If true, the content view is shown; if false, the progress
     *              indicator. The initial value is true.
     * @see #setContentShown(boolean)
     */
    public void setContentShownNoAnimation(boolean shown) {
        setContentShown(shown, false);
    }

    /**
     * Control whether the content is being displayed. You can make it not
     * displayed if you are waiting for the initial data to show in it. During
     * this time an indeterminate progress indicator will be shown instead.
     *
     * @param shown   If true, the content view is shown; if false, the progress
     *                indicator. The initial value is true.
     * @param animate If true, an animation will be used to transition to the
     *                new state.
     */
    private void setContentShown(boolean shown, boolean animate) {
        ensureContent();
        if (mContentShown == shown) {
            return;
        }
        mContentShown = shown;
        if (shown) {
            if (animate) {
                mProgressContainer.startAnimation(AnimationUtils.loadAnimation(getActivity(), android.R.anim.fade_out));
                mContentContainer.startAnimation(AnimationUtils.loadAnimation(getActivity(), android.R.anim.fade_in));
            } else {
                mProgressContainer.clearAnimation();
                mContentContainer.clearAnimation();
            }
            mProgressContainer.setVisibility(View.GONE);
            mContentContainer.setVisibility(View.VISIBLE);
        } else {
            if (animate) {
                mProgressContainer.startAnimation(AnimationUtils.loadAnimation(getActivity(), android.R.anim.fade_in));
                mContentContainer.startAnimation(AnimationUtils.loadAnimation(getActivity(), android.R.anim.fade_out));
            } else {
                mProgressContainer.clearAnimation();
                mContentContainer.clearAnimation();
            }
            mProgressContainer.setVisibility(View.VISIBLE);
            mContentContainer.setVisibility(View.GONE);
        }
    }

    /**
     * Returns true if content is empty. The default content is not empty.
     *
     * @return true if content is null or empty
     * @see #setContentEmpty(boolean)
     */
    public boolean isContentEmpty() {
        return mIsContentEmpty;
    }

    /**
     * If the content is empty, then set true otherwise false. The default content is not empty.
     * You can't call this method if the content view has not been initialized before
     * {@link #setContentView(android.view.View)} and content view not null.
     *
     * @param isEmpty true if content is empty else false
     * @throws IllegalStateException if no content view has been installed
     * @see #isContentEmpty()
     */
    public void setContentEmpty(boolean isEmpty) {
        ensureContent();
        if (mContentView == null) {
            throw new IllegalStateException("Content view must be initialized before");
        }
        if (isEmpty) {
            // The empty view is optional (see ensureContent); guard against
            // layouts that omit android.R.id.empty to avoid an NPE here.
            if (mEmptyView != null) {
                mEmptyView.setVisibility(View.VISIBLE);
            }
            mContentView.setVisibility(View.GONE);
        } else {
            if (mEmptyView != null) {
                mEmptyView.setVisibility(View.GONE);
            }
            mContentView.setVisibility(View.VISIBLE);
        }
        mIsContentEmpty = isEmpty;
    }

    /**
     * Initialization of views. Looks up the required containers, fails fast if
     * they are missing, and starts in the progress state until a content view
     * is installed.
     */
    private void ensureContent() {
        if (mContentContainer != null && mProgressContainer != null) {
            return;
        }
        View root = getView();
        if (root == null) {
            throw new IllegalStateException("Content view not yet created");
        }
        mProgressContainer = root.findViewById(R.id.progress_container);
        if (mProgressContainer == null) {
            throw new RuntimeException("Your content must have a ViewGroup whose id attribute is 'R.id.progress_container'");
        }
        mContentContainer = root.findViewById(R.id.content_container);
        if (mContentContainer == null) {
            throw new RuntimeException("Your content must have a ViewGroup whose id attribute is 'R.id.content_container'");
        }
        mEmptyView = root.findViewById(android.R.id.empty);
        if (mEmptyView != null) {
            mEmptyView.setVisibility(View.GONE);
        }
        mContentShown = true;
        // We are starting without a content, so assume we won't
        // have our data right away and start with the progress indicator.
        if (mContentView == null) {
            setContentShown(false, false);
        }
    }
}
| |
/*
* Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: DTMException.java,v 1.3 2005/09/28 13:48:50 pvedula Exp $
*/
package com.sun.org.apache.xml.internal.dtm;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import javax.xml.transform.SourceLocator;
import com.sun.org.apache.xml.internal.res.XMLErrorResources;
import com.sun.org.apache.xml.internal.res.XMLMessages;
/**
* This class specifies an exceptional condition that occured
* in the DTM module.
*/
public class DTMException extends RuntimeException {
    static final long serialVersionUID = -775576419181334734L;

    /** Field locator specifies where the error occurred.
     * @serial */
    SourceLocator locator;

    /**
     * Method getLocator retrieves an instance of a SourceLocator
     * object that specifies where an error occurred.
     *
     * @return A SourceLocator object, or null if none was specified.
     */
    public SourceLocator getLocator() {
        return locator;
    }

    /**
     * Method setLocator sets an instance of a SourceLocator
     * object that specifies where an error occurred.
     *
     * @param location A SourceLocator object, or null to clear the location.
     */
    public void setLocator(SourceLocator location) {
        locator = location;
    }

    /** Field containedException specifies a wrapped exception. May be null.
     * @serial */
    Throwable containedException;

    /**
     * This method retrieves an exception that this exception wraps.
     *
     * @return An Throwable object, or null.
     * @see #getCause
     */
    public Throwable getException() {
        return containedException;
    }

    /**
     * Returns the cause of this throwable or <code>null</code> if the
     * cause is nonexistent or unknown. (The cause is the throwable that
     * caused this throwable to get thrown.)
     */
    public Throwable getCause() {
        // A self-reference means "no cause"; never report this as its own cause.
        return ((containedException == this)
                ? null
                : containedException);
    }

    /**
     * Initializes the <i>cause</i> of this throwable to the specified value.
     * (The cause is the throwable that caused this throwable to get thrown.)
     *
     * <p>This method can be called at most once. It is generally called from
     * within the constructor, or immediately after creating the
     * throwable. If this throwable was created
     * with {@link #DTMException(Throwable)} or
     * {@link #DTMException(String,Throwable)}, this method cannot be called
     * even once.
     *
     * @param cause the cause (which is saved for later retrieval by the
     *              {@link #getCause()} method). (A <tt>null</tt> value is
     *              permitted, and indicates that the cause is nonexistent or
     *              unknown.)
     * @return a reference to this <code>Throwable</code> instance.
     * @throws IllegalArgumentException if <code>cause</code> is this
     *                                  throwable. (A throwable cannot
     *                                  be its own cause.)
     * @throws IllegalStateException if this throwable was
     *                               created with {@link #DTMException(Throwable)} or
     *                               {@link #DTMException(String,Throwable)}, or this method has already
     *                               been called on this throwable.
     */
    public synchronized Throwable initCause(Throwable cause) {
        // BUGFIX: the original condition was inverted
        // ((this.containedException == null) && (cause != null)), which threw
        // when there was NO existing cause and silently allowed overwriting an
        // existing one. Per the documented contract (and Throwable.initCause),
        // the cause may be set only while it is still unset.
        if (this.containedException != null) {
            throw new IllegalStateException(XMLMessages.createXMLMessage(XMLErrorResources.ER_CANNOT_OVERWRITE_CAUSE, null)); //"Can't overwrite cause");
        }

        if (cause == this) {
            throw new IllegalArgumentException(
                XMLMessages.createXMLMessage(XMLErrorResources.ER_SELF_CAUSATION_NOT_PERMITTED, null)); //"Self-causation not permitted");
        }

        this.containedException = cause;

        return this;
    }

    /**
     * Create a new DTMException.
     *
     * @param message The error or warning message.
     */
    public DTMException(String message) {
        super(message);

        this.containedException = null;
        this.locator = null;
    }

    /**
     * Create a new DTMException wrapping an existing exception.
     *
     * @param e The exception to be wrapped.
     */
    public DTMException(Throwable e) {
        super(e.getMessage());

        this.containedException = e;
        this.locator = null;
    }

    /**
     * Wrap an existing exception in a DTMException.
     *
     * <p>This is used for throwing processor exceptions before
     * the processing has started.</p>
     *
     * @param message The error or warning message, or null to
     *                use the message from the embedded exception.
     * @param e Any exception
     */
    public DTMException(String message, Throwable e) {
        super(((message == null) || (message.length() == 0))
              ? e.getMessage()
              : message);

        this.containedException = e;
        this.locator = null;
    }

    /**
     * Create a new DTMException from a message and a Locator.
     *
     * <p>This constructor is especially useful when an application is
     * creating its own exception from within a DocumentHandler
     * callback.</p>
     *
     * @param message The error or warning message.
     * @param locator The locator object for the error or warning.
     */
    public DTMException(String message, SourceLocator locator) {
        super(message);

        this.containedException = null;
        this.locator = locator;
    }

    /**
     * Wrap an existing exception in a DTMException.
     *
     * @param message The error or warning message, or null to
     *                use the message from the embedded exception.
     * @param locator The locator object for the error or warning.
     * @param e Any exception
     */
    public DTMException(String message, SourceLocator locator,
                        Throwable e) {
        super(message);

        this.containedException = e;
        this.locator = locator;
    }

    /**
     * Get the error message with location information
     * appended.
     */
    public String getMessageAndLocation() {
        StringBuffer sbuffer = new StringBuffer();
        String message = super.getMessage();

        if (null != message) {
            sbuffer.append(message);
        }

        if (null != locator) {
            String systemID = locator.getSystemId();
            int line = locator.getLineNumber();
            int column = locator.getColumnNumber();

            if (null != systemID) {
                sbuffer.append("; SystemID: ");
                sbuffer.append(systemID);
            }

            if (0 != line) {
                sbuffer.append("; Line#: ");
                sbuffer.append(line);
            }

            if (0 != column) {
                sbuffer.append("; Column#: ");
                sbuffer.append(column);
            }
        }

        return sbuffer.toString();
    }

    /**
     * Get the location information as a string.
     *
     * @return A string with location info, or null
     *         if there is no location information.
     */
    public String getLocationAsString() {
        if (null != locator) {
            StringBuffer sbuffer = new StringBuffer();
            String systemID = locator.getSystemId();
            int line = locator.getLineNumber();
            int column = locator.getColumnNumber();

            if (null != systemID) {
                sbuffer.append("; SystemID: ");
                sbuffer.append(systemID);
            }

            if (0 != line) {
                sbuffer.append("; Line#: ");
                sbuffer.append(line);
            }

            if (0 != column) {
                sbuffer.append("; Column#: ");
                sbuffer.append(column);
            }

            return sbuffer.toString();
        } else {
            return null;
        }
    }

    /**
     * Print the trace of methods from where the error
     * originated. This will trace all nested exception
     * objects, as well as this object.
     */
    public void printStackTrace() {
        printStackTrace(new java.io.PrintWriter(System.err, true));
    }

    /**
     * Print the trace of methods from where the error
     * originated. This will trace all nested exception
     * objects, as well as this object.
     * @param s The stream where the dump will be sent to.
     */
    public void printStackTrace(java.io.PrintStream s) {
        printStackTrace(new java.io.PrintWriter(s));
    }

    /**
     * Print the trace of methods from where the error
     * originated. This will trace all nested exception
     * objects, as well as this object.
     * @param s The writer where the dump will be sent to.
     */
    public void printStackTrace(java.io.PrintWriter s) {

        if (s == null) {
            s = new java.io.PrintWriter(System.err, true);
        }

        try {
            String locInfo = getLocationAsString();

            if (null != locInfo) {
                s.println(locInfo);
            }

            super.printStackTrace(s);
        } catch (Throwable e) {
            // Best-effort printing: never let stack-trace output itself throw.
        }

        // Detect whether Throwable.getCause exists; on JDK 1.4+ the default
        // printStackTrace already includes the cause chain, so the manual
        // walk below is only needed on older runtimes.
        boolean isJdk14OrHigher = false;
        try {
            Throwable.class.getMethod("getCause", (Class[]) null);
            isJdk14OrHigher = true;
        } catch (NoSuchMethodException nsme) {
            // do nothing
        }

        // The printStackTrace method of the Throwable class in jdk 1.4
        // and higher will include the cause when printing the backtrace.
        // The following code is only required when using jdk 1.3 or lower
        if (!isJdk14OrHigher) {
            Throwable exception = getException();

            // Bounded to 10 levels to avoid looping forever on cyclic chains.
            for (int i = 0; (i < 10) && (null != exception); i++) {
                s.println("---------");

                try {
                    if (exception instanceof DTMException) {
                        String locInfo =
                            ((DTMException) exception)
                                .getLocationAsString();

                        if (null != locInfo) {
                            s.println(locInfo);
                        }
                    }

                    exception.printStackTrace(s);
                } catch (Throwable e) {
                    s.println("Could not print stack trace...");
                }

                // Follow the wrapped-exception chain via reflection so
                // non-DTMException wrappers with a getException() are handled.
                try {
                    Method meth =
                        ((Object) exception).getClass().getMethod("getException",
                            (Class[]) null);

                    if (null != meth) {
                        Throwable prev = exception;

                        exception = (Throwable) meth.invoke(exception, (Object[]) null);

                        if (prev == exception) {
                            break;
                        }
                    } else {
                        exception = null;
                    }
                } catch (InvocationTargetException ite) {
                    exception = null;
                } catch (IllegalAccessException iae) {
                    exception = null;
                } catch (NoSuchMethodException nsme) {
                    exception = null;
                }
            }
        }
    }
}
| |
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import static com.google.devtools.build.lib.rules.java.DeployArchiveBuilder.Compression.COMPRESSED;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.RunfilesSupport;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.rules.RuleConfiguredTargetFactory;
import com.google.devtools.build.lib.rules.cpp.CppHelper;
import com.google.devtools.build.lib.rules.cpp.LinkerInput;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgs.ClasspathType;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* An implementation of java_binary.
*/
/**
 * An implementation of the {@code java_binary} rule: compiles the Java sources, wires up
 * runfiles, the launcher, the stub script and the deploy jar, and exposes the usual
 * Java providers on the resulting configured target.
 */
public class JavaBinary implements RuleConfiguredTargetFactory {
  /** Directory under which C++ runtime libraries are symlinked into the runfiles tree. */
  private static final PathFragment CPP_RUNTIMES = new PathFragment("_cpp_runtimes");

  private final JavaSemantics semantics;

  protected JavaBinary(JavaSemantics semantics) {
    this.semantics = semantics;
  }

  @Override
  public ConfiguredTarget create(RuleContext ruleContext) {
    final JavaCommon common = new JavaCommon(ruleContext, semantics);
    DeployArchiveBuilder deployArchiveBuilder = new DeployArchiveBuilder(semantics, ruleContext);
    Runfiles.Builder runfilesBuilder = new Runfiles.Builder(ruleContext.getWorkspaceName());
    List<String> jvmFlags = new ArrayList<>();

    common.initializeJavacOpts();
    JavaTargetAttributes.Builder attributesBuilder = common.initCommon();
    attributesBuilder.addClassPathResources(
        ruleContext.getPrerequisiteArtifacts("classpath_resources", Mode.TARGET).list());

    List<String> userJvmFlags = common.getJvmFlags();

    ruleContext.checkSrcsSamePackage(true);
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    List<TransitiveInfoCollection> deps =
        Lists.newArrayList(common.targetsTreatedAsDeps(ClasspathType.COMPILE_ONLY));
    semantics.checkRule(ruleContext, common);
    String mainClass = semantics.getMainClass(ruleContext, common);
    // Remember the main class before coverage instrumentation may replace it; the original
    // one is still needed for the deploy-jar manifest (see getDeployManifestLines).
    String originalMainClass = mainClass;
    if (ruleContext.hasErrors()) {
      return null;
    }

    // Collect the transitive dependencies.
    JavaCompilationHelper helper = new JavaCompilationHelper(
        ruleContext, semantics, common.getJavacOpts(), attributesBuilder);
    helper.addLibrariesToAttributes(deps);
    helper.addProvidersToAttributes(common.compilationArgsFromSources(), /* isNeverLink */ false);
    attributesBuilder.addNativeLibraries(
        collectNativeLibraries(common.targetsTreatedAsDeps(ClasspathType.BOTH)));

    // deploy_env is valid for java_binary, but not for java_test.
    if (ruleContext.getRule().isAttrDefined("deploy_env", Type.LABEL_LIST)) {
      for (JavaRuntimeClasspathProvider envTarget : ruleContext.getPrerequisites(
          "deploy_env", Mode.TARGET, JavaRuntimeClasspathProvider.class)) {
        attributesBuilder.addExcludedArtifacts(envTarget.getRuntimeClasspath());
      }
    }

    Artifact srcJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_SOURCE_JAR);
    Artifact classJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_CLASS_JAR);
    ImmutableList<Artifact> srcJars = ImmutableList.of(srcJar);

    Artifact launcher = semantics.getLauncher(ruleContext, common, deployArchiveBuilder,
        runfilesBuilder, jvmFlags, attributesBuilder);

    JavaCompilationArtifacts.Builder javaArtifactsBuilder = new JavaCompilationArtifacts.Builder();
    Artifact instrumentationMetadata =
        helper.createInstrumentationMetadata(classJar, javaArtifactsBuilder);

    NestedSetBuilder<Artifact> filesBuilder = NestedSetBuilder.stableOrder();
    Artifact executable = null;
    if (createExecutable) {
      executable = ruleContext.createOutputArtifact(); // the artifact for the rule itself
      filesBuilder.add(classJar).add(executable);

      if (ruleContext.getConfiguration().isCodeCoverageEnabled()) {
        // Coverage support may swap the main class for an instrumentation wrapper.
        mainClass = semantics.addCoverageSupport(helper, attributesBuilder,
            executable, instrumentationMetadata, javaArtifactsBuilder, mainClass);
      }
    } else {
      filesBuilder.add(classJar);
    }

    JavaTargetAttributes attributes = helper.getAttributes();
    List<Artifact> nativeLibraries = attributes.getNativeLibraries();
    if (!nativeLibraries.isEmpty()) {
      jvmFlags.add("-Djava.library.path="
          + JavaCommon.javaLibraryPath(nativeLibraries, ruleContext.getRule().getWorkspaceName()));
    }

    JavaConfiguration javaConfig = ruleContext.getFragment(JavaConfiguration.class);
    if (attributes.hasMessages()) {
      helper.addTranslations(semantics.translate(ruleContext, javaConfig,
          attributes.getMessages()));
    }

    if (attributes.hasSourceFiles() || attributes.hasSourceJars()
        || attributes.hasResources() || attributes.hasClassPathResources()) {
      // We only want to add a jar to the classpath of a dependent rule if it has content.
      javaArtifactsBuilder.addRuntimeJar(classJar);
    }

    // Any JAR files should be added to the collection of runtime jars.
    javaArtifactsBuilder.addRuntimeJars(attributes.getJarFiles());

    Artifact outputDepsProto = helper.createOutputDepsProtoArtifact(classJar, javaArtifactsBuilder);

    common.setJavaCompilationArtifacts(javaArtifactsBuilder.build());

    // The gensrc jar is created only if the target uses annotation processing. Otherwise,
    // it is null, and the source jar action will not depend on the compile action.
    Artifact gensrcJar = helper.createGensrcJar(classJar);
    Artifact manifestProtoOutput = helper.createManifestProtoOutput(classJar);

    helper.createCompileAction(
        classJar, manifestProtoOutput, gensrcJar, outputDepsProto, instrumentationMetadata);
    helper.createSourceJarAction(srcJar, gensrcJar);

    Artifact genClassJar = ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_GEN_JAR);
    helper.createGenJarAction(classJar, manifestProtoOutput, genClassJar);

    common.setClassPathFragment(new ClasspathConfiguredFragment(
        common.getJavaCompilationArtifacts(), attributes, false));

    // Collect the action inputs for the runfiles collector here because we need to access the
    // analysis environment, and that may no longer be safe when the runfiles collector runs.
    Iterable<Artifact> dynamicRuntimeActionInputs =
        CppHelper.getToolchain(ruleContext).getDynamicRuntimeLinkInputs();

    Iterables.addAll(jvmFlags, semantics.getJvmFlags(ruleContext, common, launcher, userJvmFlags));
    if (ruleContext.hasErrors()) {
      return null;
    }

    if (createExecutable) {
      // Create a shell stub for a Java application
      semantics.createStubAction(ruleContext, common, jvmFlags, executable, mainClass,
          common.getJavaBinSubstitution(launcher));
    }

    NestedSet<Artifact> transitiveSourceJars = collectTransitiveSourceJars(common, srcJar);

    // TODO(bazel-team): if (getOptions().sourceJars) then make this a dummy prerequisite for the
    // DeployArchiveAction ? Needs a few changes there as we can't pass inputs
    helper.createSourceJarAction(semantics, ImmutableList.<Artifact>of(),
        transitiveSourceJars.toCollection(),
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_DEPLOY_SOURCE_JAR));

    RuleConfiguredTargetBuilder builder =
        new RuleConfiguredTargetBuilder(ruleContext);

    semantics.addProviders(ruleContext, common, jvmFlags, classJar, srcJar, genClassJar, gensrcJar,
        ImmutableMap.<Artifact, Artifact>of(), helper, filesBuilder, builder);

    NestedSet<Artifact> filesToBuild = filesBuilder.build();

    collectDefaultRunfiles(runfilesBuilder, ruleContext, common, filesToBuild, launcher,
        dynamicRuntimeActionInputs);
    Runfiles defaultRunfiles = runfilesBuilder.build();

    // BUGFIX: the original initializer contained a redundant self-assignment
    // ("runfilesSupport = RunfilesSupport.withExecutable(...)" inside its own ternary
    // initializer), which was harmless at runtime but clearly a typo.
    RunfilesSupport runfilesSupport = createExecutable
        ? RunfilesSupport.withExecutable(
            ruleContext, defaultRunfiles, executable,
            semantics.getExtraArguments(ruleContext, common))
        : null;

    RunfilesProvider runfilesProvider = RunfilesProvider.withData(
        defaultRunfiles,
        new Runfiles.Builder(ruleContext.getWorkspaceName()).merge(runfilesSupport).build());

    ImmutableList<String> deployManifestLines =
        getDeployManifestLines(ruleContext, originalMainClass);

    // Create the deploy jar and make it dependent on the runfiles middleman if an executable is
    // created. Do not add the deploy jar to files to build, so we will only build it when it gets
    // requested.
    Artifact deployJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_DEPLOY_JAR);
    deployArchiveBuilder
        .setOutputJar(deployJar)
        .setJavaStartClass(mainClass)
        .setDeployManifestLines(deployManifestLines)
        .setAttributes(attributes)
        .addRuntimeJars(common.getJavaCompilationArtifacts().getRuntimeJars())
        .setIncludeBuildData(true)
        .setRunfilesMiddleman(
            runfilesSupport == null ? null : runfilesSupport.getRunfilesMiddleman())
        .setCompression(COMPRESSED)
        .setLauncher(launcher);
    deployArchiveBuilder.build();

    common.addTransitiveInfoProviders(builder, filesToBuild, classJar);

    return builder
        .setFilesToBuild(filesToBuild)
        .add(RunfilesProvider.class, runfilesProvider)
        .setRunfilesSupport(runfilesSupport, executable)
        .add(JavaRuntimeClasspathProvider.class,
            new JavaRuntimeClasspathProvider(common.getRuntimeClasspath()))
        .add(JavaSourceJarsProvider.class,
            new JavaSourceJarsProvider(transitiveSourceJars, srcJars))
        .addOutputGroup(JavaSemantics.SOURCE_JARS_OUTPUT_GROUP, transitiveSourceJars)
        .addOutputGroup(JavaSemantics.GENERATED_JARS_OUTPUT_GROUP, genClassJar)
        .build();
  }

  /**
   * Returns the extra manifest lines for the deploy jar: the user-supplied
   * {@code deploy_manifest_lines} attribute, plus a {@code Coverage-Main-Class} entry
   * (pointing at the pre-instrumentation main class) when coverage is enabled.
   */
  private ImmutableList<String> getDeployManifestLines(RuleContext ruleContext,
      String originalMainClass) {
    ImmutableList.Builder<String> builder = ImmutableList.<String>builder()
        .addAll(ruleContext.attributes().get("deploy_manifest_lines", Type.STRING_LIST));
    if (ruleContext.getConfiguration().isCodeCoverageEnabled()) {
      builder.add("Coverage-Main-Class: " + originalMainClass);
    }
    return builder.build();
  }

  /**
   * Populates {@code builder} with the default runfiles of the binary: the files to build,
   * the runtime classpath, launcher runfiles, runtime_deps, coverage artifacts, and — when
   * the JDK comes from the source tree — the JDK files plus C++ runtime symlinks.
   */
  private void collectDefaultRunfiles(Runfiles.Builder builder, RuleContext ruleContext,
      JavaCommon common, NestedSet<Artifact> filesToBuild, Artifact launcher,
      Iterable<Artifact> dynamicRuntimeActionInputs) {
    // Convert to iterable: filesToBuild has a different order.
    builder.addArtifacts((Iterable<Artifact>) filesToBuild);
    builder.addArtifacts(common.getJavaCompilationArtifacts().getRuntimeJars());
    if (launcher != null) {
      final TransitiveInfoCollection defaultLauncher =
          JavaHelper.launcherForTarget(semantics, ruleContext);
      final Artifact defaultLauncherArtifact =
          JavaHelper.launcherArtifactForTarget(semantics, ruleContext);
      if (!defaultLauncherArtifact.equals(launcher)) {
        builder.addArtifact(launcher);

        // N.B. The "default launcher" referred to here is the launcher target specified through
        // an attribute or flag. We wish to retain the runfiles of the default launcher, *except*
        // for the original cc_binary artifact, because we've swapped it out with our custom
        // launcher. Hence, instead of calling builder.addTarget(), or adding an odd method
        // to Runfiles.Builder, we "unravel" the call and manually add things to the builder.
        // Because the NestedSet representing each target's launcher runfiles is re-built here,
        // we may see increased memory consumption for representing the target's runfiles.
        Runfiles runfiles =
            defaultLauncher.getProvider(RunfilesProvider.class)
                .getDefaultRunfiles();
        NestedSetBuilder<Artifact> unconditionalArtifacts = NestedSetBuilder.compileOrder();
        for (Artifact a : runfiles.getUnconditionalArtifacts()) {
          if (!a.equals(defaultLauncherArtifact)) {
            unconditionalArtifacts.add(a);
          }
        }
        builder.addTransitiveArtifacts(unconditionalArtifacts.build());
        builder.addSymlinks(runfiles.getSymlinks());
        builder.addRootSymlinks(runfiles.getRootSymlinks());
        builder.addPruningManifests(runfiles.getPruningManifests());
      } else {
        builder.addTarget(defaultLauncher, RunfilesProvider.DEFAULT_RUNFILES);
      }
    }

    semantics.addRunfilesForBinary(ruleContext, launcher, builder);
    builder.addRunfiles(ruleContext, RunfilesProvider.DEFAULT_RUNFILES);
    builder.add(ruleContext, JavaRunfilesProvider.TO_RUNFILES);

    List<? extends TransitiveInfoCollection> runtimeDeps =
        ruleContext.getPrerequisites("runtime_deps", Mode.TARGET);
    builder.addTargets(runtimeDeps, JavaRunfilesProvider.TO_RUNFILES);
    builder.addTargets(runtimeDeps, RunfilesProvider.DEFAULT_RUNFILES);

    semantics.addDependenciesForRunfiles(ruleContext, builder);

    if (ruleContext.getConfiguration().isCodeCoverageEnabled()) {
      Artifact instrumentedJar = common.getJavaCompilationArtifacts().getInstrumentedJar();
      if (instrumentedJar != null) {
        builder.addArtifact(instrumentedJar);
      }
    }

    builder.addArtifacts((Iterable<Artifact>) common.getRuntimeClasspath());

    // Add the JDK files if it comes from the source repository (see java_stub_template.txt).
    TransitiveInfoCollection javabaseTarget = ruleContext.getPrerequisite(":jvm", Mode.HOST);
    if (javabaseTarget != null) {
      builder.addArtifacts(
          (Iterable<Artifact>) javabaseTarget.getProvider(FileProvider.class).getFilesToBuild());

      // Add symlinks to the C++ runtime libraries under a path that can be built
      // into the Java binary without having to embed the crosstool, gcc, and grte
      // version information contained within the libraries' package paths.
      for (Artifact lib : dynamicRuntimeActionInputs) {
        PathFragment path = CPP_RUNTIMES.getRelative(lib.getExecPath().getBaseName());
        builder.addSymlink(path, lib);
      }
    }
  }

  /**
   * Returns the binary's own source jar plus the transitive source jars of its dependencies.
   */
  private NestedSet<Artifact> collectTransitiveSourceJars(JavaCommon common, Artifact srcJar) {
    NestedSetBuilder<Artifact> builder = NestedSetBuilder.stableOrder();

    builder.add(srcJar);
    for (JavaSourceJarsProvider dep : common.getDependencies(JavaSourceJarsProvider.class)) {
      builder.addTransitive(dep.getTransitiveSourceJars());
    }
    return builder.build();
  }

  /**
   * Collects the native libraries in the transitive closure of the deps.
   *
   * @param deps the dependencies to be included as roots of the transitive closure.
   * @return the native libraries found in the transitive closure of the deps.
   */
  public static Collection<Artifact> collectNativeLibraries(
      Iterable<? extends TransitiveInfoCollection> deps) {
    NestedSet<LinkerInput> linkerInputs = new NativeLibraryNestedSetBuilder()
        .addJavaTargets(deps)
        .build();
    ImmutableList.Builder<Artifact> result = ImmutableList.builder();
    for (LinkerInput linkerInput : linkerInputs) {
      result.add(linkerInput.getArtifact());
    }
    return result.build();
  }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.resolve.reference.impl.providers;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.util.Function;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author Maxim.Mossienko
*/
/**
 * A set of {@link FileReference}s parsed from a path-like string inside a PSI element
 * (e.g. an href or include path). Each path segment between separators becomes one
 * reference; the set also computes the default contexts the path is resolved against.
 *
 * @author Maxim.Mossienko
 */
public class FileReferenceSet {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReferenceSet");
  private static final FileType[] EMPTY_FILE_TYPES = {};

  // Customization hook: lets a reference provider override how the default root
  // directories for resolution are computed from the containing file.
  public static final CustomizableReferenceProvider.CustomizationKey<Function<PsiFile, Collection<PsiFileSystemItem>>>
    DEFAULT_PATH_EVALUATOR_OPTION =
    new CustomizableReferenceProvider.CustomizationKey<Function<PsiFile, Collection<PsiFileSystemItem>>>(
      PsiBundle.message("default.path.evaluator.option"));

  // Path evaluator that resolves absolute paths against the top-level roots of the module.
  public static final Function<PsiFile, Collection<PsiFileSystemItem>> ABSOLUTE_TOP_LEVEL =
    new Function<PsiFile, Collection<PsiFileSystemItem>>() {
      @Override
      @Nullable
      public Collection<PsiFileSystemItem> fun(final PsiFile file) {
        return getAbsoluteTopLevelDirLocations(file);
      }
    };

  // Completion filter accepting only files.
  public static final Condition<PsiFileSystemItem> FILE_FILTER = new Condition<PsiFileSystemItem>() {
    @Override
    public boolean value(final PsiFileSystemItem item) {
      return item instanceof PsiFile;
    }
  };

  // Completion filter accepting only directories.
  public static final Condition<PsiFileSystemItem> DIRECTORY_FILTER = new Condition<PsiFileSystemItem>() {
    @Override
    public boolean value(final PsiFileSystemItem item) {
      return item instanceof PsiDirectory;
    }
  };

  protected FileReference[] myReferences;
  private PsiElement myElement;
  private final int myStartInElement;           // offset of the path inside the element's text
  private final boolean myCaseSensitive;
  private final String myPathStringNonTrimmed;  // raw path text, whitespace preserved
  private final String myPathString;            // trimmed path text
  private Collection<PsiFileSystemItem> myDefaultContexts;  // lazily computed, see getDefaultContexts()
  private final boolean myEndingSlashNotAllowed;
  private boolean myEmptyPathAllowed;
  private @Nullable Map<CustomizableReferenceProvider.CustomizationKey, Object> myOptions;
  private @Nullable FileType[] mySuitableFileTypes;

  public FileReferenceSet(String str,
                          PsiElement element,
                          int startInElement,
                          PsiReferenceProvider provider,
                          boolean caseSensitive,
                          boolean endingSlashNotAllowed,
                          @Nullable FileType[] suitableFileTypes) {
    this(str, element, startInElement, provider, caseSensitive, endingSlashNotAllowed, suitableFileTypes, true);
  }

  public FileReferenceSet(String str,
                          PsiElement element,
                          int startInElement,
                          PsiReferenceProvider provider,
                          boolean caseSensitive,
                          boolean endingSlashNotAllowed,
                          @Nullable FileType[] suitableFileTypes,
                          boolean init) {
    myElement = element;
    myStartInElement = startInElement;
    myCaseSensitive = caseSensitive;
    myPathStringNonTrimmed = str;
    myPathString = str.trim();
    myEndingSlashNotAllowed = endingSlashNotAllowed;
    myEmptyPathAllowed = !endingSlashNotAllowed;
    myOptions = provider instanceof CustomizableReferenceProvider ? ((CustomizableReferenceProvider)provider).getOptions() : null;
    mySuitableFileTypes = suitableFileTypes;

    if (init) {
      reparse();
    }
  }

  /** Returns the text to write for an absolute path, optionally prefixed with "/". */
  protected String getNewAbsolutePath(PsiFileSystemItem root, String relativePath) {
    return absoluteUrlNeedsStartSlash() ? "/" + relativePath : relativePath;
  }

  /** The path-segment separator; subclasses may return a multi-character separator. */
  public String getSeparatorString() {
    return "/";
  }

  /** Additional resolution contexts; none by default. */
  protected Collection<PsiFileSystemItem> getExtraContexts() {
    return Collections.emptyList();
  }

  /**
   * Creates a reference set over the manipulator's value range of {@code element},
   * with the given softness/url-encoding behavior.
   */
  public static FileReferenceSet createSet(PsiElement element,
                                           final boolean soft,
                                           boolean endingSlashNotAllowed,
                                           final boolean urlEncoded) {

    String text;
    int offset;

    final ElementManipulator<PsiElement> manipulator = ElementManipulators.getManipulator(element);
    assert manipulator != null;
    final TextRange range = manipulator.getRangeInElement(element);
    offset = range.getStartOffset();
    text = range.substring(element.getText());
    for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) {
      text = helper.trimUrl(text);
    }

    return new FileReferenceSet(text, element, offset, null, true, endingSlashNotAllowed) {
      @Override
      protected boolean isUrlEncoded() {
        return urlEncoded;
      }

      @Override
      protected boolean isSoft() {
        return soft;
      }
    };
  }

  public FileReferenceSet(String str,
                          PsiElement element,
                          int startInElement,
                          @Nullable PsiReferenceProvider provider,
                          final boolean isCaseSensitive) {
    this(str, element, startInElement, provider, isCaseSensitive, true);
  }

  public FileReferenceSet(@NotNull String str,
                          PsiElement element,
                          int startInElement,
                          PsiReferenceProvider provider,
                          final boolean isCaseSensitive,
                          boolean endingSlashNotAllowed) {
    this(str, element, startInElement, provider, isCaseSensitive, endingSlashNotAllowed, null);
  }

  public FileReferenceSet(final @NotNull PsiElement element) {
    myElement = element;
    TextRange range = ElementManipulators.getValueTextRange(element);
    myStartInElement = range.getStartOffset();
    myPathStringNonTrimmed = range.substring(element.getText());
    myPathString = myPathStringNonTrimmed.trim();
    myEndingSlashNotAllowed = true;
    myCaseSensitive = false;

    reparse();
  }

  public PsiElement getElement() {
    return myElement;
  }

  void setElement(final PsiElement element) {
    myElement = element;
  }

  public boolean isCaseSensitive() {
    return myCaseSensitive;
  }

  public boolean isEndingSlashNotAllowed() {
    return myEndingSlashNotAllowed;
  }

  public int getStartInElement() {
    return myStartInElement;
  }

  /** Factory hook so subclasses can produce custom {@link FileReference} implementations. */
  public FileReference createFileReference(final TextRange range, final int index, final String text) {
    return new FileReference(this, range, index, text);
  }

  /** Re-splits the raw path string into {@link #myReferences}. */
  protected void reparse() {
    String str = myPathStringNonTrimmed;
    final List<FileReference> referencesList = reparse(str, myStartInElement);

    myReferences = referencesList.toArray(new FileReference[referencesList.size()]);
  }

  /**
   * Splits {@code str} into one {@link FileReference} per path segment, with text ranges
   * relative to {@code startInElement}. The parsing below is order-sensitive and is kept
   * exactly as in the original implementation.
   */
  protected List<FileReference> reparse(String str, int startInElement) {
    final List<FileReference> referencesList = new ArrayList<FileReference>();
    String separatorString = getSeparatorString(); // separator's length can be more than one char
    int sepLen = separatorString.length();
    int currentSlash = -sepLen;

    // skip white space
    // NOTE(review): advances by one char per iteration even for multi-char separators —
    // looks intentional only for the default single-char "/"; confirm for subclasses.
    while (currentSlash + sepLen < str.length() && Character.isWhitespace(str.charAt(currentSlash + sepLen))) {
      currentSlash++;
    }

    // Skip a leading separator (absolute path) so the first segment starts after it.
    if (currentSlash + sepLen + sepLen < str.length() &&
        str.substring(currentSlash + sepLen, currentSlash + sepLen + sepLen).equals(separatorString)) {
      currentSlash+=sepLen;
    }
    int index = 0;

    // A path consisting of just the separator yields a single reference for the root.
    if (str.equals(separatorString)) {
      final FileReference fileReference =
        createFileReference(new TextRange(startInElement, startInElement + sepLen), index++, separatorString);
      referencesList.add(fileReference);
    }

    while (true) {
      final int nextSlash = str.indexOf(separatorString, currentSlash + sepLen);
      final String subreferenceText = nextSlash > 0 ? str.substring(currentSlash + sepLen, nextSlash) : str.substring(currentSlash + sepLen);
      final FileReference ref = createFileReference(
        new TextRange(startInElement + currentSlash + sepLen, startInElement + (nextSlash > 0 ? nextSlash : str.length())),
        index++,
        subreferenceText);
      referencesList.add(ref);
      if ((currentSlash = nextSlash) < 0) {
        break;
      }
    }

    return referencesList;
  }

  public FileReference getReference(int index) {
    return myReferences[index];
  }

  @NotNull
  public FileReference[] getAllReferences() {
    return myReferences;
  }

  /** Soft references do not get "unresolved" error highlighting. */
  protected boolean isSoft() {
    return false;
  }

  protected boolean isUrlEncoded() {
    return false;
  }

  @NotNull
  public Collection<PsiFileSystemItem> getDefaultContexts() {
    // Lazily cached; recomputed only when null.
    if (myDefaultContexts == null) {
      myDefaultContexts = computeDefaultContexts();
    }
    return myDefaultContexts;
  }

  /**
   * Computes the directories this path is resolved against: a custom evaluator from
   * {@link #DEFAULT_PATH_EVALUATOR_OPTION} if present, otherwise module roots for
   * absolute paths or file/context-derived directories for relative ones.
   */
  @NotNull
  public Collection<PsiFileSystemItem> computeDefaultContexts() {
    final PsiFile file = getContainingFile();
    if (file == null) return Collections.emptyList();

    if (myOptions != null) {
      final Function<PsiFile, Collection<PsiFileSystemItem>> value = DEFAULT_PATH_EVALUATOR_OPTION.getValue(myOptions);

      if (value != null) {
        final Collection<PsiFileSystemItem> roots = value.fun(file);
        if (roots != null) {
          for (PsiFileSystemItem root : roots) {
            if (root == null) {
              LOG.error("Default path evaluator " + value + " produced a null root for " + file);
            }
          }
          return roots;
        }
      }
    }

    if (isAbsolutePathReference()) {
      return getAbsoluteTopLevelDirLocations(file);
    }

    return getContextByFile(file);
  }

  /**
   * Returns the original top-level file containing {@link #myElement}, or null (with an
   * error logged) for an invalid element.
   */
  @Nullable
  protected PsiFile getContainingFile() {
    PsiFile cf = myElement.getContainingFile();
    final PsiFile file = InjectedLanguageManager.getInstance(cf.getProject()).getTopLevelFile(cf);
    if (file == null) {
      // BUGFIX: the original logged the error and then dereferenced the null file anyway,
      // throwing an NPE; the method is @Nullable and callers already handle null.
      LOG.error("Invalid element: " + myElement);
      return null;
    }
    return file.getOriginalFile();
  }

  @NotNull
  private Collection<PsiFileSystemItem> getContextByFile(@NotNull PsiFile file) {
    final PsiElement context = file.getContext();
    if (context != null) file = context.getContainingFile();

    if (useIncludingFileAsContext()) {
      final FileContextProvider contextProvider = FileContextProvider.getProvider(file);
      if (contextProvider != null) {
        final Collection<PsiFileSystemItem> folders = contextProvider.getContextFolders(file);
        if (!folders.isEmpty()) {
          return folders;
        }
        final PsiFile contextFile = contextProvider.getContextFile(file);
        if (contextFile != null) {
          return Collections.<PsiFileSystemItem>singleton(contextFile.getParent());
        }
      }
    }

    VirtualFile virtualFile = file.getOriginalFile().getVirtualFile();

    if (virtualFile != null) {
      final FileReferenceHelper[] helpers = FileReferenceHelperRegistrar.getHelpers();
      final ArrayList<PsiFileSystemItem> list = new ArrayList<PsiFileSystemItem>();
      final Project project = file.getProject();
      for (FileReferenceHelper helper : helpers) {
        if (helper.isMine(project, virtualFile)) {
          list.addAll(helper.getContexts(project, virtualFile));
        }
      }
      if (!list.isEmpty()) {
        return list;
      }

      // Fall back to the file's own parent directory.
      final VirtualFile parent = virtualFile.getParent();
      if (parent != null) {
        final PsiDirectory directory = file.getManager().findDirectory(parent);
        if (directory != null) {
          return Collections.<PsiFileSystemItem>singleton(directory);
        }
      }
    }
    return Collections.emptyList();
  }

  public String getPathString() {
    return myPathString;
  }

  public boolean isAbsolutePathReference() {
    return myPathString.startsWith(getSeparatorString());
  }

  protected boolean useIncludingFileAsContext() {
    return true;
  }

  /** Resolves the full path, i.e. the last reference in the set. */
  @Nullable
  public PsiFileSystemItem resolve() {
    final FileReference lastReference = getLastReference();
    return lastReference == null ? null : lastReference.resolve();
  }

  @Nullable
  public FileReference getLastReference() {
    return myReferences == null || myReferences.length == 0 ? null : myReferences[myReferences.length - 1];
  }

  /**
   * Returns the module roots absolute paths in {@code file} are resolved against,
   * as reported by the applicable {@link FileReferenceHelper}s.
   */
  @NotNull
  public static Collection<PsiFileSystemItem> getAbsoluteTopLevelDirLocations(final @NotNull PsiFile file) {

    final VirtualFile virtualFile = file.getVirtualFile();
    if (virtualFile == null) {
      return Collections.emptyList();
    }

    final Project project = file.getProject();
    PsiDirectory parent = file.getParent();
    final Module module = ModuleUtilCore.findModuleForPsiElement(parent == null ? file : parent);
    if (module == null) {
      return Collections.emptyList();
    }

    final FileReferenceHelper[] helpers = FileReferenceHelperRegistrar.getHelpers();
    final ArrayList<PsiFileSystemItem> list = new ArrayList<PsiFileSystemItem>();
    for (FileReferenceHelper helper : helpers) {
      if (helper.isMine(project, virtualFile)) {
        final Collection<PsiFileSystemItem> roots = helper.getRoots(module);
        for (PsiFileSystemItem root : roots) {
          LOG.assertTrue(root != null, "Helper " + helper + " produced a null root for " + file);
        }
        list.addAll(roots);
      }
    }
    if (list.isEmpty()) {
      list.addAll(FileReferenceHelperRegistrar.getNotNullHelper(file).getRoots(module));
    }
    return list;
  }

  /** Filter applied to completion variants; accepts everything by default. */
  protected Condition<PsiFileSystemItem> getReferenceCompletionFilter() {
    return Conditions.alwaysTrue();
  }

  public <Option> void addCustomization(CustomizableReferenceProvider.CustomizationKey<Option> key, Option value) {
    if (myOptions == null) {
      myOptions = new HashMap<CustomizableReferenceProvider.CustomizationKey, Object>(5);
    }
    myOptions.put(key, value);
  }

  public boolean couldBeConvertedTo(final boolean relative) {
    return true;
  }

  public boolean absoluteUrlNeedsStartSlash() {
    return true;
  }

  @NotNull
  public FileType[] getSuitableFileTypes() {
    return mySuitableFileTypes == null ? EMPTY_FILE_TYPES : mySuitableFileTypes;
  }

  public boolean isEmptyPathAllowed() {
    return myEmptyPathAllowed;
  }

  public void setEmptyPathAllowed(boolean emptyPathAllowed) {
    myEmptyPathAllowed = emptyPathAllowed;
  }
}
| |
/**
* The MIT License
* Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package ee.ria.xroad.proxy.conf;
import ee.ria.xroad.common.SystemProperties;
import ee.ria.xroad.common.conf.serverconf.model.AccessRightType;
import ee.ria.xroad.common.conf.serverconf.model.CertificateType;
import ee.ria.xroad.common.conf.serverconf.model.ClientType;
import ee.ria.xroad.common.conf.serverconf.model.GroupMemberType;
import ee.ria.xroad.common.conf.serverconf.model.LocalGroupType;
import ee.ria.xroad.common.conf.serverconf.model.ServerConfType;
import ee.ria.xroad.common.conf.serverconf.model.ServiceType;
import ee.ria.xroad.common.conf.serverconf.model.TspType;
import ee.ria.xroad.common.conf.serverconf.model.WsdlType;
import ee.ria.xroad.common.identifier.ClientId;
import ee.ria.xroad.common.identifier.LocalGroupId;
import ee.ria.xroad.common.identifier.SecurityCategoryId;
import ee.ria.xroad.common.identifier.ServiceId;
import ee.ria.xroad.common.identifier.XRoadId;
import org.hibernate.Query;
import java.util.Date;
import static ee.ria.xroad.common.conf.serverconf.ServerConfDatabaseCtx.doInTransaction;
import static ee.ria.xroad.common.util.CryptoUtils.decodeBase64;
/**
* Contains server conf test utility methods.
*/
public final class TestUtil {
static final String SERVER_CODE = "TestServer";
static final String XROAD_INSTANCE = "XX";
static final String MEMBER_CLASS = "FooClass";
static final String MEMBER_CODE = "BarCode";
static final String SUBSYSTEM = "SubSystem";
static final String CLIENT_STATUS = "status";
static final String CLIENT_CODE = "client";
static final String WSDL_LOCATION = "wsdllocation";
static final String WSDL_URL = "wsdlurl";
static final String SERVICE_URL = "serviceUrl";
static final String SERVICE_VERSION = "v1";
static final String SERVICE_CODE = "serviceCode";
static final String SERVICE_TITLE = "service";
static final int SERVICE_TIMEOUT = 1234;
static final String SECURITY_CATEGORY = "securityCategory";
static final int NUM_CLIENTS = 5;
static final int NUM_WSDLS = 2;
static final int NUM_SERVICES = 4;
static final int NUM_TSPS = 2;
static final String BASE64_CERT =
"MIIDiDCCAnCgAwIBAgIIVYNTWA8JcLwwDQYJKoZIhvcNAQEFBQAwNzERMA8GA1UE"
+ "AwwIQWRtaW5DQTExFTATBgNVBAoMDEVKQkNBIFNhbXBsZTELMAkGA1UEBhMCU0Uw"
+ "HhcNMTIxMTE5MDkxNDIzWhcNMTQxMTE5MDkxNDIzWjATMREwDwYDVQQDDAhwcm9k"
+ "dWNlcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALKNC381RiACCftv"
+ "ApBzk5HD5YHw0u9SOkwcIkn4cZ4eQWrlROnqHTpS9IVSBoOz6pjCx/FwxZTdpw0j"
+ "X+bRYpxnj11I2XKzHfhfa6BvL5VkaDtjGpOdSGMJUtrI6m9jFiYryEmYHWxPlL9V"
+ "pDK0KknevYm2BR23/xDHweBSZ7tkMENU1kXFWLunoBys+W0waR+Z8HH5WNuBLz8X"
+ "z2iz/6KQ5BoWSPJc9P5TXNOBB+5XyjBR2ogoAOtX53OJzu0wMgLpjuJGdfcpy1S9"
+ "ukU27B21i2MfZ6Tjhu9oKrAIgcMWJaHJ/gRX6iX1vXlfhUTkE1ACSfvhZdntKLzN"
+ "TZGEcxsCAwEAAaOBuzCBuDBYBggrBgEFBQcBAQRMMEowSAYIKwYBBQUHMAGGPGh0"
+ "dHA6Ly9pa3MyLXVidW50dS5jeWJlci5lZTo4MDgwL2VqYmNhL3B1YmxpY3dlYi9z"
+ "dGF0dXMvb2NzcDAdBgNVHQ4EFgQUUHtGmEl0Cuh/x/wj+UU5S7Wui48wDAYDVR0T"
+ "AQH/BAIwADAfBgNVHSMEGDAWgBR3LYkuA7b9+NJlOTE1ItBGGujSCTAOBgNVHQ8B"
+ "Af8EBAMCBeAwDQYJKoZIhvcNAQEFBQADggEBACJqqey5Ywoegq+Rjo4v89AN78Ou"
+ "tKtRzQZtuCZP9+ZhY6ivCPK4F8Ne6qpWZb63OLORyQosDAvj6m0iCFMsUZS3nC0U"
+ "DR0VyP2WrOihBOFC4CA7H2X4l7pkSyMN73ZC6icXkbj9H0ix5/Bv3Ug64DK9SixG"
+ "RxMwLxouIzk7WvePQ6ywlhGvZRTXxhr0DwvfZnPXxHDPB2q+9pKzC9h2txG1tyD9"
+ "ffohEC/LKdGrHSe6hnTRedQUN3hcMQqCTc5cHsaB8bh5EaHrib3RR0YsOhjAd6IC"
+ "ms33BZnfNWQuGVTXw74Eu/P1JkwR0ReO+XuxxMp3DW2epMfL44OHWTb6JGY=";
// Utility class; prevent instantiation.
private TestUtil() {
}
/**
 * Creates in-memory test database and fills it with test data.
 * @throws Exception if an error occurs
 */
public static void prepareDB() throws Exception {
    // Point the persistence layer at the HSQLDB test configuration
    // before the database is touched for the first time.
    System.setProperty(
            SystemProperties.DATABASE_PROPERTIES,
            "src/test/resources/hibernate.properties");
    prepareDB(true);
}
/**
 * Creates in-memory test database and fills it with test data.
 * @param clean if true, existing database contents are wiped before
 *              the test data is inserted
 * @throws Exception if an error occurs
 */
public static void prepareDB(boolean clean) throws Exception {
    if (clean) {
        cleanDB();
    }
    // Persist the whole fixture graph in a single transaction.
    doInTransaction(session -> {
        ServerConfType conf = createTestData();
        session.save(conf);
        return null;
    });
}
/**
 * Removes all data from the in-memory test database.
 * @throws Exception if the transaction fails
 */
static void cleanDB() throws Exception {
    doInTransaction(session -> {
        Query q = session.createSQLQuery(
                // Since we are using HSQLDB for tests, we can use
                // special commands to completely wipe out the database
                "TRUNCATE SCHEMA public AND COMMIT");
        q.executeUpdate();
        return null;
    });
}
/**
 * Builds the server configuration object graph used as test data: an
 * owner client plus NUM_CLIENTS - 1 other clients (the last one a
 * subsystem), each with WSDLs, services, ACL entries and a local group,
 * and NUM_TSPS timestamping providers.
 *
 * @return the populated configuration (not yet persisted)
 */
static ServerConfType createTestData() {
    ServerConfType conf = new ServerConfType();
    conf.setServerCode(SERVER_CODE);
    for (int i = 0; i < NUM_CLIENTS; i++) {
        ClientType client = new ClientType();
        client.setConf(conf);
        conf.getClient().add(client);
        if (i == 0) {
            // The first client is the server owner; it gets no status,
            // WSDLs, ACL entries or local groups.
            client.setIdentifier(createTestClientId());
            conf.setOwner(client);
            continue;
        }
        // The last client is a subsystem, the others are plain members.
        ClientId id;
        if (i == NUM_CLIENTS - 1) {
            id = createTestClientId(client(i), SUBSYSTEM);
        } else {
            id = createTestClientId(client(i));
        }
        client.setIdentifier(id);
        client.setClientStatus(CLIENT_STATUS + i);
        // Vary the authentication type per client to cover all branches.
        switch (i) {
            case 1:
                client.setIsAuthentication("SSLAUTH");
                CertificateType ct = new CertificateType();
                ct.setData(decodeBase64(BASE64_CERT));
                client.getIsCert().add(ct);
                break;
            case 2:
                client.setIsAuthentication("SSLNOAUTH");
                break;
            default:
                client.setIsAuthentication("NOSSL");
                break;
        }
        for (int j = 0; j < NUM_WSDLS; j++) {
            WsdlType wsdl = new WsdlType();
            wsdl.setClient(client);
            wsdl.setUrl(WSDL_URL + j);
            wsdl.setWsdlLocation(WSDL_LOCATION + j);
            for (int k = 0; k < NUM_SERVICES; k++) {
                ServiceType service = new ServiceType();
                service.setWsdl(wsdl);
                service.setTitle(SERVICE_TITLE + k);
                service.setServiceCode(service(j, k));
                // One service per WSDL is deliberately left versionless.
                if (k != NUM_SERVICES - 2) {
                    service.setServiceVersion(SERVICE_VERSION);
                }
                service.setUrl(SERVICE_URL + k);
                service.setTimeout(SERVICE_TIMEOUT);
                service.getRequiredSecurityCategory().add(
                        SecurityCategoryId.create(XROAD_INSTANCE,
                                SECURITY_CATEGORY + k));
                service.setSslAuthentication(k % 2 == 0);
                wsdl.getService().add(service);
            }
            // The last WSDL of every client is disabled.
            if (j == NUM_WSDLS - 1) {
                wsdl.setDisabled(true);
                wsdl.setDisabledNotice("disabledNotice");
            }
            client.getWsdl().add(wsdl);
        }
        // Grant access to one fixed service for each kind of subject:
        // the client itself, a foreign client, a service and a local group.
        String serviceCode = service(1, 1);
        client.getAcl().add(
                createAccessRight(serviceCode, client.getIdentifier()));
        ClientId cl = ClientId.create("XX", "memberClass", "memberCode" + i);
        client.getAcl().add(createAccessRight(serviceCode, cl));
        ServiceId se = ServiceId.create("XX", "memberClass",
                "memberCode" + i, null, "serviceCode" + i);
        client.getAcl().add(createAccessRight(serviceCode, se));
        LocalGroupId lg = LocalGroupId.create("testGroup" + i);
        client.getAcl().add(createAccessRight(serviceCode, lg));
        LocalGroupType localGroup = new LocalGroupType();
        localGroup.setGroupCode("localGroup" + i);
        localGroup.setDescription("local group description");
        localGroup.setUpdated(new Date());
        GroupMemberType localGroupMember = new GroupMemberType();
        localGroupMember.setAdded(new Date());
        localGroupMember.setGroupMemberId(cl);
        localGroup.getGroupMember().add(localGroupMember);
        client.getLocalGroup().add(localGroup);
    }
    for (int j = 0; j < NUM_TSPS; j++) {
        TspType tsp = new TspType();
        tsp.setName("tsp" + j);
        tsp.setUrl("tspUtl" + j);
        conf.getTsp().add(tsp);
    }
    return conf;
}
/**
 * Creates a versionless service identifier in the default X-Road
 * instance and member class.
 */
static ServiceId createTestServiceId(String memberCode,
        String serviceCode) {
    return ServiceId.create(XROAD_INSTANCE, MEMBER_CLASS, memberCode, null,
            serviceCode);
}

/**
 * Creates a versioned service identifier in the default X-Road
 * instance and member class.
 */
static ServiceId createTestServiceId(String memberCode, String serviceCode,
        String serviceVersion) {
    return ServiceId.create(XROAD_INSTANCE, MEMBER_CLASS, memberCode, null,
            serviceCode, serviceVersion);
}

/**
 * Creates a versioned service identifier belonging to the given client.
 */
static ServiceId createTestServiceId(ClientId member, String serviceCode,
        String serviceVersion) {
    return ServiceId.create(member, serviceCode, serviceVersion);
}
/** @return the identifier of the default test client (the server owner). */
static ClientId createTestClientId() {
    return ClientId.create(XROAD_INSTANCE, MEMBER_CLASS, MEMBER_CODE);
}

/** @return a member identifier with the given member code. */
static ClientId createTestClientId(String memberCode) {
    return createTestClientId(memberCode, null);
}

/**
 * @return a client identifier in the default instance and member class;
 *     a plain member when {@code subsystemCode} is null
 */
static ClientId createTestClientId(String memberCode,
        String subsystemCode) {
    return ClientId.create(XROAD_INSTANCE, MEMBER_CLASS, memberCode,
            subsystemCode);
}
/** @return the generated member code for client index {@code idx}. */
static String client(int idx) {
    return String.format("%s-%d", CLIENT_CODE, idx);
}

/** @return the generated service code for the given WSDL/service indexes. */
static String service(int wsdlIdx, int serviceIdx) {
    return String.format("%s-%d-%d", SERVICE_CODE, wsdlIdx, serviceIdx);
}
/**
 * Creates an access right granting the given subject access to the
 * given service code, with rights given at the current time.
 */
static AccessRightType createAccessRight(String serviceCode,
        XRoadId xRoadId) {
    AccessRightType right = new AccessRightType();
    right.setSubjectId(xRoadId);
    right.setServiceCode(serviceCode);
    right.setRightsGiven(new Date());
    return right;
}
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.head.ScriptRuntime;
import com.google.javascript.rhino.jstype.TernaryValue;
/**
* Peephole optimization to fold constants (e.g. x + 1 + 7 --> x + 8).
*
*/
class PeepholeFoldConstants extends AbstractPeepholeOptimization {
// TODO(johnlenz): optimizations should not be emitting errors. Move these
// to a check pass.

// Diagnostics reported when a fold is structurally possible but the
// operand values make it unsafe (out-of-range or fractional bitwise
// operands, bad indexes, etc.).
static final DiagnosticType INVALID_GETELEM_INDEX_ERROR =
    DiagnosticType.warning(
        "JSC_INVALID_GETELEM_INDEX_ERROR",
        "Array index not integer: {0}");

static final DiagnosticType INDEX_OUT_OF_BOUNDS_ERROR =
    DiagnosticType.warning(
        "JSC_INDEX_OUT_OF_BOUNDS_ERROR",
        "Array index out of bounds: {0}");

static final DiagnosticType NEGATING_A_NON_NUMBER_ERROR =
    DiagnosticType.warning(
        "JSC_NEGATING_A_NON_NUMBER_ERROR",
        "Can't negate non-numeric value: {0}");

static final DiagnosticType BITWISE_OPERAND_OUT_OF_RANGE =
    DiagnosticType.warning(
        "JSC_BITWISE_OPERAND_OUT_OF_RANGE",
        "Operand out of range, bitwise operation will lose information: {0}");

static final DiagnosticType SHIFT_AMOUNT_OUT_OF_BOUNDS =
    DiagnosticType.warning(
        "JSC_SHIFT_AMOUNT_OUT_OF_BOUNDS",
        "Shift amount out of bounds: {0}");

static final DiagnosticType FRACTIONAL_BITWISE_OPERAND =
    DiagnosticType.warning(
        "JSC_FRACTIONAL_BITWISE_OPERAND",
        "Fractional bitwise operand: {0}");

// 2^53 is the largest integer contiguously representable in an IEEE
// double; arithmetic folding is limited to results within this range.
private static final double MAX_FOLD_NUMBER = Math.pow(2, 53);

private final boolean late;

/**
 * @param late When late is false, this means we are currently running before
 * most of the other optimizations. In this case we would avoid optimizations
 * that would make the code harder to analyze. When this is true, we would
 * do anything to minimize for size.
 */
PeepholeFoldConstants(boolean late) {
  this.late = late;
}
/**
 * Dispatches {@code subtree} to the folding routine matching its token
 * type.  Returns the node that replaced the subtree, or the subtree
 * itself if nothing was folded.
 */
@Override
Node optimizeSubtree(Node subtree) {
  switch(subtree.getType()) {
    case Token.NEW:
      return tryFoldCtorCall(subtree);

    case Token.TYPEOF:
      return tryFoldTypeof(subtree);

    // Unary operators: normalize the operand to a number first, then
    // try to fold the operation itself.
    case Token.NOT:
    case Token.POS:
    case Token.NEG:
    case Token.BITNOT:
      tryReduceOperandsForOp(subtree);
      return tryFoldUnaryOperator(subtree);

    case Token.VOID:
      return tryReduceVoid(subtree);

    default:
      tryReduceOperandsForOp(subtree);
      return tryFoldBinaryOperator(subtree);
  }
}
/**
 * Folds a binary operator when both operands are present.  Nodes with
 * fewer than two children are returned unchanged.
 */
private Node tryFoldBinaryOperator(Node subtree) {
  Node left = subtree.getFirstChild();

  if (left == null) {
    return subtree;
  }

  Node right = left.getNext();

  if (right == null) {
    return subtree;
  }

  // If we've reached here, node is truly a binary operator.
  switch(subtree.getType()) {
    case Token.GETPROP:
      return tryFoldGetProp(subtree, left, right);

    case Token.GETELEM:
      return tryFoldGetElem(subtree, left, right);

    case Token.INSTANCEOF:
      return tryFoldInstanceof(subtree, left, right);

    case Token.AND:
    case Token.OR:
      return tryFoldAndOr(subtree, left, right);

    case Token.LSH:
    case Token.RSH:
    case Token.URSH:
      return tryFoldShift(subtree, left, right);

    case Token.ASSIGN:
      return tryFoldAssign(subtree, left, right);

    case Token.ASSIGN_BITOR:
    case Token.ASSIGN_BITXOR:
    case Token.ASSIGN_BITAND:
    case Token.ASSIGN_LSH:
    case Token.ASSIGN_RSH:
    case Token.ASSIGN_URSH:
    case Token.ASSIGN_ADD:
    case Token.ASSIGN_SUB:
    case Token.ASSIGN_MUL:
    case Token.ASSIGN_DIV:
    case Token.ASSIGN_MOD:
      return tryUnfoldAssignOp(subtree, left, right);

    case Token.ADD:
      return tryFoldAdd(subtree, left, right);

    case Token.SUB:
    case Token.DIV:
    case Token.MOD:
      return tryFoldArithmeticOp(subtree, left, right);

    case Token.MUL:
    case Token.BITAND:
    case Token.BITOR:
    case Token.BITXOR:
      // These operators are associative/commutative, so when direct
      // folding fails there may still be a foldable left child.
      Node result = tryFoldArithmeticOp(subtree, left, right);
      if (result != subtree) {
        return result;
      }
      return tryFoldLeftChildOp(subtree, left, right);

    case Token.LT:
    case Token.GT:
    case Token.LE:
    case Token.GE:
    case Token.EQ:
    case Token.NE:
    case Token.SHEQ:
    case Token.SHNE:
      return tryFoldComparison(subtree, left, right);

    default:
      return subtree;
  }
}
/**
 * Normalizes {@code void <expr>} to {@code void 0} when the operand is
 * not already the literal 0 and evaluating the expression has no side
 * effects.
 */
private Node tryReduceVoid(Node n) {
  Node operand = n.getFirstChild();
  boolean alreadyZero = operand.isNumber() && operand.getDouble() == 0.0;
  if (!alreadyZero && !mayHaveSideEffects(n)) {
    n.replaceChild(operand, IR.number(0));
    reportCodeChange();
  }
  return n;
}
/**
 * Where safe, converts the operands of {@code n} into number literals
 * before the operation itself is folded.
 */
private void tryReduceOperandsForOp(Node n) {
  switch (n.getType()) {
    case Token.ADD:
      Node left = n.getFirstChild();
      Node right = n.getLastChild();
      // ADD doubles as string concatenation, so only numberize when
      // neither operand can be a string.
      if (!NodeUtil.mayBeString(left) && !NodeUtil.mayBeString(right)) {
        tryConvertOperandsToNumber(n);
      }
      break;
    case Token.ASSIGN_BITOR:
    case Token.ASSIGN_BITXOR:
    case Token.ASSIGN_BITAND:
      // TODO(johnlenz): convert these to integers.
    case Token.ASSIGN_LSH:
    case Token.ASSIGN_RSH:
    case Token.ASSIGN_URSH:
    case Token.ASSIGN_SUB:
    case Token.ASSIGN_MUL:
    case Token.ASSIGN_MOD:
    case Token.ASSIGN_DIV:
      // Compound assignments: only the right side is a value; the left
      // side is the assignment target and must not be replaced.
      tryConvertToNumber(n.getLastChild());
      break;
    case Token.BITNOT:
    case Token.BITOR:
    case Token.BITXOR:
    case Token.BITAND:
    case Token.LSH:
    case Token.RSH:
    case Token.URSH:
    case Token.SUB:
    case Token.MUL:
    case Token.MOD:
    case Token.DIV:
    case Token.POS:
    case Token.NEG:
      // Purely numeric operators: every operand may be converted.
      tryConvertOperandsToNumber(n);
      break;
  }
}
/**
 * Attempts to convert every child of {@code n} into a number literal.
 */
private void tryConvertOperandsToNumber(Node n) {
  Node next;
  for (Node c = n.getFirstChild(); c != null; c = next) {
    // Grab the sibling before converting: tryConvertToNumber may
    // replace c in the tree, detaching it from its siblings.
    next = c.getNext();
    tryConvertToNumber(c);
  }
}
/**
 * Replaces {@code n} with an equivalent number literal when its numeric
 * value is statically known.  For AND/OR/COMMA/HOOK, recurses into the
 * child positions whose value can reach the result.
 */
private void tryConvertToNumber(Node n) {
  switch (n.getType()) {
    case Token.NUMBER:
      // Nothing to do
      return;
    case Token.AND:
    case Token.OR:
    case Token.COMMA:
      // Only the last operand's value can be the expression result.
      tryConvertToNumber(n.getLastChild());
      return;
    case Token.HOOK:
      // Both branches of the conditional may be the result.
      tryConvertToNumber(n.getChildAtIndex(1));
      tryConvertToNumber(n.getLastChild());
      return;
    case Token.NAME:
      // Of all names, only "undefined" has a known numeric value (NaN).
      if (!NodeUtil.isUndefined(n)) {
        return;
      }
      break;
  }

  Double result = NodeUtil.getNumberValue(n);
  if (result == null) {
    return;
  }

  double value = result;

  Node replacement = NodeUtil.numberNode(value, n);
  if (replacement.isEquivalentTo(n)) {
    return;
  }

  n.getParent().replaceChild(n, replacement);
  reportCodeChange();
}
/**
 * Folds 'typeof(foo)' if foo is a literal, e.g.
 * typeof("bar") --> "string"
 * typeof(6) --> "number"
 *
 * @return the replacement string node, or the original node if the
 *     operand is not a literal whose type is statically known
 */
private Node tryFoldTypeof(Node originalTypeofNode) {
  Preconditions.checkArgument(originalTypeofNode.isTypeOf());

  Node argumentNode = originalTypeofNode.getFirstChild();
  if (argumentNode == null || !NodeUtil.isLiteralValue(argumentNode, true)) {
    return originalTypeofNode;
  }

  String typeNameString = null;

  switch (argumentNode.getType()) {
    case Token.FUNCTION:
      typeNameString = "function";
      break;
    case Token.STRING:
      typeNameString = "string";
      break;
    case Token.NUMBER:
      typeNameString = "number";
      break;
    case Token.TRUE:
    case Token.FALSE:
      typeNameString = "boolean";
      break;
    case Token.NULL:
    case Token.OBJECTLIT:
    case Token.ARRAYLIT:
      typeNameString = "object";
      break;
    case Token.VOID:
      typeNameString = "undefined";
      break;
    case Token.NAME:
      // We assume here that programs don't change the value of the
      // keyword undefined to something other than the value undefined.
      if ("undefined".equals(argumentNode.getString())) {
        typeNameString = "undefined";
      }
      break;
  }

  if (typeNameString != null) {
    Node newNode = IR.string(typeNameString);
    originalTypeofNode.getParent().replaceChild(originalTypeofNode, newNode);
    reportCodeChange();
    return newNode;
  }

  return originalTypeofNode;
}
/**
 * Folds a unary operator (NOT, POS, NEG, BITNOT) applied to an operand
 * whose boolean value is statically known.  Reports a warning (and
 * leaves the node unchanged) when the operand is numeric-only but the
 * value is out of range, fractional, or not a number.
 */
private Node tryFoldUnaryOperator(Node n) {
  Preconditions.checkState(n.hasOneChild());

  Node left = n.getFirstChild();
  Node parent = n.getParent();

  if (left == null) {
    return n;
  }

  TernaryValue leftVal = NodeUtil.getPureBooleanValue(left);
  if (leftVal == TernaryValue.UNKNOWN) {
    return n;
  }

  switch (n.getType()) {
    case Token.NOT:
      // Don't fold !0 and !1 back to false.
      if (late && left.isNumber()) {
        double numValue = left.getDouble();
        if (numValue == 0 || numValue == 1) {
          return n;
        }
      }
      Node replacementNode = NodeUtil.booleanNode(!leftVal.toBoolean(true));
      parent.replaceChild(n, replacementNode);
      reportCodeChange();
      return replacementNode;
    case Token.POS:
      if (NodeUtil.isNumericResult(left)) {
        // POS does nothing to numeric values.
        parent.replaceChild(n, left.detachFromParent());
        reportCodeChange();
        return left;
      }
      return n;
    case Token.NEG:
      if (left.isName()) {
        if (left.getString().equals("Infinity")) {
          // "-Infinity" is valid and a literal, don't modify it.
          return n;
        } else if (left.getString().equals("NaN")) {
          // "-NaN" is "NaN".
          n.removeChild(left);
          parent.replaceChild(n, left);
          reportCodeChange();
          return left;
        }
      }

      if (left.isNumber()) {
        double negNum = -left.getDouble();

        Node negNumNode = IR.number(negNum);
        parent.replaceChild(n, negNumNode);
        reportCodeChange();
        return negNumNode;
      } else {
        // left is not a number node, so do not replace, but warn the
        // user because they can't be doing anything good
        report(NEGATING_A_NON_NUMBER_ERROR, left);
        return n;
      }
    case Token.BITNOT:
      try {
        double val = left.getDouble();
        // Only fold when the value fits in a 32-bit int and has no
        // fractional part; otherwise ~ would lose information.
        if (val >= Integer.MIN_VALUE && val <= Integer.MAX_VALUE) {
          int intVal = (int) val;
          if (intVal == val) {
            Node notIntValNode = IR.number(~intVal);
            parent.replaceChild(n, notIntValNode);
            reportCodeChange();
            return notIntValNode;
          } else {
            report(FRACTIONAL_BITWISE_OPERAND, left);
            return n;
          }
        } else {
          report(BITWISE_OPERAND_OUT_OF_RANGE, left);
          return n;
        }
      } catch (UnsupportedOperationException ex) {
        // left is not a number node, so do not replace, but warn the
        // user because they can't be doing anything good
        report(NEGATING_A_NON_NUMBER_ERROR, left);
        return n;
      }
    default:
      return n;
  }
}
/**
 * Try to fold {@code left instanceof right} into {@code true}
 * or {@code false}.
 */
private Node tryFoldInstanceof(Node n, Node left, Node right) {
  Preconditions.checkArgument(n.isInstanceOf());

  // TODO(johnlenz) Use type information if available to fold
  // instanceof.
  if (NodeUtil.isLiteralValue(left, true)
      && !mayHaveSideEffects(right)) {

    Node replacementNode = null;

    if (NodeUtil.isImmutableValue(left)) {
      // Non-object types are never instances.
      replacementNode = IR.falseNode();
    } else if (right.isName()
        && "Object".equals(right.getString())) {
      // Any object literal is an instance of Object.
      replacementNode = IR.trueNode();
    }

    if (replacementNode != null) {
      n.getParent().replaceChild(n, replacementNode);
      reportCodeChange();
      return replacementNode;
    }
  }

  return n;
}
/**
 * Late in the compile, collapses {@code x = x + y} into {@code x += y}
 * (and likewise for the other binary operators).  This is the inverse of
 * tryUnfoldAssignOp and is only applied when minimizing for size.
 */
private Node tryFoldAssign(Node n, Node left, Node right) {
  Preconditions.checkArgument(n.isAssign());

  if (!late) {
    return n;
  }

  // Tries to convert x = x + y -> x += y;
  if (!right.hasChildren() ||
      right.getFirstChild().getNext() != right.getLastChild()) {
    // RHS must have two children.
    return n;
  }

  if (mayHaveSideEffects(left)) {
    return n;
  }

  // The assignment target must appear on one side of the RHS; for
  // non-commutative operators it must be the left operand.
  Node newRight;
  if (areNodesEqualForInlining(left, right.getFirstChild())) {
    newRight = right.getLastChild();
  } else if (NodeUtil.isCommutative(right.getType()) &&
      areNodesEqualForInlining(left, right.getLastChild())) {
    newRight = right.getFirstChild();
  } else {
    return n;
  }

  int newType = -1;
  switch (right.getType()) {
    case Token.ADD:
      newType = Token.ASSIGN_ADD;
      break;
    case Token.BITAND:
      newType = Token.ASSIGN_BITAND;
      break;
    case Token.BITOR:
      newType = Token.ASSIGN_BITOR;
      break;
    case Token.BITXOR:
      newType = Token.ASSIGN_BITXOR;
      break;
    case Token.DIV:
      newType = Token.ASSIGN_DIV;
      break;
    case Token.LSH:
      newType = Token.ASSIGN_LSH;
      break;
    case Token.MOD:
      newType = Token.ASSIGN_MOD;
      break;
    case Token.MUL:
      newType = Token.ASSIGN_MUL;
      break;
    case Token.RSH:
      newType = Token.ASSIGN_RSH;
      break;
    case Token.SUB:
      newType = Token.ASSIGN_SUB;
      break;
    case Token.URSH:
      newType = Token.ASSIGN_URSH;
      break;
    default:
      return n;
  }

  Node newNode = new Node(newType,
      left.detachFromParent(), newRight.detachFromParent());
  n.getParent().replaceChild(n, newNode);

  reportCodeChange();

  return newNode;
}
/**
 * Early in the compile, expands {@code x += y} into {@code x = x + y}
 * (and likewise for the other compound assignments) to expose more
 * folding opportunities.  The inverse transform is done late by
 * tryFoldAssign.
 */
private Node tryUnfoldAssignOp(Node n, Node left, Node right) {
  if (late) {
    return n;
  }

  if (!n.hasChildren() ||
      n.getFirstChild().getNext() != n.getLastChild()) {
    return n;
  }

  if (mayHaveSideEffects(left)) {
    return n;
  }

  // Tries to convert x += y -> x = x + y;
  int op = NodeUtil.getOpFromAssignmentOp(n);
  Node replacement = IR.assign(left.detachFromParent(),
      new Node(op, left.cloneTree(), right.detachFromParent())
          .srcref(n));
  n.getParent().replaceChild(n, replacement);
  reportCodeChange();

  return replacement;
}
/**
 * Try to fold a AND/OR node whose left operand has a statically known
 * boolean value.
 */
private Node tryFoldAndOr(Node n, Node left, Node right) {
  Node parent = n.getParent();

  Node result = null;

  int type = n.getType();

  TernaryValue leftVal = NodeUtil.getImpureBooleanValue(left);

  if (leftVal != TernaryValue.UNKNOWN) {
    boolean lval = leftVal.toBoolean(true);

    // (TRUE || x) => TRUE (also, (3 || x) => 3)
    // (FALSE && x) => FALSE
    if (lval && type == Token.OR ||
        !lval && type == Token.AND) {
      result = left;

    } else if (!mayHaveSideEffects(left)) {
      // (FALSE || x) => x
      // (TRUE && x) => x
      result = right;
    }
  }

  // Note: Right hand side folding is handled by
  // PeepholeSubstituteAlternateSyntax#tryMinimizeCondition

  if (result != null) {
    // Fold it!
    n.removeChild(result);
    parent.replaceChild(n, result);
    reportCodeChange();

    return result;
  } else {
    return n;
  }
}
/**
 * Expressions such as [foo() + 'a' + 'b'] generate parse trees
 * where no node has two const children ((foo() + 'a') + 'b'), so
 * tryFoldAdd() won't fold it -- tryFoldLeftChildAdd() will (for Strings).
 * Specifically, it folds Add expressions where:
 * - The left child is also an add expression
 * - The right child is a constant value
 * - The left child's right child is a STRING constant.
 * (and symmetrically: constant left child, add-expression right child
 * whose left child is a STRING constant.)
 */
private Node tryFoldChildAddString(Node n, Node left, Node right) {

  if (NodeUtil.isLiteralValue(right, false) &&
      left.isAdd()) {

    Node ll = left.getFirstChild();
    Node lr = ll.getNext();

    // Left's right child MUST be a string. We would not want to fold
    // foo() + 2 + 'a' because we don't know what foo() will return, and
    // therefore we don't know if left is a string concat, or a numeric add.
    if (lr.isString()) {
      String leftString = NodeUtil.getStringValue(lr);
      String rightString = NodeUtil.getStringValue(right);
      if (leftString != null && rightString != null) {
        // (ll + lr) + right  =>  ll + (lr . right)
        left.removeChild(ll);
        String result = leftString + rightString;
        n.replaceChild(left, ll);
        n.replaceChild(right, IR.string(result));
        reportCodeChange();
        return n;
      }
    }
  }

  if (NodeUtil.isLiteralValue(left, false) &&
      right.isAdd()) {

    Node rl = right.getFirstChild();
    Node rr = right.getLastChild();

    // Right's left child MUST be a string. We would not want to fold
    // 'a' + foo() + 2 because we don't know what foo() will return, and
    // therefore we don't know if right is a string concat, or a numeric add.
    if (rl.isString()) {
      String leftString = NodeUtil.getStringValue(left);
      String rightString = NodeUtil.getStringValue(rl);
      if (leftString != null && rightString != null) {
        // left + (rl + rr)  =>  (left . rl) + rr
        right.removeChild(rr);
        String result = leftString + rightString;
        n.replaceChild(right, rr);
        n.replaceChild(left, IR.string(result));
        reportCodeChange();
        return n;
      }
    }
  }

  return n;
}
/**
 * Try to fold an ADD node with constant operands into a single string
 * literal when at least one side is a string.
 */
private Node tryFoldAddConstantString(Node n, Node left, Node right) {
  if (left.isString() ||
      right.isString()) {
    // Add strings.
    String leftString = NodeUtil.getStringValue(left);
    String rightString = NodeUtil.getStringValue(right);
    if (leftString != null && rightString != null) {
      Node newStringNode = IR.string(leftString + rightString);
      n.getParent().replaceChild(n, newStringNode);
      reportCodeChange();
      return newStringNode;
    }
  }

  return n;
}
/**
 * Tries to fold an arithmetic binary operator with constant operands.
 *
 * @return the replacement node, or {@code n} when no fold was possible
 */
private Node tryFoldArithmeticOp(Node n, Node left, Node right) {
  Node folded = performArithmeticOp(n.getType(), left, right);
  if (folded == null) {
    return n;
  }
  folded.copyInformationFromForTree(n);
  n.getParent().replaceChild(n, folded);
  reportCodeChange();
  return folded;
}
/**
 * Evaluates an arithmetic/bitwise binary operator on two nodes with
 * statically known numeric values.
 *
 * @return a new number node holding the result, or null if the
 *     operation cannot (or should not) be folded
 */
private Node performArithmeticOp(int opType, Node left, Node right) {
  // Unlike other operations, ADD operands are not always converted
  // to Number.
  if (opType == Token.ADD
      && (NodeUtil.mayBeString(left, false)
          || NodeUtil.mayBeString(right, false))) {
    return null;
  }

  double result;

  // TODO(johnlenz): Handle NaN with unknown value. BIT ops convert NaN
  // to zero so this is a little awkward here.

  Double lValObj = NodeUtil.getNumberValue(left);
  if (lValObj == null) {
    return null;
  }
  Double rValObj = NodeUtil.getNumberValue(right);
  if (rValObj == null) {
    return null;
  }

  double lval = lValObj;
  double rval = rValObj;

  switch (opType) {
    case Token.BITAND:
      result = ScriptRuntime.toInt32(lval) & ScriptRuntime.toInt32(rval);
      break;
    case Token.BITOR:
      result = ScriptRuntime.toInt32(lval) | ScriptRuntime.toInt32(rval);
      break;
    case Token.BITXOR:
      result = ScriptRuntime.toInt32(lval) ^ ScriptRuntime.toInt32(rval);
      break;
    case Token.ADD:
      result = lval + rval;
      break;
    case Token.SUB:
      result = lval - rval;
      break;
    case Token.MUL:
      result = lval * rval;
      break;
    case Token.MOD:
      // Division/modulus by a known zero is left alone (folding it
      // would hide a likely bug and produce Infinity/NaN).
      if (rval == 0) {
        return null;
      }
      result = lval % rval;
      break;
    case Token.DIV:
      if (rval == 0) {
        return null;
      }
      result = lval / rval;
      break;
    default:
      throw new Error("Unexpected arithmetic operator");
  }

  // TODO(johnlenz): consider removing the result length check.
  // length of the left and right value plus 1 byte for the operator.
  if ((String.valueOf(result).length() <=
      String.valueOf(lval).length() + String.valueOf(rval).length() + 1

      // Do not try to fold arithmetic for numbers > 2^53. After that
      // point, fixed-point math starts to break down and become inaccurate.
      && Math.abs(result) <= MAX_FOLD_NUMBER)
      || Double.isNaN(result)
      || result == Double.POSITIVE_INFINITY
      || result == Double.NEGATIVE_INFINITY) {
    return NodeUtil.numberNode(result, null);
  }
  return null;
}
/**
 * Expressions such as [foo() * 10 * 20] generate parse trees
 * where no node has two const children ((foo() * 10) * 20), so
 * performArithmeticOp() won't fold it -- tryFoldLeftChildOp() will.
 * Specifically, it folds associative expressions where:
 * - The left child is also an associative expression of the same type.
 * - The right child is a NUMBER constant.
 * - The left child's right child is a NUMBER constant.
 */
private Node tryFoldLeftChildOp(Node n, Node left, Node right) {
  int opType = n.getType();
  Preconditions.checkState(
      (NodeUtil.isAssociative(opType) && NodeUtil.isCommutative(opType))
      || n.isAdd());

  Preconditions.checkState(!n.isAdd() || !NodeUtil.mayBeString(n));

  // Use getNumberValue to handle constants like "NaN" and "Infinity"
  // other values are converted to numbers elsewhere.
  Double rightValObj = NodeUtil.getNumberValue(right);
  if (rightValObj != null && left.getType() == opType) {
    Preconditions.checkState(left.getChildCount() == 2);

    Node ll = left.getFirstChild();
    Node lr = ll.getNext();

    // Commutativity lets us combine the right operand with either of
    // the left expression's children; try both.
    Node valueToCombine = ll;
    Node replacement = performArithmeticOp(opType, valueToCombine, right);
    if (replacement == null) {
      valueToCombine = lr;
      replacement = performArithmeticOp(opType, valueToCombine, right);
    }
    if (replacement != null) {
      // Remove the child that has been combined
      left.removeChild(valueToCombine);
      // Replace the left op with the remaining child.
      n.replaceChild(left, left.removeFirstChild());
      // New "-Infinity" node need location info explicitly
      // added.
      replacement.copyInformationFromForTree(right);
      n.replaceChild(right, replacement);
      reportCodeChange();
    }
  }
  return n;
}
/**
 * Folds an ADD node, dispatching between string concatenation and
 * numeric addition based on whether the result may be a string.
 */
private Node tryFoldAdd(Node node, Node left, Node right) {
  Preconditions.checkArgument(node.isAdd());

  if (NodeUtil.mayBeString(node, true)) {
    if (NodeUtil.isLiteralValue(left, false) &&
        NodeUtil.isLiteralValue(right, false)) {
      // '6' + 7
      return tryFoldAddConstantString(node, left, right);
    } else {
      // a + 7 or 6 + a
      return tryFoldChildAddString(node, left, right);
    }
  } else {
    // Try arithmetic add
    Node result = tryFoldArithmeticOp(node, left, right);
    if (result != node) {
      return result;
    }
    return tryFoldLeftChildOp(node, left, right);
  }
}
/**
 * Try to fold shift operations (LSH, RSH, URSH) when both operands are
 * number literals.  Reports a warning and leaves the node unchanged for
 * out-of-range, out-of-bounds or fractional operands.
 */
private Node tryFoldShift(Node n, Node left, Node right) {
  if (left.isNumber() &&
      right.isNumber()) {
    double result;
    double lval = left.getDouble();
    double rval = right.getDouble();

    // check ranges. We do not do anything that would clip the double to
    // a 32-bit range, since the user likely does not intend that.
    if (!(lval >= Integer.MIN_VALUE && lval <= Integer.MAX_VALUE)) {
      report(BITWISE_OPERAND_OUT_OF_RANGE, left);
      return n;
    }

    // only the lower 5 bits are used when shifting, so don't do anything
    // if the shift amount is outside [0,32)
    if (!(rval >= 0 && rval < 32)) {
      report(SHIFT_AMOUNT_OUT_OF_BOUNDS, right);
      return n;
    }

    // Convert the numbers to ints
    int lvalInt = (int) lval;
    if (lvalInt != lval) {
      report(FRACTIONAL_BITWISE_OPERAND, left);
      return n;
    }

    int rvalInt = (int) rval;
    if (rvalInt != rval) {
      report(FRACTIONAL_BITWISE_OPERAND, right);
      return n;
    }

    switch (n.getType()) {
      case Token.LSH:
        result = lvalInt << rvalInt;
        break;
      case Token.RSH:
        result = lvalInt >> rvalInt;
        break;
      case Token.URSH:
        // JavaScript handles zero shifts on signed numbers differently than
        // Java as an Java int can not represent the unsigned 32-bit number
        // where JavaScript can so use a long here.
        long lvalLong = lvalInt & 0xffffffffL;
        result = lvalLong >>> rvalInt;
        break;
      default:
        throw new AssertionError("Unknown shift operator: " +
            Token.name(n.getType()));
    }

    Node newNumber = IR.number(result);
    n.getParent().replaceChild(n, newNumber);
    reportCodeChange();

    return newNumber;
  }

  return n;
}
/**
 * Try to fold comparison nodes, e.g ==
 * Replaces the node with a boolean literal when evaluateComparison can
 * decide the result statically.
 */
@SuppressWarnings("fallthrough")
private Node tryFoldComparison(Node n, Node left, Node right) {
  TernaryValue result = evaluateComparison(n.getType(), left, right);
  if (result == TernaryValue.UNKNOWN) {
    return n;
  }

  Node newNode = NodeUtil.booleanNode(result.toBoolean(true));
  n.getParent().replaceChild(n, newNode);
  reportCodeChange();

  return newNode;
}
/**
 * Statically evaluates a comparison operator applied to {@code left} and
 * {@code right}, dispatching on the (normalized) type of the left node.
 * Returns UNKNOWN whenever the result cannot be proven from the literal
 * structure of the operands.
 */
static TernaryValue evaluateComparison(int op, Node left, Node right) {
  boolean leftLiteral = NodeUtil.isLiteralValue(left, true);
  boolean rightLiteral = NodeUtil.isLiteralValue(right, true);

  if (!leftLiteral || !rightLiteral) {
    // We only handle literal operands for LT and GT.
    if (op != Token.GT && op != Token.LT) {
      return TernaryValue.UNKNOWN;
    }
  }

  boolean undefinedRight = NodeUtil.isUndefined(right) && rightLiteral;
  boolean nullRight = right.isNull();
  int lhType = getNormalizedNodeType(left);
  int rhType = getNormalizedNodeType(right);
  switch (lhType) {
    case Token.VOID:
      if (!leftLiteral) {
        return TernaryValue.UNKNOWN;
      } else if (!rightLiteral) {
        return TernaryValue.UNKNOWN;
      } else {
        return TernaryValue.forBoolean(compareToUndefined(right, op));
      }

    case Token.NULL:
      if (rightLiteral && isEqualityOp(op)) {
        return TernaryValue.forBoolean(compareToNull(right, op));
      }
      // fallthrough: relational comparisons on null behave like the
      // boolean cases below.
    case Token.TRUE:
    case Token.FALSE:
      if (undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }
      if (rhType != Token.TRUE &&
          rhType != Token.FALSE &&
          rhType != Token.NULL) {
        return TernaryValue.UNKNOWN;
      }
      switch (op) {
        case Token.SHEQ:
        case Token.EQ:
          return TernaryValue.forBoolean(lhType == rhType);

        case Token.SHNE:
        case Token.NE:
          return TernaryValue.forBoolean(lhType != rhType);

        case Token.GE:
        case Token.LE:
        case Token.GT:
        case Token.LT:
          return compareAsNumbers(op, left, right);
      }
      return TernaryValue.UNKNOWN;

    case Token.THIS:
      if (!right.isThis()) {
        return TernaryValue.UNKNOWN;
      }
      switch (op) {
        case Token.SHEQ:
        case Token.EQ:
          return TernaryValue.TRUE;

        case Token.SHNE:
        case Token.NE:
          return TernaryValue.FALSE;
      }

      // We can only handle == and != here.
      // GT, LT, GE, LE depend on the type of "this" and how it will
      // be converted to number. The results are different depending on
      // whether it is a string, NaN or other number value.
      return TernaryValue.UNKNOWN;

    case Token.STRING:
      if (undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }
      if (nullRight && isEqualityOp(op)) {
        return TernaryValue.forBoolean(compareToNull(left, op));
      }
      if (Token.STRING != right.getType()) {
        return TernaryValue.UNKNOWN;  // Only eval if they are the same type
      }
      switch (op) {
        case Token.SHEQ:
        case Token.EQ:
          return areStringsEqual(left.getString(), right.getString());

        case Token.SHNE:
        case Token.NE:
          return areStringsEqual(left.getString(), right.getString()).not();
      }
      return TernaryValue.UNKNOWN;

    case Token.NUMBER:
      if (undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }
      if (nullRight && isEqualityOp(op)) {
        return TernaryValue.forBoolean(compareToNull(left, op));
      }
      if (Token.NUMBER != right.getType()) {
        return TernaryValue.UNKNOWN;  // Only eval if they are the same type
      }
      return compareAsNumbers(op, left, right);

    case Token.NAME:
      if (leftLiteral && undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }

      if (rightLiteral) {
        boolean undefinedLeft = (left.getString().equals("undefined"));
        if (undefinedLeft) {
          return TernaryValue.forBoolean(compareToUndefined(right, op));
        }
        if (leftLiteral && nullRight && isEqualityOp(op)) {
          return TernaryValue.forBoolean(compareToNull(left, op));
        }
      }

      if (Token.NAME != right.getType()) {
        return TernaryValue.UNKNOWN;  // Only eval if they are the same type
      }
      String ln = left.getString();
      String rn = right.getString();
      if (!ln.equals(rn)) {
        return TernaryValue.UNKNOWN;  // Not the same value name.
      }

      switch (op) {
        // If we knew the named value wouldn't be NaN, it would be nice
        // to handle EQ,NE,LE,GE,SHEQ, and SHNE.
        case Token.LT:
        case Token.GT:
          return TernaryValue.FALSE;
      }

      return TernaryValue.UNKNOWN;  // don't handle that op

    case Token.NEG:
      if (leftLiteral) {
        if (undefinedRight) {
          return TernaryValue.forBoolean(compareToUndefined(left, op));
        }
        if (nullRight && isEqualityOp(op)) {
          return TernaryValue.forBoolean(compareToNull(left, op));
        }
      }
      // Nothing else for now.
      return TernaryValue.UNKNOWN;

    case Token.ARRAYLIT:
    case Token.OBJECTLIT:
    case Token.REGEXP:
    case Token.FUNCTION:
      if (leftLiteral) {
        if (undefinedRight) {
          return TernaryValue.forBoolean(compareToUndefined(left, op));
        }
        if (nullRight && isEqualityOp(op)) {
          return TernaryValue.forBoolean(compareToNull(left, op));
        }
      }
      // ignore the rest for now.
      return TernaryValue.UNKNOWN;

    default:
      // assert, this should cover all consts
      return TernaryValue.UNKNOWN;
  }
}
/**
 * Returns whether two JS strings are equal, or UNKNOWN when either
 * contains a vertical tab (browsers historically parsed \v differently,
 * so equality cannot be decided at compile time).
 */
private static TernaryValue areStringsEqual(String a, String b) {
  boolean hasVerticalTab =
      a.indexOf('\u000B') != -1 || b.indexOf('\u000B') != -1;
  if (hasVerticalTab) {
    return TernaryValue.UNKNOWN;
  }
  return a.equals(b) ? TernaryValue.TRUE : TernaryValue.FALSE;
}
/**
 * Normalizes a node's token type for comparison purposes: a NOT
 * expression whose boolean value is statically known is treated as
 * TRUE or FALSE; all other nodes keep their own type.
 */
private static int getNormalizedNodeType(Node n) {
  int type = n.getType();
  if (type == Token.NOT) {
    TernaryValue value = NodeUtil.getPureBooleanValue(n);
    switch (value) {
      case TRUE:
        return Token.TRUE;
      case FALSE:
        return Token.FALSE;
      case UNKNOWN:
        return type;
    }
  }
  return type;
}
/**
 * The result of the comparison, or UNKNOWN if the
 * result could not be determined.
 */
private static TernaryValue compareAsNumbers(int op, Node left, Node right) {
  Double leftNum = NodeUtil.getNumberValue(left);
  if (leftNum == null) {
    return TernaryValue.UNKNOWN;
  }
  Double rightNum = NodeUtil.getNumberValue(right);
  if (rightNum == null) {
    return TernaryValue.UNKNOWN;
  }
  final double l = leftNum;
  final double r = rightNum;
  switch (op) {
    case Token.SHEQ:
    case Token.EQ:
      // Equality is only folded when both operands are literal NUMBER nodes.
      Preconditions.checkState(left.isNumber() && right.isNumber());
      return TernaryValue.forBoolean(l == r);
    case Token.SHNE:
    case Token.NE:
      Preconditions.checkState(left.isNumber() && right.isNumber());
      return TernaryValue.forBoolean(l != r);
    case Token.LE:
      return TernaryValue.forBoolean(l <= r);
    case Token.LT:
      return TernaryValue.forBoolean(l < r);
    case Token.GE:
      return TernaryValue.forBoolean(l >= r);
    case Token.GT:
      return TernaryValue.forBoolean(l > r);
    default:
      // Not an operator we evaluate numerically.
      return TernaryValue.UNKNOWN;
  }
}
/**
 * @param value The value to compare to "undefined"
 * @param op The boolean op to compare with
 * @return Whether the boolean op is true or false
 */
private static boolean compareToUndefined(Node value, int op) {
  Preconditions.checkState(NodeUtil.isLiteralValue(value, true));
  boolean isUndefined = NodeUtil.isUndefined(value);
  // Loose (==/!=) comparison treats null and undefined as equivalent;
  // strict (===/!==) matches undefined only.
  boolean looseMatch = isUndefined || value.getType() == Token.NULL;
  switch (op) {
    case Token.EQ:
      return looseMatch;
    case Token.NE:
      return !looseMatch;
    case Token.SHEQ:
      return isUndefined;
    case Token.SHNE:
      return !isUndefined;
    case Token.LT:
    case Token.GT:
    case Token.LE:
    case Token.GE:
      // Relational comparison against undefined is always false.
      return false;
    default:
      throw new IllegalStateException("unexpected.");
  }
}
/** Whether {@code op} is one of the four equality operators (==, !=, ===, !==). */
private static boolean isEqualityOp(int op) {
  return op == Token.EQ
      || op == Token.NE
      || op == Token.SHEQ
      || op == Token.SHNE;
}
/**
 * @param value The value to compare to "null"
 * @param op The boolean op to compare with
 * @return Whether the boolean op is true or false
 */
private static boolean compareToNull(Node value, int op) {
  boolean isUndefined = NodeUtil.isUndefined(value);
  boolean isNull = value.getType() == Token.NULL;
  // Loose (==/!=) comparison treats null and undefined as equivalent;
  // strict (===/!==) matches null only.
  boolean looseMatch = isUndefined || isNull;
  switch (op) {
    case Token.EQ:
      return looseMatch;
    case Token.NE:
      return !looseMatch;
    case Token.SHEQ:
      return isNull;
    case Token.SHNE:
      return !isNull;
    default:
      throw new IllegalStateException("unexpected.");
  }
}
/**
 * Try to fold away unnecessary object instantiation.
 * e.g. this[new String('eval')] -> this.eval
 */
private Node tryFoldCtorCall(Node n) {
  Preconditions.checkArgument(n.isNew());
  // Folding is only attempted where the result is forced to a string.
  return inForcedStringContext(n) ? tryFoldInForcedStringContext(n) : n;
}
/** Returns whether this node must be coerced to a string. */
private boolean inForcedStringContext(Node n) {
  Node parent = n.getParent();
  // A GETELEM index expression is coerced to a string.
  if (parent.isGetElem() && parent.getLastChild() == n) {
    return true;
  }
  // We can fold in the case "" + new String("").
  return parent.isAdd();
}
/**
 * Folds {@code new String(...)} into a plain string literal when it appears
 * in a context that forces string coercion.
 *
 * @return The replacement STRING node, or {@code n} when no fold applies.
 */
private Node tryFoldInForcedStringContext(Node n) {
  // For now, we only know how to fold ctors.
  Preconditions.checkArgument(n.isNew());
  Node ctorName = n.getFirstChild();
  if (!ctorName.isName() || !ctorName.getString().equals("String")) {
    return n;
  }
  Node arg = ctorName.getNext();
  String stringValue;
  if (arg == null) {
    // "new String()" coerces to the empty string.
    stringValue = "";
  } else if (NodeUtil.isImmutableValue(arg)) {
    stringValue = NodeUtil.getStringValue(arg);
  } else {
    // A mutable argument may have side-effects; leave it alone.
    return n;
  }
  if (stringValue == null) {
    return n;
  }
  Node parent = n.getParent();
  Node newString = IR.string(stringValue);
  parent.replaceChild(n, newString);
  newString.copyInformationFrom(parent);
  reportCodeChange();
  return newString;
}
/**
 * Try to fold array-element. e.g [1, 2, 3][10];
 */
private Node tryFoldGetElem(Node n, Node left, Node right) {
  Preconditions.checkArgument(n.isGetElem());
  // Dispatch on the kind of literal being indexed.
  if (left.isObjectLit()) {
    return tryFoldObjectPropAccess(n, left, right);
  }
  return left.isArrayLit() ? tryFoldArrayAccess(n, left, right) : n;
}
/**
 * Try to fold array-length. e.g [1, 2, 3].length ==> 3, [x, y].length ==> 2
 */
private Node tryFoldGetProp(Node n, Node left, Node right) {
  Preconditions.checkArgument(n.isGetProp());
  if (left.isObjectLit()) {
    return tryFoldObjectPropAccess(n, left, right);
  }
  if (!right.isString() || !right.getString().equals("length")) {
    return n;
  }
  int knownLength;
  switch (left.getType()) {
    case Token.ARRAYLIT:
      if (mayHaveSideEffects(left)) {
        // Can't drop the literal without also preserving its side-effects.
        return n;
      }
      knownLength = left.getChildCount();
      break;
    case Token.STRING:
      knownLength = left.getString().length();
      break;
    default:
      // Not a foldable case, forget it.
      return n;
  }
  Node lengthNode = IR.number(knownLength);
  n.getParent().replaceChild(n, lengthNode);
  reportCodeChange();
  return lengthNode;
}
/**
 * Whether {@code n} is written to by its parent (assignment op, ++ or --),
 * in which case the literal it reads from acts as a temporary and the access
 * must not be folded: "{a:x}.a += 1" is not "x += 1".
 */
private boolean isAssignmentTarget(Node n) {
  Node parent = n.getParent();
  return (NodeUtil.isAssignmentOp(parent) && parent.getFirstChild() == n)
      || parent.isInc()
      || parent.isDec();
}
/**
 * Folds an integer-literal index into an array literal, e.g. [1,2,3][1] -> 2,
 * when the other elements are side-effect free.
 *
 * @param n The GETELEM node.
 * @param left The ARRAYLIT node being indexed.
 * @param right The index expression.
 * @return The extracted element on success, otherwise {@code n}.
 */
private Node tryFoldArrayAccess(Node n, Node left, Node right) {
  // If GETPROP/GETELEM is used as assignment target the array literal is
  // acting as a temporary we can't fold it here:
  // "[][0] += 1"
  if (isAssignmentTarget(n)) {
    return n;
  }
  if (!right.isNumber()) {
    // Sometimes people like to use complex expressions to index into
    // arrays, or strings to index into array methods.
    return n;
  }
  double index = right.getDouble();
  int intIndex = (int) index;
  // Only whole, non-negative indexes can address a literal element.
  if (intIndex != index) {
    report(INVALID_GETELEM_INDEX_ERROR, right);
    return n;
  }
  if (intIndex < 0) {
    report(INDEX_OUT_OF_BOUNDS_ERROR, right);
    return n;
  }
  // Scan the literal: bail out if any non-selected element has side-effects,
  // since those would be dropped by the fold.
  Node current = left.getFirstChild();
  Node elem = null;
  for (int i = 0; current != null; i++) {
    if (i != intIndex) {
      if (mayHaveSideEffects(current)) {
        return n;
      }
    } else {
      elem = current;
    }
    current = current.getNext();
  }
  if (elem == null) {
    // Index is past the end of the literal.
    report(INDEX_OUT_OF_BOUNDS_ERROR, right);
    return n;
  }
  if (elem.isEmpty()) {
    // An array hole ([,1][0]) evaluates to undefined.
    elem = NodeUtil.newUndefinedNode(elem);
  } else {
    left.removeChild(elem);
  }
  // Replace the entire GETELEM with the value
  n.getParent().replaceChild(n, elem);
  reportCodeChange();
  return elem;
}
/**
 * Folds a property access on an object literal, e.g. ({a:1}).a -> 1, when the
 * other entries of the literal are side-effect free.
 *
 * @param n The GETPROP/GETELEM node.
 * @param left The OBJECTLIT node.
 * @param right The STRING node naming the property.
 * @return The node now occupying n's position in the tree, or {@code n} if
 *     nothing was folded.
 */
private Node tryFoldObjectPropAccess(Node n, Node left, Node right) {
  Preconditions.checkArgument(NodeUtil.isGet(n));
  if (!left.isObjectLit() || !right.isString()) {
    return n;
  }
  if (isAssignmentTarget(n)) {
    // If GETPROP/GETELEM is used as assignment target the object literal is
    // acting as a temporary we can't fold it here:
    // "{a:x}.a += 1" is not "x += 1"
    return n;
  }
  // find the last definition in the object literal
  Node key = null;
  Node value = null;
  for (Node c = left.getFirstChild(); c != null; c = c.getNext()) {
    if (c.getString().equals(right.getString())) {
      switch (c.getType()) {
        case Token.SETTER_DEF:
          // A setter never produces a value; keep scanning.
          continue;
        case Token.GETTER_DEF:
        case Token.STRING_KEY:
          if (value != null && mayHaveSideEffects(value)) {
            // The previously found value had side-effects
            return n;
          }
          key = c;
          value = key.getFirstChild();
          break;
        default:
          throw new IllegalStateException();
      }
    } else if (mayHaveSideEffects(c.getFirstChild())) {
      // We don't handle the side-effects here as they might need a temporary
      // or need to be reordered.
      return n;
    }
  }
  // Didn't find a definition of the name in the object literal, it might
  // be coming from the Object prototype
  if (value == null) {
    return n;
  }
  if (value.isFunction() && NodeUtil.referencesThis(value)) {
    // 'this' may refer to the object we are trying to remove
    return n;
  }
  Node replacement = value.detachFromParent();
  if (key.isGetterDef()) {
    // A getter is invoked, not read: fold to a free call of its function.
    replacement = IR.call(replacement);
    replacement.putBooleanProp(Node.FREE_CALL, true);
  }
  n.getParent().replaceChild(n, replacement);
  reportCodeChange();
  // BUG FIX: previously returned the now-detached node n. Return the node
  // actually in the tree, consistent with tryFoldArrayAccess/tryFoldGetProp,
  // so callers and subsequent peephole passes operate on live AST.
  return replacement;
}
}
| |
/**
* @author Bikas Vaibhav (http://bikasv.com) 2013
* Rewrote the plug-in at https://github.com/phonegap/phonegap-plugins/tree/master/Android/DatePicker
* It can now accept `min` and `max` dates for DatePicker.
*/
package com.plugin.datepicker;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import java.text.SimpleDateFormat;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.annotation.SuppressLint;
import android.app.DatePickerDialog;
import android.app.DatePickerDialog.OnDateSetListener;
import android.app.Dialog;
import android.app.TimePickerDialog;
import android.app.TimePickerDialog.OnTimeSetListener;
import android.content.Context;
import android.content.DialogInterface;
import android.util.Log;
import android.view.KeyEvent;
import android.widget.DatePicker;
import android.widget.DatePicker.OnDateChangedListener;
import android.widget.TimePicker;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import android.os.Build;
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
public class DatePickerPlugin extends CordovaPlugin {

    private static final String ACTION_DATE = "date";
    private static final String ACTION_TIME = "time";
    private final String pluginName = "DatePickerPlugin";

    /**
     * Cordova bridge entry point. The picker mode ("date"/"time") is read from
     * the options object itself, so the {@code action} argument is not
     * consulted here.
     *
     * @param action          Cordova action name (unused).
     * @param data            JSON array whose first element holds the options:
     *                        "mode", "date" (M/d/yyyy/H/m), "minDate", "maxDate".
     * @param callbackContext callback used to deliver the picked value.
     * @return always {@code true}; the request is handled asynchronously.
     */
    @Override
    public boolean execute(final String action, final JSONArray data, final CallbackContext callbackContext) {
        Log.d(pluginName, "DatePicker called with options: " + data);
        boolean result = false;
        this.show(data, callbackContext);
        result = true;
        return result;
    }

    /**
     * Parses the plugin options and shows the matching picker dialog on the
     * UI thread. Any option that cannot be parsed falls back to "now".
     */
    public synchronized void show(final JSONArray data, final CallbackContext callbackContext) {
        final DatePickerPlugin datePickerPlugin = this;
        final Context currentCtx = cordova.getActivity();
        final Calendar c = Calendar.getInstance();
        final Runnable runnable;

        String action = "date";
        long minDateLong = 0, maxDateLong = 0;
        int month = -1, day = -1, year = -1, hour = -1, min = -1;

        try {
            JSONObject obj = data.getJSONObject(0);
            action = obj.getString("mode");

            String optionDate = obj.getString("date");
            String[] datePart = optionDate.split("/");
            month = Integer.parseInt(datePart[0]);
            day = Integer.parseInt(datePart[1]);
            year = Integer.parseInt(datePart[2]);
            hour = Integer.parseInt(datePart[3]);
            min = Integer.parseInt(datePart[4]);

            minDateLong = obj.getLong("minDate");
            maxDateLong = obj.getLong("maxDate");
        } catch (JSONException | NumberFormatException | ArrayIndexOutOfBoundsException e) {
            // BUG FIX: only JSONException was caught before, so a malformed
            // "date" string (non-numeric or fewer than 5 "/"-separated parts)
            // crashed with an uncaught runtime exception. Any malformed option
            // now falls back to the current date/time below.
            e.printStackTrace();
        }

        // By default initialize these fields to 'now'.
        final int mYear = year == -1 ? c.get(Calendar.YEAR) : year;
        final int mMonth = month == -1 ? c.get(Calendar.MONTH) : month - 1;
        final int mDay = day == -1 ? c.get(Calendar.DAY_OF_MONTH) : day;
        final int mHour = hour == -1 ? c.get(Calendar.HOUR_OF_DAY) : hour;
        final int mMinutes = min == -1 ? c.get(Calendar.MINUTE) : min;
        final long minDate = minDateLong;
        final long maxDate = maxDateLong;

        if (ACTION_TIME.equalsIgnoreCase(action)) {
            runnable = new Runnable() {
                @Override
                public void run() {
                    final TimeSetListener timeSetListener = new TimeSetListener(datePickerPlugin, callbackContext);
                    final TimePickerDialog timeDialog = new TimePickerDialog(currentCtx, timeSetListener, mHour,
                            mMinutes, false);
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                        timeDialog.setCancelable(true);
                        timeDialog.setCanceledOnTouchOutside(false);
                        // Deliver an explicit "cancel" so JS can tell a
                        // dismissal apart from a successful selection.
                        timeDialog.setButton(DialogInterface.BUTTON_NEGATIVE, currentCtx.getString(android.R.string.cancel), new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                callbackContext.success("cancel");
                            }
                        });
                        timeDialog.setOnKeyListener(new Dialog.OnKeyListener() {
                            @Override
                            public boolean onKey(DialogInterface dialog, int keyCode, KeyEvent event) {
                                return false;
                            }
                        });
                    }
                    timeDialog.show();
                }
            };
        } else if (ACTION_DATE.equalsIgnoreCase(action)) {
            runnable = new Runnable() {
                @Override
                public void run() {
                    final DateSetListener dateSetListener = new DateSetListener(datePickerPlugin, callbackContext);
                    final DatePickerDialog dateDialog = new DatePickerDialog(currentCtx, dateSetListener, mYear,
                            mMonth, mDay);
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                        // API 11+: the native picker enforces the bounds itself.
                        DatePicker dp = dateDialog.getDatePicker();
                        if (minDate > 0) {
                            dp.setMinDate(minDate);
                        }
                        if (maxDate > 0 && maxDate > minDate) {
                            dp.setMaxDate(maxDate);
                        }
                        dateDialog.setCancelable(true);
                        dateDialog.setCanceledOnTouchOutside(false);
                        dateDialog.setButton(DialogInterface.BUTTON_NEGATIVE, currentCtx.getString(android.R.string.cancel), new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                callbackContext.success("cancel");
                            }
                        });
                        dateDialog.setOnKeyListener(new Dialog.OnKeyListener() {
                            @Override
                            public boolean onKey(DialogInterface dialog, int keyCode, KeyEvent event) {
                                return false;
                            }
                        });
                    } else {
                        // Pre-Honeycomb fallback: there is no set(Min|Max)Date
                        // API, so reach into the dialog's private "mDatePicker"
                        // field reflectively and clamp selections by hand.
                        DatePicker pickerView = null;
                        try {
                            java.lang.reflect.Field mDatePickerField = dateDialog.getClass().getDeclaredField("mDatePicker");
                            mDatePickerField.setAccessible(true);
                            pickerView = (DatePicker) mDatePickerField.get(dateDialog);
                        } catch (NoSuchFieldException e) {
                            e.printStackTrace();
                        } catch (IllegalArgumentException e) {
                            e.printStackTrace();
                        } catch (IllegalAccessException e) {
                            e.printStackTrace();
                        }

                        final Calendar startDate = Calendar.getInstance();
                        startDate.setTimeInMillis(minDate);
                        final Calendar endDate = Calendar.getInstance();
                        endDate.setTimeInMillis(maxDate);

                        final int minYear = startDate.get(Calendar.YEAR);
                        final int minMonth = startDate.get(Calendar.MONTH);
                        final int minDay = startDate.get(Calendar.DAY_OF_MONTH);
                        final int maxYear = endDate.get(Calendar.YEAR);
                        final int maxMonth = endDate.get(Calendar.MONTH);
                        final int maxDay = endDate.get(Calendar.DAY_OF_MONTH);

                        // BUG FIX: the previous "startDate != null || endDate != null"
                        // guard was always true (both are freshly constructed), and
                        // pickerView was dereferenced even when the reflective lookup
                        // above had failed, causing an NPE. Guard on pickerView.
                        if (pickerView != null) {
                            pickerView.init(mYear, mMonth, mDay, new OnDateChangedListener() {
                                @Override
                                public void onDateChanged(DatePicker view, int year, int month, int day) {
                                    if (maxDate > 0 && maxDate > minDate) {
                                        if (year > maxYear || month > maxMonth && year == maxYear || day > maxDay && year == maxYear && month == maxMonth) {
                                            view.updateDate(maxYear, maxMonth, maxDay);
                                        }
                                    }
                                    if (minDate > 0) {
                                        if (year < minYear || month < minMonth && year == minYear || day < minDay && year == minYear && month == minMonth) {
                                            view.updateDate(minYear, minMonth, minDay);
                                        }
                                    }
                                }
                            });
                        }
                    }
                    dateDialog.show();
                }
            };
        } else {
            Log.d(pluginName, "Unknown action. Only 'date' or 'time' are valid actions");
            return;
        }

        cordova.getActivity().runOnUiThread(runnable);
    }

    /** Delivers the chosen date to JS as "YYYY/MM/DD". */
    private final class DateSetListener implements OnDateSetListener {
        private final DatePickerPlugin datePickerPlugin;
        private final CallbackContext callbackContext;

        private DateSetListener(DatePickerPlugin datePickerPlugin, CallbackContext callbackContext) {
            this.datePickerPlugin = datePickerPlugin;
            this.callbackContext = callbackContext;
        }

        /**
         * Return a string containing the date in the format YYYY/MM/DD
         */
        @Override
        public void onDateSet(final DatePicker view, final int year, final int monthOfYear, final int dayOfMonth) {
            String returnDate = year + "/" + (monthOfYear + 1) + "/" + dayOfMonth;
            callbackContext.success(returnDate);
        }
    }

    /** Delivers the chosen time to JS as an ISO-8601 style GMT timestamp. */
    private final class TimeSetListener implements OnTimeSetListener {
        private final DatePickerPlugin datePickerPlugin;
        private final CallbackContext callbackContext;

        private TimeSetListener(DatePickerPlugin datePickerPlugin, CallbackContext callbackContext) {
            this.datePickerPlugin = datePickerPlugin;
            this.callbackContext = callbackContext;
        }

        /**
         * Return the current date with the time modified as it was set in the
         * time picker.
         */
        @Override
        public void onTimeSet(final TimePicker view, final int hourOfDay, final int minute) {
            Calendar calendar = Calendar.getInstance(TimeZone.getDefault());
            calendar.set(Calendar.HOUR_OF_DAY, hourOfDay);
            calendar.set(Calendar.MINUTE, minute);
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
            sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
            String toReturn = sdf.format(calendar.getTime());
            callbackContext.success(toReturn);
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/environment.proto
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The request message for [Environments.ListEnvironments][google.cloud.dialogflow.v2beta1.Environments.ListEnvironments].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest}
*/
public final class ListEnvironmentsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest)
ListEnvironmentsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListEnvironmentsRequest.newBuilder() to construct.
  private ListEnvironmentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: both string fields start as the empty string.
  private ListEnvironmentsRequest() {
    parent_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListEnvironmentsRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: consumes tag/value pairs until tag 0
  // (end of stream), preserving unrecognized fields in unknownFields.
  private ListEnvironmentsRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (parent), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 16: // field 2 (page_size), wire type 0 (varint)
            {
              pageSize_ = input.readInt32();
              break;
            }
          case 26: // field 3 (page_token), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();
              pageToken_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  // Static/instance accessors wiring this message to its generated proto
  // metadata (descriptor and reflective field accessors).
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2beta1.EnvironmentProto
        .internal_static_google_cloud_dialogflow_v2beta1_ListEnvironmentsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2beta1.EnvironmentProto
        .internal_static_google_cloud_dialogflow_v2beta1_ListEnvironmentsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest.class,
            com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; the decoded String is cached back
  // into the field on first access (standard generated-code lazy decode).
  private volatile java.lang.Object parent_;
  /**
   *
   *
   * <pre>
   * Required. The agent to list all environments from.
   * Format:
   * - `projects/<Project Number / ID>/agent`
   * - `projects/<Project Number / ID>/locations/<Location ID>/agent`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 bytes once and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The agent to list all environments from.
   * Format:
   * - `projects/<Project Number / ID>/agent`
   * - `projects/<Project Number / ID>/locations/<Location ID>/agent`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_;
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of items to return in a single page. By default 100 and
   * at most 1000.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  // Holds either a String or a ByteString; decoded lazily like parent_.
  private volatile java.lang.Object pageToken_;
  /**
   *
   *
   * <pre>
   * Optional. The next_page_token value returned from a previous list request.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 bytes once and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The next_page_token value returned from a previous list request.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message to the wire format, skipping default-valued fields
  // per proto3 semantics, then appends any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    unknownFields.writeTo(output);
  }
  // Computes (and memoizes) the serialized byte size, mirroring writeTo:
  // default-valued fields contribute nothing.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including preserved unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest other =
        (com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  // Memoized hash combining the descriptor, each field number/value pair,
  // and the unknown fields — consistent with equals() above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads: each source
  // type (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream),
  // with and without an extension registry, delegates to PARSER.
  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builders come from the default instance;
  // toBuilder() seeds a builder with this message's current field values.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The request message for [Environments.ListEnvironments][google.cloud.dialogflow.v2beta1.Environments.ListEnvironments].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest)
com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequestOrBuilder {
    // Builder-side descriptor wiring and constructors.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2beta1.EnvironmentProto
          .internal_static_google_cloud_dialogflow_v2beta1_ListEnvironmentsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2beta1.EnvironmentProto
          .internal_static_google_cloud_dialogflow_v2beta1_ListEnvironmentsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest.class,
              com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No sub-message fields, so there are no nested builders to force.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    // clear() resets all fields to proto3 defaults; build()/buildPartial()
    // snapshot the builder state into an immutable message.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";

      pageSize_ = 0;

      pageToken_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2beta1.EnvironmentProto
          .internal_static_google_cloud_dialogflow_v2beta1_ListEnvironmentsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest build() {
      com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest buildPartial() {
      com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest result =
          new com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest(this);
      result.parent_ = parent_;
      result.pageSize_ = pageSize_;
      result.pageToken_ = pageToken_;
      onBuilt();
      return result;
    }
@java.lang.Override
// Delegates to GeneratedMessageV3.Builder, which performs a deep builder copy.
public Builder clone() {
  return super.clone();
}
@java.lang.Override
// Reflective field setter; delegates to the generated superclass.
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
// Reflective field clear; delegates to the generated superclass.
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
// Reflective oneof clear; delegates to the generated superclass.
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
// Reflective repeated-field element setter; delegates to the generated superclass.
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
// Reflective repeated-field append; delegates to the generated superclass.
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
// Dispatches to the strongly-typed merge when the runtime type matches;
// otherwise falls back to the reflective merge in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (!(other instanceof com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest)) {
    super.mergeFrom(other);
    return this;
  }
  return mergeFrom((com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest) other);
}
/**
 * Merges every non-default field of {@code other} into this builder, then
 * merges its unknown fields. Merging the shared default instance is a no-op.
 *
 * @param other the message to merge from
 * @return this builder for chaining
 */
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest other) {
  // Fixed: the multi-line early-return `if` was unbraced, which is brittle
  // if a statement is ever added. Behavior is unchanged.
  if (other == com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest.getDefaultInstance()) {
    return this;
  }
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    onChanged();
  }
  if (other.getPageSize() != 0) {
    setPageSize(other.getPageSize());
  }
  if (!other.getPageToken().isEmpty()) {
    pageToken_ = other.pageToken_;
    onChanged();
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
@java.lang.Override
// Proto3 message with no required fields: always initialized.
public final boolean isInitialized() {
  return true;
}
@java.lang.Override
// Parses a message from the wire and merges it into this builder.
// On a parse error, any partially-parsed message is still merged (in the
// finally block) before the exception is rethrown as an IOException.
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage =
        (com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Holds either a String or a lazily-decoded ByteString for the `parent` field.
private java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The agent to list all environments from.
 * Format:
 * - `projects/<Project Number / ID>/agent`
 * - `projects/<Project Number / ID>/locations/<Location ID>/agent`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // First access since the field was set from bytes: decode and cache the String.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
  java.lang.String decoded = bytes.toStringUtf8();
  parent_ = decoded;
  return decoded;
}
/**
 *
 *
 * <pre>
 * Required. The agent to list all environments from.
 * Format:
 * - `projects/<Project Number / ID>/agent`
 * - `projects/<Project Number / ID>/locations/<Location ID>/agent`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Field currently cached as a String: encode once and cache the ByteString.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded;
  return encoded;
}
/**
 *
 *
 * <pre>
 * Required. The agent to list all environments from.
 * Format:
 * - `projects/<Project Number / ID>/agent`
 * - `projects/<Project Number / ID>/locations/<Location ID>/agent`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The parent to set. Must not be null.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  parent_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The agent to list all environments from.
 * Format:
 * - `projects/<Project Number / ID>/agent`
 * - `projects/<Project Number / ID>/locations/<Location ID>/agent`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  // Resets to the default instance's value (the empty string for proto3).
  parent_ = getDefaultInstance().getParent();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The agent to list all environments from.
 * Format:
 * - `projects/<Project Number / ID>/agent`
 * - `projects/<Project Number / ID>/locations/<Location ID>/agent`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set. Must not be null and must be valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  parent_ = value;
  onChanged();
  return this;
}
// Backing field for `page_size`; 0 means "not set" in proto3.
private int pageSize_;
/**
 *
 *
 * <pre>
 * Optional. The maximum number of items to return in a single page. By default 100 and
 * at most 1000.
 * </pre>
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum number of items to return in a single page. By default 100 and
 * at most 1000.
 * </pre>
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum number of items to return in a single page. By default 100 and
 * at most 1000.
 * </pre>
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
  // Proto3 default for int32 is 0.
  pageSize_ = 0;
  onChanged();
  return this;
}
// Holds either a String or a lazily-decoded ByteString for the `page_token` field.
private java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * Optional. The next_page_token value returned from a previous list request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Decode the cached ByteString once and memoize the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
  java.lang.String decoded = bytes.toStringUtf8();
  pageToken_ = decoded;
  return decoded;
}
/**
 *
 *
 * <pre>
 * Optional. The next_page_token value returned from a previous list request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the cached String once and memoize the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  pageToken_ = encoded;
  return encoded;
}
/**
 *
 *
 * <pre>
 * Optional. The next_page_token value returned from a previous list request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The pageToken to set. Must not be null.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The next_page_token value returned from a previous list request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
  // Resets to the default instance's value (the empty string for proto3).
  pageToken_ = getDefaultInstance().getPageToken();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The next_page_token value returned from a previous list request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The bytes for pageToken to set. Must not be null and must be valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  onChanged();
  return this;
}
@java.lang.Override
// Replaces the unknown-field set; delegates to the generated superclass.
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
// Merges the given unknown fields with the builder's; delegates to the superclass.
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest)
// Singleton default instance shared by all callers; proto3 messages are immutable,
// so a single default can be handed out safely.
private static final com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest();
}
public static com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser for this message type; each call constructs a new
// message directly from the input stream.
private static final com.google.protobuf.Parser<ListEnvironmentsRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListEnvironmentsRequest>() {
      @java.lang.Override
      public ListEnvironmentsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListEnvironmentsRequest(input, extensionRegistry);
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<ListEnvironmentsRequest> parser() {
  return PARSER;
}
@java.lang.Override
// Instance accessor for the shared parser, required by the Message interface.
public com.google.protobuf.Parser<ListEnvironmentsRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
// Returns the shared immutable default instance of this message type.
public com.google.cloud.dialogflow.v2beta1.ListEnvironmentsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.util.List;
import java.util.Map;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Condition;
import org.apache.jackrabbit.oak.plugins.document.cache.CacheInvalidationStats;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* The interface for the backend storage for documents.
* <p>
* In general atomicity of operations on a DocumentStore are limited to a single
* document. That is, an implementation does not have to guarantee atomicity of
* the entire effect of a method call. A method that fails with an exception may
* have modified just some documents and then abort. However, an implementation
* must not modify a document partially. Either the complete update operation is
* applied to a document or no modification is done at all.
* <p>
* The key is the id of a document. Keys are opaque strings. All characters are
* allowed. Leading and trailing whitespace is allowed. For keys, the maximum
* length is 512 bytes in the UTF-8 representation.
*/
public interface DocumentStore {
    /**
     * Get the document with the given {@code key}. This is a convenience method
     * and equivalent to {@link #find(Collection, String, int)} with a
     * {@code maxCacheAge} of {@code Integer.MAX_VALUE}.
     * <p>
     * The returned document is immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @return the document, or null if not found
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    @Nullable
    <T extends Document> T find(Collection<T> collection, String key)
            throws DocumentStoreException;
    /**
     * Get the document with the {@code key}. The implementation may serve the
     * document from a cache, but the cached document must not be older than
     * the given {@code maxCacheAge} in milliseconds. An implementation must
     * invalidate a cached document when it detects it is outdated. That is, a
     * subsequent call to {@link #find(Collection, String)} must return the
     * newer version of the document.
     * <p>
     * The returned document is immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @param maxCacheAge the maximum age of the cached document (in ms)
     * @return the document, or null if not found
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    @Nullable
    <T extends Document> T find(Collection<T> collection, String key, int maxCacheAge)
            throws DocumentStoreException;
    /**
     * Get a list of documents where the key is greater than a start value and
     * less than an end value.
     * <p>
     * The returned documents are sorted by key and are immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param fromKey the start value (excluding)
     * @param toKey the end value (excluding)
     * @param limit the maximum number of entries to return (starting with the lowest key)
     * @return the list (possibly empty)
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    @NotNull
    <T extends Document> List<T> query(Collection<T> collection,
                                       String fromKey,
                                       String toKey,
                                       int limit) throws DocumentStoreException;
    /**
     * Get a list of documents where the key is greater than a start value and
     * less than an end value <em>and</em> the given "indexed property" is greater
     * or equals the specified value.
     * <p>
     * The indexed property can either be a {@link Long} value, in which case numeric
     * comparison applies, or a {@link Boolean} value, in which case "false" is mapped
     * to "0" and "true" is mapped to "1".
     * <p>
     * The returned documents are sorted by key and are immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param fromKey the start value (excluding)
     * @param toKey the end value (excluding)
     * @param indexedProperty the name of the indexed property (optional)
     * @param startValue the minimum value of the indexed property
     * @param limit the maximum number of entries to return
     * @return the list (possibly empty)
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    @NotNull
    <T extends Document> List<T> query(Collection<T> collection,
                                       String fromKey,
                                       String toKey,
                                       String indexedProperty,
                                       long startValue,
                                       int limit) throws DocumentStoreException;
    /**
     * Remove a document. This method does nothing if there is no document
     * with the given key.
     * <p>
     * In case of a {@code DocumentStoreException}, the document with the given
     * key may or may not have been removed from the store. It is the
     * responsibility of the caller to check whether it still exists. The
     * implementation however ensures that the result of the operation is
     * properly reflected in the document cache. That is, an implementation
     * could simply evict the document with the given key.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    <T extends Document> void remove(Collection<T> collection, String key)
            throws DocumentStoreException;
    /**
     * Batch remove documents with given keys. Keys for documents that do not
     * exist are simply ignored. If this method fails with an exception, then
     * only some of the documents identified by {@code keys} may have been
     * removed.
     * <p>
     * In case of a {@code DocumentStoreException}, the documents with the given
     * keys may or may not have been removed from the store. It may also be
     * possible that only some have been removed from the store. It is the
     * responsibility of the caller to check which documents still exist. The
     * implementation however ensures that the result of the operation is
     * properly reflected in the document cache. That is, an implementation
     * could simply evict documents with the given keys from the cache.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param keys list of keys
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    <T extends Document> void remove(Collection<T> collection, List<String> keys)
            throws DocumentStoreException;
    /**
     * Batch remove documents with given keys and corresponding equal conditions
     * on {@link NodeDocument#MODIFIED_IN_SECS} values. Keys for documents that
     * do not exist are simply ignored. A document is only removed if the
     * corresponding condition is met.
     * <p>
     * In case of a {@code DocumentStoreException}, the documents with the given
     * keys may or may not have been removed from the store. It may also be
     * possible that only some have been removed from the store. It is the
     * responsibility of the caller to check which documents still exist. The
     * implementation however ensures that the result of the operation is
     * properly reflected in the document cache. That is, an implementation
     * could simply evict documents with the given keys from the cache.
     *
     * @param <T>
     *            the document type
     * @param collection
     *            the collection.
     * @param toRemove
     *            the keys of the documents to remove with the corresponding
     *            timestamps.
     * @return the number of removed documents.
     * @throws DocumentStoreException
     *             if the operation failed. E.g. because of an I/O error.
     */
    <T extends Document> int remove(Collection<T> collection, Map<String, Long> toRemove)
            throws DocumentStoreException;
    /**
     * Batch remove documents where the given "indexed property" is within the given
     * range (exclusive) - {@code (startValue, endValue)}.
     * <p>
     * The indexed property is a {@link Long} value and numeric comparison applies.
     * <p>
     * In case of a {@code DocumentStoreException}, the documents with the given
     * keys may or may not have been removed from the store. It may also be
     * possible that only some have been removed from the store. It is the
     * responsibility of the caller to check which documents still exist. The
     * implementation however ensures that the result of the operation is
     * properly reflected in the document cache. That is, an implementation
     * could simply evict documents with the given keys from the cache.
     *
     * @param <T> the document type
     * @param collection the collection.
     * @param indexedProperty the name of the indexed property
     * @param startValue the minimum value of the indexed property (exclusive)
     * @param endValue the maximum value of the indexed property (exclusive)
     * @return the number of removed documents.
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    <T extends Document> int remove(Collection<T> collection,
                                    String indexedProperty, long startValue, long endValue)
            throws DocumentStoreException;
    /**
     * Try to create a list of documents. This method returns {@code true} iff
     * none of the documents existed before and the create was successful. This
     * method will return {@code false} if one of the documents already exists
     * in the store. Some documents may still have been created in the store.
     * An implementation does not have to guarantee an atomic create of all the
     * documents described in the {@code updateOps}. It is the responsibility of
     * the caller to check, which documents were created and take appropriate
     * action. The same is true when this method throws
     * {@code DocumentStoreException} (e.g. when a communication error occurs).
     * In this case only some documents may have been created.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param updateOps the list of documents to add (where {@link Condition}s are not allowed)
     * @return true if this worked (if none of the documents already existed)
     * @throws IllegalArgumentException when at least one of the {@linkplain UpdateOp}s is conditional
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    <T extends Document> boolean create(Collection<T> collection,
                                        List<UpdateOp> updateOps)
            throws IllegalArgumentException, DocumentStoreException;
    /**
     * Atomically checks if the document exists and updates it, otherwise the
     * document is created (aka "upsert"), unless the update operation requires
     * the document to be present (see {@link UpdateOp#isNew()}). The returned
     * document is immutable.
     * <p>
     * If this method fails with a {@code DocumentStoreException}, then the
     * document may or may not have been created or updated. It is the
     * responsibility of the caller to check the result e.g. by calling
     * {@link #find(Collection, String)}. The implementation however ensures
     * that the result of the operation is properly reflected in the document
     * cache. That is, an implementation could simply evict documents with the
     * given keys from the cache.
     *
     * @param <T>
     *            the document type
     * @param collection
     *            the collection
     * @param update
     *            the update operation (where {@link Condition}s are not
     *            allowed)
     * @return the old document or {@code null} if it either didn't exist
     *         before, or the {@linkplain UpdateOp} required the document to be
     *         present but {@link UpdateOp#isNew()} was {@code false}.
     * @throws IllegalArgumentException
     *             when the {@linkplain UpdateOp} is conditional
     * @throws DocumentStoreException
     *             if the operation failed. E.g. because of an I/O error.
     */
    @Nullable
    <T extends Document> T createOrUpdate(Collection<T> collection,
                                          UpdateOp update)
            throws IllegalArgumentException, DocumentStoreException;
    /**
     * Create or unconditionally update a number of documents. An implementation
     * does not have to guarantee that all changes are applied atomically,
     * together.
     * <p>
     * In case of a {@code DocumentStoreException} (e.g. when a communication
     * error occurs) only some changes may have been applied. In this case it is
     * the responsibility of the caller to check which {@linkplain UpdateOp}s
     * were applied and take appropriate action. The implementation however
     * ensures that the result of the operations are properly reflected in the
     * document cache. That is, an implementation could simply evict documents
     * related to the given update operations from the cache.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param updateOps the update operation list
     * @return the list containing old documents or <code>null</code> values if they didn't exist
     *         before (see {@linkplain #createOrUpdate(Collection, UpdateOp)}), where the order
     *         reflects the order in the "updateOps" parameter
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    <T extends Document> List<T> createOrUpdate(Collection<T> collection,
                                                List<UpdateOp> updateOps)
            throws DocumentStoreException;
    /**
     * Performs a conditional update (e.g. using
     * {@link UpdateOp.Condition.Type#EXISTS}) and only updates the
     * document if the condition is <code>true</code>. The returned document is
     * immutable.
     * <p>
     * In case of a {@code DocumentStoreException} (e.g. when a communication
     * error occurs) the update may or may not have been applied. In this case
     * it is the responsibility of the caller to check whether the update was
     * applied and take appropriate action. The implementation however ensures
     * that the result of the operation is properly reflected in the document
     * cache. That is, an implementation could simply evict the document related
     * to the given update operation from the cache.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param update the update operation with the condition
     * @return the old document or <code>null</code> if the condition is not met or
     *         if the document wasn't found
     * @throws DocumentStoreException if the operation failed. E.g. because of
     *          an I/O error.
     */
    @Nullable
    <T extends Document> T findAndUpdate(Collection<T> collection,
                                         UpdateOp update)
            throws DocumentStoreException;
    /**
     * Invalidate the document cache. Calling this method instructs the
     * implementation to invalidate each document from the cache, which is not
     * up to date with the underlying storage at the time this method is called.
     * A document is considered in the cache if {@link #getIfCached(Collection, String)}
     * returns a non-null value for a key.
     * <p>
     * An implementation is allowed to perform lazy invalidation and only check
     * whether a document is up-to-date when it is accessed after this method
     * is called. However, this also includes a call to {@link #getIfCached(Collection, String)},
     * which must only return the document if it was up-to-date at the time
     * this method was called. Similarly, a call to {@link #find(Collection, String)}
     * must guarantee the returned document reflects all the changes done up to
     * when {@code invalidateCache()} was called.
     * <p>
     * In some implementations this method can be a NOP because documents can
     * only be modified through a single instance of a {@code DocumentStore}.
     *
     * @return cache invalidation statistics or {@code null} if none are
     *         available.
     */
    @Nullable
    CacheInvalidationStats invalidateCache();
    /**
     * Invalidate the document cache but only with entries that match one
     * of the keys provided.
     *
     * See {@link #invalidateCache()} for the general contract of cache
     * invalidation.
     *
     * @param keys the keys of the documents to invalidate.
     * @return cache invalidation statistics or {@code null} if none are
     *         available.
     */
    @Nullable
    CacheInvalidationStats invalidateCache(Iterable<String> keys);
    /**
     * Invalidate the document cache for the given key.
     *
     * See {@link #invalidateCache()} for the general contract of cache
     * invalidation.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     */
    <T extends Document> void invalidateCache(Collection<T> collection, String key);
    /**
     * Dispose this instance.
     */
    void dispose();
    /**
     * Fetches the cached document. If the document is not present in the cache
     * {@code null} will be returned. This method is consistent with other find
     * methods that may return cached documents and will return {@code null}
     * even when the implementation has a negative cache for documents that
     * do not exist. This method will never return {@link NodeDocument#NULL}.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @return cached document if present. Otherwise {@code null}.
     */
    @Nullable
    <T extends Document> T getIfCached(Collection<T> collection, String key);
    /**
     * Set the level of guarantee for read and write operations, if supported by this backend.
     *
     * @param readWriteMode the read/write mode
     */
    void setReadWriteMode(String readWriteMode);
    /**
     * @return status information about the cache
     */
    @Nullable
    Iterable<CacheStats> getCacheStats();
    /**
     * @return description of the underlying storage.
     */
    Map<String, String> getMetadata();
    /**
     * Returns statistics about the underlying storage. The information and
     * keys returned by this method are implementation specific, may change
     * between releases or may even depend on deployment aspects. E.g. depending
     * on access rights, the method may return more or less information from
     * the underlying store. This method should only be used for informational
     * or debug purposes.
     *
     * @return statistics about this document store.
     */
    @NotNull
    Map<String, String> getStats();
    /**
     * @return the estimated time difference in milliseconds between the local
     * instance and the (typically common, shared) document server system. The
     * value can be zero if the times are estimated to be equal, positive when
     * the local instance is ahead of the remote server and negative when the
     * local instance is behind the remote server. An invocation is not cached
     * and typically requires a round-trip to the server (but that is not a
     * requirement).
     * @throws UnsupportedOperationException if this DocumentStore does not
     * support this method
     * @throws DocumentStoreException if an I/O error occurs.
     */
    long determineServerTimeDifferenceMillis()
            throws UnsupportedOperationException, DocumentStoreException;
}
| |
package com.orientechnologies.orient.core.storage.impl.local.paginated;
import com.orientechnologies.orient.core.config.OContextConfiguration;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.config.OStorageClusterConfiguration;
import com.orientechnologies.orient.core.config.OStorageConfiguration;
import com.orientechnologies.orient.core.config.OStorageSegmentConfiguration;
import com.orientechnologies.orient.core.db.record.OCurrentStorageComponentsFactory;
import com.orientechnologies.orient.core.storage.cache.local.O2QCache;
import com.orientechnologies.orient.core.storage.cache.OCacheEntry;
import com.orientechnologies.orient.core.storage.cache.OReadCache;
import com.orientechnologies.orient.core.storage.cache.local.OWOWCache;
import com.orientechnologies.orient.core.storage.cache.OWriteCache;
import com.orientechnologies.orient.core.storage.fs.OFileClassic;
import com.orientechnologies.orient.core.storage.impl.local.OStorageVariableParser;
import com.orientechnologies.orient.core.storage.impl.local.paginated.atomicoperations.OAtomicOperationsManager;
import com.orientechnologies.orient.core.storage.impl.local.paginated.base.ODurablePage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OAtomicUnitEndRecord;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OAtomicUnitStartRecord;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.ODiskWriteAheadLog;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OFileCreatedWALRecord;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OLogSequenceNumber;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.ONonTxOperationPerformedWALRecord;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OUpdatePageRecord;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OWALPage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OWALRecord;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* @author Andrey Lomakin
* @since 5/8/13
*/
@Test
public class LocalPaginatedClusterWithWAL extends LocalPaginatedClusterTest {
{
  // Raise the fuzzy-checkpoint interval to a very large value — presumably so
  // periodic fuzzy checkpoints don't fire while the WAL is being exercised by
  // these tests. TODO(review): confirm the intent against the test harness.
  OGlobalConfiguration.WAL_FUZZY_CHECKPOINT_INTERVAL.setValue(1000000000);
}
private ODiskWriteAheadLog writeAheadLog;
private OPaginatedCluster testCluster;
private OReadCache testReadCache;
private OWriteCache testWriteCache;
private OLocalPaginatedStorage testStorage;
private String storageDir;
private String testStorageDir;
private OLocalPaginatedStorage storage;
@BeforeMethod
@Override
public void beforeMethod() throws IOException {
buildDirectory = System.getProperty("buildDirectory", ".");
buildDirectory += "/localPaginatedClusterWithWALTest";
createPaginatedCluster();
createTestPaginatedCluster();
}
private void createPaginatedCluster() throws IOException {
storage = mock(OLocalPaginatedStorage.class);
OStorageConfiguration storageConfiguration = mock(OStorageConfiguration.class);
when(storageConfiguration.getContextConfiguration()).thenReturn(new OContextConfiguration());
storageConfiguration.clusters = new ArrayList<OStorageClusterConfiguration>();
storageConfiguration.fileTemplate = new OStorageSegmentConfiguration();
storageConfiguration.binaryFormatVersion = Integer.MAX_VALUE;
storageDir = buildDirectory + "/localPaginatedClusterWithWALTestOne";
when(storage.getStoragePath()).thenReturn(storageDir);
when(storage.getName()).thenReturn("localPaginatedClusterWithWALTestOne");
when(storage.getComponentsFactory()).thenReturn(new OCurrentStorageComponentsFactory(storageConfiguration));
when(storage.getVariableParser()).thenReturn(new OStorageVariableParser(storageDir));
File buildDir = new File(buildDirectory);
if (!buildDir.exists())
buildDir.mkdirs();
File storageDirOneFile = new File(storageDir);
if (!storageDirOneFile.exists())
storageDirOneFile.mkdirs();
writeAheadLog = new ODiskWriteAheadLog(6000, -1, 10 * 1024L * OWALPage.PAGE_SIZE, storage);
writeCache = new OWOWCache(false, OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024, 1000000, writeAheadLog,
100, 1648L * 1024 * 1024, 2 * 1648L * 1024 * 1024, storage, false, 1);
readCache = new O2QCache(1648L * 1024 * 1024, OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024, false, 20);
when(storage.getReadCache()).thenReturn(readCache);
when(storage.getWriteCache()).thenReturn(writeCache);
when(storage.getStorageTransaction()).thenReturn(null);
when(storage.getWALInstance()).thenReturn(writeAheadLog);
atomicOperationsManager = new OAtomicOperationsManager(storage);
when(storage.getAtomicOperationsManager()).thenReturn(atomicOperationsManager);
when(storage.getConfiguration()).thenReturn(storageConfiguration);
when(storage.getMode()).thenReturn("rw");
when(storageConfiguration.getDirectory()).thenReturn(storageDir);
paginatedCluster = new OPaginatedCluster("testPaginatedClusterWithWALTest", storage);
paginatedCluster.configure(storage, 6, "testPaginatedClusterWithWALTest", buildDirectory, -1);
paginatedCluster.create(-1);
}
private void createTestPaginatedCluster() throws IOException {
testStorage = mock(OLocalPaginatedStorage.class);
OStorageConfiguration storageConfiguration = mock(OStorageConfiguration.class);
storageConfiguration.clusters = new ArrayList<OStorageClusterConfiguration>();
storageConfiguration.fileTemplate = new OStorageSegmentConfiguration();
storageConfiguration.binaryFormatVersion = Integer.MAX_VALUE;
when(storageConfiguration.getContextConfiguration()).thenReturn(new OContextConfiguration());
testStorageDir = buildDirectory + "/localPaginatedClusterWithWALTestTwo";
when(testStorage.getStoragePath()).thenReturn(testStorageDir);
when(testStorage.getComponentsFactory()).thenReturn(new OCurrentStorageComponentsFactory(storageConfiguration));
when(testStorage.getName()).thenReturn("localPaginatedClusterWithWALTestTwo");
when(testStorage.getVariableParser()).thenReturn(new OStorageVariableParser(testStorageDir));
File buildDir = new File(buildDirectory);
if (!buildDir.exists())
buildDir.mkdirs();
File storageDirTwoFile = new File(testStorageDir);
if (!storageDirTwoFile.exists())
storageDirTwoFile.mkdirs();
testWriteCache = new OWOWCache(false, OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024, 1000000,
writeAheadLog, 100, 1648L * 1024 * 1024, 1648L * 1024 * 1024 + 400L * 1024 * 1024 * 1024, testStorage, false, 1);
testReadCache = new O2QCache(400L * 1024 * 1024 * 1024, OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024,
false, 20);
OStorageVariableParser variableParser = new OStorageVariableParser(testStorageDir);
final OAtomicOperationsManager testAtomicOperationsManager = new OAtomicOperationsManager(testStorage);
when(testStorage.getReadCache()).thenReturn(testReadCache);
when(testStorage.getWriteCache()).thenReturn(testWriteCache);
when(testStorage.getWALInstance()).thenReturn(null);
when(testStorage.getStorageTransaction()).thenReturn(null);
when(testStorage.getAtomicOperationsManager()).thenReturn(testAtomicOperationsManager);
when(testStorage.getVariableParser()).thenReturn(variableParser);
when(testStorage.getConfiguration()).thenReturn(storageConfiguration);
when(testStorage.getMode()).thenReturn("rw");
when(storageConfiguration.getDirectory()).thenReturn(testStorageDir);
testCluster = new OPaginatedCluster("testPaginatedClusterWithWALTest", testStorage);
testCluster.configure(testStorage, 6, "testPaginatedClusterWithWALTest", buildDirectory, -1);
testCluster.create(-1);
}
@AfterMethod
public void afterMethod() throws IOException {
Assert.assertNull(atomicOperationsManager.getCurrentOperation());
writeAheadLog.delete();
readCache.deleteStorage(writeCache);
testCluster.delete();
testReadCache.deleteStorage(testWriteCache);
File file = new File(storageDir);
Assert.assertTrue(file.delete());
file = new File(testStorageDir);
Assert.assertTrue(file.delete());
file = new File(buildDirectory);
Assert.assertTrue(file.delete());
}
@BeforeClass
@Override
public void beforeClass() throws IOException {
System.out.println("Start LocalPaginatedClusterWithWALTest");
}
@AfterClass
@Override
public void afterClass() throws IOException {
System.out.println("End LocalPaginatedClusterWithWALTest");
}
@Override
public void testAddOneSmallRecord() throws IOException {
super.testAddOneSmallRecord();
assertFileRestoreFromWAL();
}
@Override
public void testAddOneBigRecord() throws IOException {
super.testAddOneBigRecord();
assertFileRestoreFromWAL();
}
@Override
public void testUpdateOneSmallRecord() throws IOException {
super.testUpdateOneSmallRecord();
assertFileRestoreFromWAL();
}
@Override
public void testUpdateOneBigRecord() throws IOException {
super.testUpdateOneBigRecord();
assertFileRestoreFromWAL();
}
@Override
public void testDeleteRecordAndAddNewOnItsPlace() throws IOException {
super.testDeleteRecordAndAddNewOnItsPlace();
assertFileRestoreFromWAL();
}
@Override
public void testAddManySmallRecords() throws IOException {
super.testAddManySmallRecords();
assertFileRestoreFromWAL();
}
@Override
public void testUpdateOneSmallRecordVersionIsLowerCurrentOne() throws IOException {
super.testUpdateOneSmallRecordVersionIsLowerCurrentOne();
assertFileRestoreFromWAL();
}
@Override
public void testUpdateOneSmallRecordVersionIsMinusTwo() throws IOException {
super.testUpdateOneSmallRecordVersionIsMinusTwo();
assertFileRestoreFromWAL();
}
@Override
public void testUpdateManySmallRecords() throws IOException {
super.testUpdateManySmallRecords();
assertFileRestoreFromWAL();
}
@Override
public void testAddManyRecords() throws IOException {
super.testAddManyRecords();
assertFileRestoreFromWAL();
}
@Override
public void testAddManyBigRecords() throws IOException {
super.testAddManyBigRecords();
assertFileRestoreFromWAL();
}
@Override
public void testUpdateManyRecords() throws IOException {
super.testUpdateManyRecords();
assertFileRestoreFromWAL();
}
@Override
public void testUpdateManyBigRecords() throws IOException {
super.testUpdateManyBigRecords();
assertFileRestoreFromWAL();
}
@Override
public void testRemoveHalfSmallRecords() throws IOException {
super.testRemoveHalfSmallRecords();
assertFileRestoreFromWAL();
}
@Override
public void testRemoveHalfRecords() throws IOException {
super.testRemoveHalfRecords();
assertFileRestoreFromWAL();
}
@Override
public void testRemoveHalfBigRecords() throws IOException {
super.testRemoveHalfBigRecords();
assertFileRestoreFromWAL();
}
@Override
public void testRemoveHalfRecordsAndAddAnotherHalfAgain() throws IOException {
super.testRemoveHalfRecordsAndAddAnotherHalfAgain();
assertFileRestoreFromWAL();
}
@Override
public void testHideHalfSmallRecords() throws IOException {
super.testHideHalfSmallRecords();
assertFileRestoreFromWAL();
}
@Override
public void testHideHalfBigRecords() throws IOException {
super.testHideHalfBigRecords();
assertFileRestoreFromWAL();
}
@Override
public void testHideHalfRecords() throws IOException {
super.testHideHalfRecords();
assertFileRestoreFromWAL();
}
@Override
public void testHideHalfRecordsAndAddAnotherHalfAgain() throws IOException {
super.testHideHalfRecordsAndAddAnotherHalfAgain();
assertFileRestoreFromWAL();
}
@Override
@Test(enabled = false)
public void testForwardIteration() throws IOException {
super.testForwardIteration();
}
@Override
@Test(enabled = false)
public void testBackwardIteration() throws IOException {
super.testBackwardIteration();
}
@Override
@Test(enabled = false)
public void testGetPhysicalPosition() throws IOException {
super.testGetPhysicalPosition();
}
@Override
@Test(enabled = false)
public void testRecordGrowFactor() throws Exception {
super.testRecordGrowFactor();
}
@Override
@Test(enabled = false)
public void testRecordOverflowGrowFactor() throws Exception {
super.testRecordOverflowGrowFactor();
}
private void assertFileRestoreFromWAL() throws IOException {
paginatedCluster.close();
writeAheadLog.close();
readCache.clear();
restoreClusterFromWAL();
testCluster.close();
assertClusterContentIsTheSame(testCluster.getName(), paginatedCluster.getName());
testCluster.open();
paginatedCluster.open();
}
private void restoreClusterFromWAL() throws IOException {
ODiskWriteAheadLog log = new ODiskWriteAheadLog(4, -1, 10 * 1024L * OWALPage.PAGE_SIZE, storage);
OLogSequenceNumber lsn = log.begin();
List<OWALRecord> atomicUnit = new ArrayList<OWALRecord>();
boolean atomicChangeIsProcessed = false;
while (lsn != null) {
OWALRecord walRecord = log.read(lsn);
atomicUnit.add(walRecord);
if (!atomicChangeIsProcessed) {
Assert.assertTrue(walRecord instanceof OAtomicUnitStartRecord);
atomicChangeIsProcessed = true;
} else if (walRecord instanceof OAtomicUnitEndRecord) {
atomicChangeIsProcessed = false;
for (OWALRecord restoreRecord : atomicUnit) {
if (restoreRecord instanceof OAtomicUnitStartRecord || restoreRecord instanceof OAtomicUnitEndRecord
|| restoreRecord instanceof OFileCreatedWALRecord || restoreRecord instanceof ONonTxOperationPerformedWALRecord)
continue;
final OUpdatePageRecord updatePageRecord = (OUpdatePageRecord) restoreRecord;
final long fileId = updatePageRecord.getFileId();
final long pageIndex = updatePageRecord.getPageIndex();
if (!testWriteCache.isOpen(fileId))
testReadCache.openFile(fileId, testWriteCache);
OCacheEntry cacheEntry = testReadCache.load(fileId, pageIndex, true, testWriteCache);
if (cacheEntry == null) {
do {
if (cacheEntry != null)
readCache.release(cacheEntry, testWriteCache);
cacheEntry = testReadCache.allocateNewPage(fileId, testWriteCache);
} while (cacheEntry.getPageIndex() != pageIndex);
}
cacheEntry.acquireExclusiveLock();
try {
ODurablePage durablePage = new ODurablePage(cacheEntry, null);
durablePage.restoreChanges(updatePageRecord.getChanges());
durablePage.setLsn(updatePageRecord.getLsn());
cacheEntry.markDirty();
} finally {
cacheEntry.releaseExclusiveLock();
testReadCache.release(cacheEntry, testWriteCache);
}
}
atomicUnit.clear();
} else {
Assert.assertTrue(walRecord instanceof OUpdatePageRecord || walRecord instanceof OFileCreatedWALRecord
|| walRecord instanceof ONonTxOperationPerformedWALRecord);
}
lsn = log.next(lsn);
}
Assert.assertTrue(atomicUnit.isEmpty());
log.close();
}
private void assertClusterContentIsTheSame(String expectedCluster, String actualCluster) throws IOException {
File expectedDataFile = new File(testStorageDir, expectedCluster + ".pcl");
RandomAccessFile datFileOne = new RandomAccessFile(expectedDataFile, "r");
RandomAccessFile datFileTwo = new RandomAccessFile(new File(storageDir, actualCluster + ".pcl"), "r");
assertFileContentIsTheSame(datFileOne, datFileTwo);
datFileOne.close();
datFileTwo.close();
File expectedRIDMapFile = new File(testStorageDir, expectedCluster + ".cpm");
RandomAccessFile ridMapOne = new RandomAccessFile(expectedRIDMapFile, "r");
RandomAccessFile ridMapTwo = new RandomAccessFile(new File(storageDir, actualCluster + ".cpm"), "r");
assertFileContentIsTheSame(ridMapOne, ridMapTwo);
ridMapOne.close();
ridMapTwo.close();
}
private void assertFileContentIsTheSame(RandomAccessFile datFileOne, RandomAccessFile datFileTwo) throws IOException {
Assert.assertEquals(datFileOne.length(), datFileTwo.length());
byte[] expectedContent = new byte[OClusterPage.PAGE_SIZE];
byte[] actualContent = new byte[OClusterPage.PAGE_SIZE];
datFileOne.seek(OFileClassic.HEADER_SIZE);
datFileTwo.seek(OFileClassic.HEADER_SIZE);
int bytesRead = datFileOne.read(expectedContent);
while (bytesRead >= 0) {
datFileTwo.readFully(actualContent, 0, bytesRead);
Assert.assertEquals(expectedContent, actualContent);
expectedContent = new byte[OClusterPage.PAGE_SIZE];
actualContent = new byte[OClusterPage.PAGE_SIZE];
bytesRead = datFileOne.read(expectedContent);
}
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.python;
import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.file.WriteFile;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.model.Pair;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.ImplicitDepsInferringDescription;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.args.MacroArg;
import com.facebook.buck.rules.macros.LocationMacroExpander;
import com.facebook.buck.rules.macros.MacroHandler;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.OptionalCompat;
import com.facebook.buck.versions.Version;
import com.facebook.buck.versions.VersionRoot;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Maps;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
/**
 * {@link Description} for {@code python_test} rules. Builds a PEX that bundles the test
 * sources, a generated {@code __test_modules__.py} listing, and the configured test main,
 * then wraps the resulting binary in a {@link PythonTest} rule.
 */
public class PythonTestDescription implements
    Description<PythonTestDescription.Arg>,
    ImplicitDepsInferringDescription<PythonTestDescription.Arg>,
    VersionRoot<PythonTestDescription.Arg> {

  private static final Flavor BINARY_FLAVOR = InternalFlavor.of("binary");

  // Expands $(location ...) macros in the `env` argument values.
  private static final MacroHandler MACRO_HANDLER =
      new MacroHandler(
          ImmutableMap.of(
              "location", new LocationMacroExpander()));

  private final PythonBinaryDescription binaryDescription;
  private final PythonBuckConfig pythonBuckConfig;
  private final FlavorDomain<PythonPlatform> pythonPlatforms;
  private final CxxBuckConfig cxxBuckConfig;
  private final CxxPlatform defaultCxxPlatform;
  private final Optional<Long> defaultTestRuleTimeoutMs;
  private final FlavorDomain<CxxPlatform> cxxPlatforms;

  public PythonTestDescription(
      PythonBinaryDescription binaryDescription,
      PythonBuckConfig pythonBuckConfig,
      FlavorDomain<PythonPlatform> pythonPlatforms,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform defaultCxxPlatform,
      Optional<Long> defaultTestRuleTimeoutMs,
      FlavorDomain<CxxPlatform> cxxPlatforms) {
    this.binaryDescription = binaryDescription;
    this.pythonBuckConfig = pythonBuckConfig;
    this.pythonPlatforms = pythonPlatforms;
    this.cxxBuckConfig = cxxBuckConfig;
    this.defaultCxxPlatform = defaultCxxPlatform;
    this.defaultTestRuleTimeoutMs = defaultTestRuleTimeoutMs;
    this.cxxPlatforms = cxxPlatforms;
  }

  @Override
  public Arg createUnpopulatedConstructorArg() {
    return new Arg();
  }

  /** Module name under which the test main is installed into the PEX. */
  @VisibleForTesting
  protected static Path getTestMainName() {
    return Paths.get("__test_main__.py");
  }

  /** Module name of the generated source listing the test modules to run. */
  @VisibleForTesting
  protected static Path getTestModulesListName() {
    return Paths.get("__test_modules__.py");
  }

  /** Location in gen where the test-modules listing file is written. */
  @VisibleForTesting
  protected static Path getTestModulesListPath(
      BuildTarget buildTarget,
      ProjectFilesystem filesystem) {
    return BuildTargets.getGenPath(filesystem, buildTarget, "%s").resolve(getTestModulesListName());
  }

  /**
   * Create the contents of a python source file that just contains a list of
   * the given test modules.
   */
  private static String getTestModulesListContents(ImmutableSet<String> modules) {
    // Use StringBuilder rather than repeated String `+=` (accidental O(n^2) for
    // rules with many test modules); output is byte-identical.
    StringBuilder contents = new StringBuilder("TEST_MODULES = [\n");
    for (String module : modules) {
      contents.append(String.format("  \"%s\",\n", module));
    }
    contents.append("]");
    return contents.toString();
  }

  /**
   * Return a {@link BuildRule} that constructs the source file which contains the list
   * of test modules this python test rule will run. Setting up a separate build rule
   * for this allows us to use the existing python binary rule without changes to account
   * for the build-time creation of this file.
   */
  private static BuildRule createTestModulesSourceBuildRule(
      BuildRuleParams params,
      Path outputPath,
      ImmutableSet<String> testModules) {

    // Modify the build rule params to change the target, type, and remove all deps.
    params.getBuildTarget().checkUnflavored();
    BuildRuleParams newParams = params
        .withAppendedFlavor(InternalFlavor.of("test_module"))
        .copyReplacingDeclaredAndExtraDeps(
            Suppliers.ofInstance(ImmutableSortedSet.of()),
            Suppliers.ofInstance(ImmutableSortedSet.of()));

    String contents = getTestModulesListContents(testModules);

    return new WriteFile(
        newParams,
        contents,
        outputPath,
        /* executable */ false);
  }

  /**
   * Builds the test rule: resolves the python/C++ platforms, collects sources and
   * resources, generates the test-modules listing, packages everything into a
   * {@code #binary}-flavored PEX, validates {@code needed_coverage} specs, and wraps
   * the result in a {@link PythonTest}.
   */
  @Override
  public <A extends Arg> PythonTest createBuildRule(
      TargetGraph targetGraph,
      final BuildRuleParams params,
      final BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      final A args) throws HumanReadableException, NoSuchBuildTargetException {

    // Platform selection: an explicit flavor on the target wins, then the `platform`
    // arg, then the first registered python platform.
    PythonPlatform pythonPlatform =
        pythonPlatforms.getValue(params.getBuildTarget()).orElse(
            pythonPlatforms.getValue(
                args.platform.<Flavor>map(InternalFlavor::of).orElse(
                    pythonPlatforms.getFlavors().iterator().next())));
    CxxPlatform cxxPlatform = cxxPlatforms.getValue(params.getBuildTarget()).orElse(
        defaultCxxPlatform);
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    Path baseModule = PythonUtil.getBasePath(params.getBuildTarget(), args.baseModule);
    Optional<ImmutableMap<BuildTarget, Version>> selectedVersions =
        targetGraph.get(params.getBuildTarget()).getSelectedVersions();

    ImmutableMap<Path, SourcePath> srcs =
        PythonUtil.getModules(
            params.getBuildTarget(),
            resolver,
            ruleFinder,
            pathResolver,
            pythonPlatform,
            cxxPlatform,
            "srcs",
            baseModule,
            args.srcs,
            args.platformSrcs,
            args.versionedSrcs,
            selectedVersions);

    ImmutableMap<Path, SourcePath> resources =
        PythonUtil.getModules(
            params.getBuildTarget(),
            resolver,
            ruleFinder,
            pathResolver,
            pythonPlatform,
            cxxPlatform,
            "resources",
            baseModule,
            args.resources,
            args.platformResources,
            args.versionedResources,
            selectedVersions);

    // Convert the passed in module paths into test module names.
    ImmutableSet.Builder<String> testModulesBuilder = ImmutableSet.builder();
    for (Path name : srcs.keySet()) {
      testModulesBuilder.add(
          PythonUtil.toModuleName(params.getBuildTarget(), name.toString()));
    }
    ImmutableSet<String> testModules = testModulesBuilder.build();

    // Construct a build rule to generate the test modules list source file and
    // add it to the build.
    BuildRule testModulesBuildRule = createTestModulesSourceBuildRule(
        params,
        getTestModulesListPath(params.getBuildTarget(), params.getProjectFilesystem()),
        testModules);
    resolver.addToIndex(testModulesBuildRule);

    String mainModule;
    if (args.mainModule.isPresent()) {
      mainModule = args.mainModule.get();
    } else {
      mainModule = PythonUtil.toModuleName(params.getBuildTarget(), getTestMainName().toString());
    }

    // Build up the list of everything going into the python test.
    PythonPackageComponents testComponents = PythonPackageComponents.of(
        ImmutableMap
            .<Path, SourcePath>builder()
            .put(
                getTestModulesListName(),
                testModulesBuildRule.getSourcePathToOutput())
            .put(
                getTestMainName(),
                pythonBuckConfig.getPathToTestMain(params.getProjectFilesystem()))
            .putAll(srcs)
            .build(),
        resources,
        ImmutableMap.of(),
        ImmutableSet.of(),
        args.zipSafe);
    PythonPackageComponents allComponents =
        PythonUtil.getAllComponents(
            params,
            resolver,
            ruleFinder,
            testComponents,
            pythonPlatform,
            cxxBuckConfig,
            cxxPlatform,
            args.linkerFlags.stream()
                .map(MacroArg.toMacroArgFunction(
                    PythonUtil.MACRO_HANDLER,
                    params.getBuildTarget(),
                    cellRoots,
                    resolver)::apply)
                .collect(MoreCollectors.toImmutableList()),
            pythonBuckConfig.getNativeLinkStrategy(),
            args.preloadDeps);

    // Build the PEX using a python binary rule with the minimum dependencies.
    params.getBuildTarget().checkUnflavored();
    PythonBinary binary =
        binaryDescription.createPackageRule(
            params.withAppendedFlavor(BINARY_FLAVOR),
            resolver,
            ruleFinder,
            pythonPlatform,
            cxxPlatform,
            mainModule,
            args.extension,
            allComponents,
            args.buildArgs,
            args.packageStyle.orElse(pythonBuckConfig.getPackageStyle()),
            PythonUtil.getPreloadNames(
                resolver,
                cxxPlatform,
                args.preloadDeps));
    resolver.addToIndex(binary);

    // Validate needed_coverage: each referenced target must be a python library
    // dependency; resolve the covered paths (a single named path, or all modules).
    ImmutableList.Builder<Pair<Float, ImmutableSet<Path>>> neededCoverageBuilder =
        ImmutableList.builder();
    for (NeededCoverageSpec coverageSpec : args.neededCoverage) {
      BuildRule buildRule = resolver.getRule(coverageSpec.getBuildTarget());
      if (params.getDeps().contains(buildRule) &&
          buildRule instanceof PythonLibrary) {
        PythonLibrary pythonLibrary = (PythonLibrary) buildRule;
        ImmutableSortedSet<Path> paths;
        if (coverageSpec.getPathName().isPresent()) {
          Path path = coverageSpec.getBuildTarget().getBasePath().resolve(
              coverageSpec.getPathName().get());
          if (!pythonLibrary.getPythonPackageComponents(pythonPlatform, cxxPlatform)
              .getModules().keySet().contains(path)) {
            throw new HumanReadableException(
                "%s: path %s specified in needed_coverage not found in target %s",
                params.getBuildTarget(),
                path,
                buildRule.getBuildTarget());
          }
          paths = ImmutableSortedSet.of(path);
        } else {
          paths =
              ImmutableSortedSet.copyOf(
                  pythonLibrary.getPythonPackageComponents(pythonPlatform, cxxPlatform)
                      .getModules()
                      .keySet());
        }
        neededCoverageBuilder.add(
            new Pair<Float, ImmutableSet<Path>>(
                coverageSpec.getNeededCoverageRatio(),
                paths));
      } else {
        throw new HumanReadableException(
            "%s: needed_coverage requires a python library dependency. Found %s instead",
            params.getBuildTarget(), buildRule);
      }
    }

    // Lazily expand $(location ...) macros in env values at execution time.
    Supplier<ImmutableMap<String, String>> testEnv =
        () -> ImmutableMap.copyOf(
            Maps.transformValues(
                args.env,
                MACRO_HANDLER.getExpander(
                    params.getBuildTarget(),
                    cellRoots,
                    resolver)));

    // Generate and return the python test rule, which depends on the python binary rule above.
    return PythonTest.from(
        params,
        ruleFinder,
        testEnv,
        binary,
        args.labels,
        neededCoverageBuilder.build(),
        args.testRuleTimeoutMs.map(Optional::of).orElse(defaultTestRuleTimeoutMs),
        args.contacts);
  }

  /**
   * Parse-time deps: the C/C++ linker (for native-lib handling) and, for standalone
   * packaging, the PEX builder/executor targets.
   */
  @Override
  public Iterable<BuildTarget> findDepsForTargetFromConstructorArgs(
      BuildTarget buildTarget,
      CellPathResolver cellRoots,
      Arg constructorArg) {
    ImmutableList.Builder<BuildTarget> targets = ImmutableList.builder();

    // We need to use the C/C++ linker for native libs handling, so add in the C/C++ linker to
    // parse time deps.
    targets.addAll(
        cxxPlatforms.getValue(buildTarget).orElse(defaultCxxPlatform).getLd().getParseTimeDeps());

    if (constructorArg.packageStyle.orElse(pythonBuckConfig.getPackageStyle()) ==
        PythonBuckConfig.PackageStyle.STANDALONE) {
      targets.addAll(OptionalCompat.asSet(pythonBuckConfig.getPexTarget()));
      targets.addAll(OptionalCompat.asSet(pythonBuckConfig.getPexExecutorTarget()));
    }

    return targets.build();
  }

  @Override
  public boolean isVersionRoot(ImmutableSet<Flavor> flavors) {
    return true;
  }

  /** Constructor args accepted by {@code python_test} rules. */
  @SuppressFieldNotInitialized
  public static class Arg extends PythonLibraryDescription.Arg {
    public Optional<String> mainModule;
    public ImmutableSet<String> contacts = ImmutableSet.of();
    public Optional<String> platform;
    public Optional<String> extension;
    public Optional<PythonBuckConfig.PackageStyle> packageStyle;
    public ImmutableSet<BuildTarget> preloadDeps = ImmutableSet.of();
    public ImmutableList<String> linkerFlags = ImmutableList.of();
    public ImmutableList<NeededCoverageSpec> neededCoverage = ImmutableList.of();
    public ImmutableList<String> buildArgs = ImmutableList.of();
    public ImmutableMap<String, String> env = ImmutableMap.of();
    public Optional<Long> testRuleTimeoutMs;
    public Optional<String> versionUniverse;
  }
}
| |
/*
* Copyright 2004-2013 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.mysql.dbflute.cbean.cq.bs;
import java.util.*;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.ckey.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.ordering.*;
import org.dbflute.cbean.scoping.*;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.dbmeta.DBMetaProvider;
import org.docksidestage.mysql.dbflute.allcommon.*;
import org.docksidestage.mysql.dbflute.cbean.*;
import org.docksidestage.mysql.dbflute.cbean.cq.*;
/**
* The abstract condition-query of white_suppress_def_check.
* @author DBFlute(AutoGenerator)
*/
public abstract class AbstractBsWhiteSuppressDefCheckCQ extends AbstractConditionQuery {
// ===================================================================================
// Constructor
// ===========
public AbstractBsWhiteSuppressDefCheckCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
super(referrerQuery, sqlClause, aliasName, nestLevel);
}
// ===================================================================================
// DB Meta
// =======
@Override
protected DBMetaProvider xgetDBMetaProvider() {
return DBMetaInstanceHandler.getProvider();
}
public String asTableDbName() {
return "white_suppress_def_check";
}
// ===================================================================================
// Query
// =====
/**
* Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param defCheckId The value of defCheckId as equal. (basically NotNull: error as default, or no condition as option)
*/
public void setDefCheckId_Equal(Long defCheckId) {
doSetDefCheckId_Equal(defCheckId);
}
protected void doSetDefCheckId_Equal(Long defCheckId) {
regDefCheckId(CK_EQ, defCheckId);
}
/**
* GreaterThan(>). And NullIgnored, OnlyOnceRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param defCheckId The value of defCheckId as greaterThan. (basically NotNull: error as default, or no condition as option)
*/
public void setDefCheckId_GreaterThan(Long defCheckId) {
regDefCheckId(CK_GT, defCheckId);
}
/**
* LessThan(<). And NullIgnored, OnlyOnceRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param defCheckId The value of defCheckId as lessThan. (basically NotNull: error as default, or no condition as option)
*/
public void setDefCheckId_LessThan(Long defCheckId) {
regDefCheckId(CK_LT, defCheckId);
}
/**
* GreaterEqual(>=). And NullIgnored, OnlyOnceRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param defCheckId The value of defCheckId as greaterEqual. (basically NotNull: error as default, or no condition as option)
*/
public void setDefCheckId_GreaterEqual(Long defCheckId) {
regDefCheckId(CK_GE, defCheckId);
}
/**
* LessEqual(<=). And NullIgnored, OnlyOnceRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param defCheckId The value of defCheckId as lessEqual. (basically NotNull: error as default, or no condition as option)
*/
public void setDefCheckId_LessEqual(Long defCheckId) {
regDefCheckId(CK_LE, defCheckId);
}
/**
* RangeOf with various options. (versatile) <br>
* {(default) minNumber <= column <= maxNumber} <br>
* And NullIgnored, OnlyOnceRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param minNumber The min number of defCheckId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param maxNumber The max number of defCheckId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param opLambda The callback for option of range-of. (NotNull)
*/
public void setDefCheckId_RangeOf(Long minNumber, Long maxNumber, ConditionOptionCall<RangeOfOption> opLambda) {
setDefCheckId_RangeOf(minNumber, maxNumber, xcROOP(opLambda));
}
/**
* RangeOf with various options. (versatile) <br>
* {(default) minNumber <= column <= maxNumber} <br>
* And NullIgnored, OnlyOnceRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param minNumber The min number of defCheckId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param maxNumber The max number of defCheckId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param rangeOfOption The option of range-of. (NotNull)
*/
public void setDefCheckId_RangeOf(Long minNumber, Long maxNumber, RangeOfOption rangeOfOption) {
regROO(minNumber, maxNumber, xgetCValueDefCheckId(), "DEF_CHECK_ID", rangeOfOption);
}
/**
* InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
* DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
* @param defCheckIdList The collection of defCheckId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
*/
public void setDefCheckId_InScope(Collection<Long> defCheckIdList) {
doSetDefCheckId_InScope(defCheckIdList);
}
protected void doSetDefCheckId_InScope(Collection<Long> defCheckIdList) {
regINS(CK_INS, cTL(defCheckIdList), xgetCValueDefCheckId(), "DEF_CHECK_ID");
}
    /**
     * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
     * @param defCheckIdList The collection of defCheckId as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setDefCheckId_NotInScope(Collection<Long> defCheckIdList) {
        doSetDefCheckId_NotInScope(defCheckIdList);
    }
    // Internal registration of the not-in-scope condition (CK_NINS key).
    protected void doSetDefCheckId_NotInScope(Collection<Long> defCheckIdList) {
        regINS(CK_NINS, cTL(defCheckIdList), xgetCValueDefCheckId(), "DEF_CHECK_ID");
    }
    /**
     * IsNull {is null}. And OnlyOnceRegistered. <br>
     * DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
     * (NOTE(review): the column is NotNull, so this condition can never match rows — kept for generated-API completeness.)
     */
    public void setDefCheckId_IsNull() { regDefCheckId(CK_ISN, DOBJ); }
    /**
     * IsNotNull {is not null}. And OnlyOnceRegistered. <br>
     * DEF_CHECK_ID: {PK, NotNull, BIGINT(19)}
     */
    public void setDefCheckId_IsNotNull() { regDefCheckId(CK_ISNN, DOBJ); }
    // Common registration point for DEF_CHECK_ID: condition key + value against the column's ConditionValue holder.
    protected void regDefCheckId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueDefCheckId(), "DEF_CHECK_ID"); }
    // Supplies the ConditionValue holder for DEF_CHECK_ID (implemented by the concrete condition-query subclass).
    protected abstract ConditionValue xgetCValueDefCheckId();
    /**
     * Equal(=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)}
     * @param defCheckName The value of defCheckName as equal. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setDefCheckName_Equal(String defCheckName) {
        // fRES() is a framework helper — presumably filters empty strings per the NullOrEmptyIgnored contract; TODO confirm
        doSetDefCheckName_Equal(fRES(defCheckName));
    }
    // Internal registration of the equal condition (CK_EQ key).
    protected void doSetDefCheckName_Equal(String defCheckName) {
        regDefCheckName(CK_EQ, defCheckName);
    }
    /**
     * NotEqual(&lt;&gt;). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)}
     * @param defCheckName The value of defCheckName as notEqual. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setDefCheckName_NotEqual(String defCheckName) {
        doSetDefCheckName_NotEqual(fRES(defCheckName));
    }
    // Internal registration of the not-equal condition (CK_NES key).
    protected void doSetDefCheckName_NotEqual(String defCheckName) {
        regDefCheckName(CK_NES, defCheckName);
    }
    /**
     * InScope {in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)}
     * @param defCheckNameList The collection of defCheckName as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setDefCheckName_InScope(Collection<String> defCheckNameList) {
        doSetDefCheckName_InScope(defCheckNameList);
    }
    // Internal registration of the in-scope condition on DEF_CHECK_NAME.
    protected void doSetDefCheckName_InScope(Collection<String> defCheckNameList) {
        regINS(CK_INS, cTL(defCheckNameList), xgetCValueDefCheckName(), "DEF_CHECK_NAME");
    }
    /**
     * NotInScope {not in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)}
     * @param defCheckNameList The collection of defCheckName as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setDefCheckName_NotInScope(Collection<String> defCheckNameList) {
        doSetDefCheckName_NotInScope(defCheckNameList);
    }
    // Internal registration of the not-in-scope condition on DEF_CHECK_NAME.
    protected void doSetDefCheckName_NotInScope(Collection<String> defCheckNameList) {
        regINS(CK_NINS, cTL(defCheckNameList), xgetCValueDefCheckName(), "DEF_CHECK_NAME");
    }
    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)} <br>
     * <pre>e.g. setDefCheckName_LikeSearch("xxx", op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> op.<span style="color: #CC4747">likeContain()</span>);</pre>
     * @param defCheckName The value of defCheckName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setDefCheckName_LikeSearch(String defCheckName, ConditionOptionCall<LikeSearchOption> opLambda) {
        // resolve the lambda into a concrete LikeSearchOption and delegate
        setDefCheckName_LikeSearch(defCheckName, xcLSOP(opLambda));
    }
    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)} <br>
     * <pre>e.g. setDefCheckName_LikeSearch("xxx", new <span style="color: #CC4747">LikeSearchOption</span>().likeContain());</pre>
     * @param defCheckName The value of defCheckName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of like-search. (NotNull)
     */
    public void setDefCheckName_LikeSearch(String defCheckName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_LS, fRES(defCheckName), xgetCValueDefCheckName(), "DEF_CHECK_NAME", likeSearchOption);
    }
    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)}
     * @param defCheckName The value of defCheckName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setDefCheckName_NotLikeSearch(String defCheckName, ConditionOptionCall<LikeSearchOption> opLambda) {
        // resolve the lambda into a concrete LikeSearchOption and delegate
        setDefCheckName_NotLikeSearch(defCheckName, xcLSOP(opLambda));
    }
    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * DEF_CHECK_NAME: {NotNull, VARCHAR(200)}
     * @param defCheckName The value of defCheckName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of not-like-search. (NotNull)
     */
    public void setDefCheckName_NotLikeSearch(String defCheckName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_NLS, fRES(defCheckName), xgetCValueDefCheckName(), "DEF_CHECK_NAME", likeSearchOption);
    }
    // Common registration point for DEF_CHECK_NAME conditions.
    protected void regDefCheckName(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueDefCheckName(), "DEF_CHECK_NAME"); }
    // Supplies the ConditionValue holder for DEF_CHECK_NAME (implemented by the concrete condition-query subclass).
    protected abstract ConditionValue xgetCValueDefCheckName();
    // ===================================================================================
    //                                                                     ScalarCondition
    //                                                                     ===============
    /**
     * Prepare ScalarCondition as equal. <br>
     * {where FOO = (select max(BAR) from ...)}
     * <pre>
     * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
     *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
     * });
     * </pre>
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteSuppressDefCheckCB> scalar_Equal() {
        return xcreateSLCFunction(CK_EQ, WhiteSuppressDefCheckCB.class);
    }
    /**
     * Prepare ScalarCondition as notEqual. <br>
     * {where FOO &lt;&gt; (select max(BAR) from ...)}
     * <pre>
     * cb.query().scalar_NotEqual().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
     *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
     * });
     * </pre>
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteSuppressDefCheckCB> scalar_NotEqual() {
        return xcreateSLCFunction(CK_NES, WhiteSuppressDefCheckCB.class);
    }
    /**
     * Prepare ScalarCondition as greaterThan. <br>
     * {where FOO &gt; (select max(BAR) from ...)}
     * <pre>
     * cb.query().scalar_GreaterThan().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
     *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
     * });
     * </pre>
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteSuppressDefCheckCB> scalar_GreaterThan() {
        return xcreateSLCFunction(CK_GT, WhiteSuppressDefCheckCB.class);
    }
    /**
     * Prepare ScalarCondition as lessThan. <br>
     * {where FOO &lt; (select max(BAR) from ...)}
     * <pre>
     * cb.query().scalar_LessThan().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
     *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
     * });
     * </pre>
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteSuppressDefCheckCB> scalar_LessThan() {
        return xcreateSLCFunction(CK_LT, WhiteSuppressDefCheckCB.class);
    }
    /**
     * Prepare ScalarCondition as greaterEqual. <br>
     * {where FOO &gt;= (select max(BAR) from ...)}
     * <pre>
     * cb.query().scalar_GreaterEqual().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
     *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
     * });
     * </pre>
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteSuppressDefCheckCB> scalar_GreaterEqual() {
        return xcreateSLCFunction(CK_GE, WhiteSuppressDefCheckCB.class);
    }
    /**
     * Prepare ScalarCondition as lessEqual. <br>
     * {where FOO &lt;= (select max(BAR) from ...)}
     * <pre>
     * cb.query().<span style="color: #CC4747">scalar_LessEqual()</span>.max(new SubQuery&lt;WhiteSuppressDefCheckCB&gt;() {
     *     public void query(WhiteSuppressDefCheckCB subCB) {
     *         subCB.specify().setFoo... <span style="color: #3F7E5E">// derived column for function</span>
     *         subCB.query().setBar...
     *     }
     * });
     * </pre>
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteSuppressDefCheckCB> scalar_LessEqual() {
        return xcreateSLCFunction(CK_LE, WhiteSuppressDefCheckCB.class);
    }
    // Framework entry for ScalarCondition: builds a sub-query condition-bean, lets the
    // caller's SubQuery populate it, then registers the scalar condition. The cast to CB
    // is safe because the generated CB type is what the framework passes back here.
    @SuppressWarnings("unchecked")
    protected <CB extends ConditionBean> void xscalarCondition(String fn, SubQuery<CB> sq, String rd, HpSLCCustomized<CB> cs, ScalarConditionOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteSuppressDefCheckCB cb = xcreateScalarConditionCB(); sq.query((CB)cb);
        String pp = keepScalarCondition(cb.query()); // for saving query-value
        cs.setPartitionByCBean((CB)xcreateScalarConditionPartitionByCB()); // for using partition-by
        registerScalarCondition(fn, cb.query(), pp, rd, cs, op);
    }
    // Keeps the sub-query's condition for later SQL building (implemented by the concrete subclass).
    public abstract String keepScalarCondition(WhiteSuppressDefCheckCQ sq);
    // Creates a fresh CB wired for scalar-condition use.
    protected WhiteSuppressDefCheckCB xcreateScalarConditionCB() {
        WhiteSuppressDefCheckCB cb = newMyCB(); cb.xsetupForScalarCondition(this); return cb;
    }
    // Creates a fresh CB wired for the partition-by part of a scalar condition.
    protected WhiteSuppressDefCheckCB xcreateScalarConditionPartitionByCB() {
        WhiteSuppressDefCheckCB cb = newMyCB(); cb.xsetupForScalarConditionPartitionBy(this); return cb;
    }
    // ===================================================================================
    //                                                                       MyselfDerived
    //                                                                       =============
    // Framework entry for (Specify)MyselfDerived: runs the caller's sub-query under lock,
    // keeps its condition, and registers the derived-referrer keyed on the primary key.
    public void xsmyselfDerive(String fn, SubQuery<WhiteSuppressDefCheckCB> sq, String al, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteSuppressDefCheckCB cb = new WhiteSuppressDefCheckCB(); cb.xsetupForDerivedReferrer(this);
        lockCall(() -> sq.query(cb)); String pp = keepSpecifyMyselfDerived(cb.query()); String pk = "DEF_CHECK_ID";
        registerSpecifyMyselfDerived(fn, cb.query(), pk, pk, pp, "myselfDerived", al, op);
    }
    // Keeps the derived sub-query's condition for later SQL building (implemented by the concrete subclass).
    public abstract String keepSpecifyMyselfDerived(WhiteSuppressDefCheckCQ sq);
    /**
     * Prepare for (Query)MyselfDerived (correlated sub-query).
     * @return The object to set up a function for myself table. (NotNull)
     */
    public HpQDRFunction<WhiteSuppressDefCheckCB> myselfDerived() {
        return xcreateQDRFunctionMyselfDerived(WhiteSuppressDefCheckCB.class);
    }
    // Framework entry for (Query)MyselfDerived: runs the caller's sub-query, keeps both the
    // sub-query condition and the comparison parameter, then registers the derived condition
    // correlated on the primary key. The CB cast is safe for the generated type.
    @SuppressWarnings("unchecked")
    protected <CB extends ConditionBean> void xqderiveMyselfDerived(String fn, SubQuery<CB> sq, String rd, Object vl, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteSuppressDefCheckCB cb = new WhiteSuppressDefCheckCB(); cb.xsetupForDerivedReferrer(this); sq.query((CB)cb);
        String pk = "DEF_CHECK_ID";
        String sqpp = keepQueryMyselfDerived(cb.query()); // for saving query-value.
        String prpp = keepQueryMyselfDerivedParameter(vl);
        registerQueryMyselfDerived(fn, cb.query(), pk, pk, sqpp, "myselfDerived", rd, vl, prpp, op);
    }
    // Keeps the derived sub-query's condition (implemented by the concrete subclass).
    public abstract String keepQueryMyselfDerived(WhiteSuppressDefCheckCQ sq);
    // Keeps the comparison parameter for the derived condition (implemented by the concrete subclass).
    public abstract String keepQueryMyselfDerivedParameter(Object vl);
    // ===================================================================================
    //                                                                        MyselfExists
    //                                                                        ============
    /**
     * Prepare for MyselfExists (correlated sub-query).
     * @param subCBLambda The implementation of sub-query. (NotNull)
     */
    public void myselfExists(SubQuery<WhiteSuppressDefCheckCB> subCBLambda) {
        assertObjectNotNull("subCBLambda", subCBLambda);
        WhiteSuppressDefCheckCB cb = new WhiteSuppressDefCheckCB(); cb.xsetupForMyselfExists(this);
        lockCall(() -> subCBLambda.query(cb)); String pp = keepMyselfExists(cb.query());
        registerMyselfExists(cb.query(), pp);
    }
    // Keeps the exists sub-query's condition (implemented by the concrete subclass).
    public abstract String keepMyselfExists(WhiteSuppressDefCheckCQ sq);
    // ===================================================================================
    //                                                                    Full Text Search
    //                                                                    ================
    /**
     * Match for full-text search (single column convenience overload). <br>
     * Bind variable is unused because the condition value should be literal in MySQL.
     * @param textColumn The text column. (NotNull, StringColumn, TargetTableColumn)
     * @param conditionValue The condition value embedded without binding (by MySQL restriction) but escaped. (NullAllowed: if null or empty, no condition)
     * @param modifier The modifier of full-text search. (NullAllowed: If the value is null, No modifier specified)
     */
    public void match(org.dbflute.dbmeta.info.ColumnInfo textColumn
                    , String conditionValue
                    , org.dbflute.dbway.WayOfMySQL.FullTextSearchModifier modifier) {
        assertObjectNotNull("textColumn", textColumn);
        // wrap the single column and delegate to the list-based overload
        match(newArrayList(textColumn), conditionValue, modifier);
    }
    /**
     * Match for full-text search over a list of columns. <br>
     * Bind variable is unused because the condition value should be literal in MySQL.
     * @param textColumnList The list of text column. (NotNull, NotEmpty, StringColumn, TargetTableColumn)
     * @param conditionValue The condition value embedded without binding (by MySQL restriction) but escaped. (NullAllowed: if null or empty, no condition)
     * @param modifier The modifier of full-text search. (NullAllowed: If the value is null, no modifier specified)
     */
    public void match(List<org.dbflute.dbmeta.info.ColumnInfo> textColumnList
                    , String conditionValue
                    , org.dbflute.dbway.WayOfMySQL.FullTextSearchModifier modifier) {
        // MySQL-specific MATCH ... AGAINST registration handled by the framework
        xdoMatchForMySQL(textColumnList, conditionValue, modifier);
    }
    // ===================================================================================
    //                                                                        Manual Order
    //                                                                        ============
    /**
     * Order along manual ordering information.
     * <pre>
     * cb.query().addOrderBy_Birthdate_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_GreaterEqual</span>(priorityDate); <span style="color: #3F7E5E">// e.g. 2000/01/01</span>
     * });
     * <span style="color: #3F7E5E">// order by </span>
     * <span style="color: #3F7E5E">//   case</span>
     * <span style="color: #3F7E5E">//     when BIRTHDATE &gt;= '2000/01/01' then 0</span>
     * <span style="color: #3F7E5E">//     else 1</span>
     * <span style="color: #3F7E5E">//   end asc, ...</span>
     *
     * cb.query().addOrderBy_MemberStatusCode_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Withdrawal);
     *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Formalized);
     *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Provisional);
     * });
     * <span style="color: #3F7E5E">// order by </span>
     * <span style="color: #3F7E5E">//   case</span>
     * <span style="color: #3F7E5E">//     when MEMBER_STATUS_CODE = 'WDL' then 0</span>
     * <span style="color: #3F7E5E">//     when MEMBER_STATUS_CODE = 'FML' then 1</span>
     * <span style="color: #3F7E5E">//     when MEMBER_STATUS_CODE = 'PRV' then 2</span>
     * <span style="color: #3F7E5E">//     else 3</span>
     * <span style="color: #3F7E5E">//   end asc, ...</span>
     * </pre>
     * <p>This function with Union is unsupported!</p>
     * <p>The order values are bound (treated as bind parameter).</p>
     * @param opLambda The callback for option of manual-order containing order values. (NotNull)
     */
    public void withManualOrder(ManualOrderOptionCall opLambda) { // is user public!
        // resolve the lambda into a concrete manual-order option and register it
        xdoWithManualOrder(cMOO(opLambda));
    }
    // ===================================================================================
    //                                                                    Small Adjustment
    //                                                                    ================
    // ===================================================================================
    //                                                                       Very Internal
    //                                                                       =============
    // Factory for a fresh condition-bean of this table, used by the scalar-condition helpers.
    protected WhiteSuppressDefCheckCB newMyCB() {
        return new WhiteSuppressDefCheckCB();
    }
    // very internal (for suppressing warn about 'Not Use Import'):
    // these dummy accessors reference otherwise "unused" imported types so the
    // generator's import list never triggers unused-import warnings.
    protected String xabUDT() { return Date.class.getName(); }
    protected String xabCQ() { return WhiteSuppressDefCheckCQ.class.getName(); }
    protected String xabLSO() { return LikeSearchOption.class.getName(); }
    protected String xabSLCS() { return HpSLCSetupper.class.getName(); }
    protected String xabSCP() { return SubQuery.class.getName(); }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote.http11;
import java.io.EOFException;
import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import org.apache.coyote.ActionCode;
import org.apache.coyote.http11.filters.BufferedInputFilter;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.net.AbstractEndpoint.Handler.SocketState;
import org.apache.tomcat.util.net.JIoEndpoint;
import org.apache.tomcat.util.net.SSLSupport;
import org.apache.tomcat.util.net.SocketStatus;
import org.apache.tomcat.util.net.SocketWrapper;
/**
* Processes HTTP requests.
*
* @author Remy Maucherat
*/
/**
 * Processes HTTP requests over blocking I/O (BIO, java.net.Socket).
 * Wires blocking input/output buffers into the abstract HTTP/1.1 processor
 * and implements the BIO-specific timeout, keep-alive and SSL-attribute hooks.
 *
 * @author Remy Maucherat
 */
public class Http11Processor extends AbstractHttp11Processor<Socket> {
    private static final Log log = LogFactory.getLog(Http11Processor.class);
    @Override
    protected Log getLog() {
        return log;
    }
    // ------------------------------------------------------------ Constructor
    /**
     * Creates a BIO processor: blocking input/output buffers sized by
     * {@code headerBufferSize} are attached to the request/response, then the
     * standard filter chain (identity/chunked/void...) is initialized.
     */
    public Http11Processor(int headerBufferSize, JIoEndpoint endpoint,
            int maxTrailerSize, int maxExtensionSize) {
        super(endpoint);
        inputBuffer = new InternalInputBuffer(request, headerBufferSize);
        request.setInputBuffer(inputBuffer);
        outputBuffer = new InternalOutputBuffer(response, headerBufferSize);
        response.setOutputBuffer(outputBuffer);
        initializeFilters(maxTrailerSize, maxExtensionSize);
    }
    // ----------------------------------------------------- Instance Variables
    /**
     * SSL information.
     */
    protected SSLSupport sslSupport;
    /**
     * The percentage of threads that have to be in use before keep-alive is
     * disabled to aid scalability.
     */
    private int disableKeepAlivePercentage = 75;
    // --------------------------------------------------------- Public Methods
    /**
     * Set the SSL information for this HTTP connection.
     */
    @Override
    public void setSslSupport(SSLSupport sslSupport) {
        this.sslSupport = sslSupport;
    }
    public int getDisableKeepAlivePercentage() {
        return disableKeepAlivePercentage;
    }
    public void setDisableKeepAlivePercentage(int disableKeepAlivePercentage) {
        this.disableKeepAlivePercentage = disableKeepAlivePercentage;
    }
    /**
     * Disables keep-alive when the busy-thread ratio exceeds the configured
     * percentage, so near-saturated BIO thread pools shed idle connections.
     */
    @Override
    protected boolean disableKeepAlive() {
        int threadRatio = -1;
        // These may return zero or negative values
        // Only calculate a thread ratio when both are >0 to ensure we get a
        // sensible result
        int maxThreads, threadsBusy;
        if ((maxThreads = endpoint.getMaxThreads()) > 0
                && (threadsBusy = endpoint.getCurrentThreadsBusy()) > 0) {
            threadRatio = (threadsBusy * 100) / maxThreads;
        }
        // Disable keep-alive if we are running low on threads
        if (threadRatio > getDisableKeepAlivePercentage()) {
            return true;
        }
        return false;
    }
    @Override
    protected void setRequestLineReadTimeout() throws IOException {
        /*
         * When there is no data in the buffer and this is not the first
         * request on this connection and timeouts are being used the
         * first read for this request may need a different timeout to
         * take account of time spent waiting for a processing thread.
         *
         * This is a little hacky but better than exposing the socket
         * and the timeout info to the InputBuffer
         */
        if (inputBuffer.lastValid == 0 && socketWrapper.getLastAccess() > -1) {
            int firstReadTimeout;
            if (keepAliveTimeout == -1) {
                firstReadTimeout = 0;
            } else {
                long queueTime =
                    System.currentTimeMillis() - socketWrapper.getLastAccess();
                if (queueTime >= keepAliveTimeout) {
                    // Queued for longer than timeout but there might be
                    // data so use shortest possible timeout
                    firstReadTimeout = 1;
                } else {
                    // Cast is safe since queueTime must be less than
                    // keepAliveTimeout which is an int
                    firstReadTimeout = keepAliveTimeout - (int) queueTime;
                }
            }
            socketWrapper.getSocket().setSoTimeout(firstReadTimeout);
            // Blocking IO so fill() always blocks
            if (!inputBuffer.fill(true)) {
                throw new EOFException(sm.getString("iib.eof.error"));
            }
            // Once the first byte has been read, the standard timeout should be
            // used so restore it here.
            if (endpoint.getSoTimeout()> 0) {
                setSocketTimeout(endpoint.getSoTimeout());
            } else {
                setSocketTimeout(0);
            }
        }
    }
    @Override
    protected boolean handleIncompleteRequestLineRead() {
        // Not used with BIO since it uses blocking reads
        return false;
    }
    @Override
    protected void setSocketTimeout(int timeout) throws IOException {
        socketWrapper.getSocket().setSoTimeout(timeout);
    }
    @Override
    protected void setCometTimeouts(SocketWrapper<Socket> socketWrapper) {
        // NO-OP for BIO
    }
    /**
     * Leaves the keep-alive loop when no pipelined data is already buffered,
     * freeing this BIO worker thread for another connection.
     */
    @Override
    protected boolean breakKeepAliveLoop(SocketWrapper<Socket> socketWrapper) {
        openSocket = keepAlive;
        // If we don't have a pipe-lined request allow this thread to be
        // used by another connection
        if (inputBuffer.lastValid == 0) {
            return true;
        }
        return false;
    }
    @Override
    protected void registerForEvent(boolean read, boolean write) {
        // NO-OP for BIO
    }
    @Override
    protected void resetTimeouts() {
        // NO-OP for BIO
    }
    @Override
    protected void recycleInternal() {
        // Recycle
        this.socketWrapper = null;
        // Recycle ssl info
        sslSupport = null;
    }
    @Override
    public SocketState event(SocketStatus status) throws IOException {
        // Should never reach this code but in case we do...
        // (Comet events are not supported by the BIO connector.)
        throw new IOException(
                sm.getString("http11processor.comet.notsupported"));
    }
    // ----------------------------------------------------- ActionHook Methods
    /**
     * Send an action to the connector. Handles the BIO-specific actions:
     * populating SSL request attributes, lazily resolving remote/local
     * address, host and port from the socket, renegotiating for a client
     * certificate, and async complete/timeout/dispatch processing.
     *
     * @param actionCode Type of the action
     * @param param Action parameter
     */
    @Override
    public void actionInternal(ActionCode actionCode, Object param) {
        if (actionCode == ActionCode.REQ_SSL_ATTRIBUTE ) {
            try {
                if (sslSupport != null) {
                    Object sslO = sslSupport.getCipherSuite();
                    if (sslO != null)
                        request.setAttribute
                            (SSLSupport.CIPHER_SUITE_KEY, sslO);
                    sslO = sslSupport.getPeerCertificateChain(false);
                    if (sslO != null)
                        request.setAttribute
                            (SSLSupport.CERTIFICATE_KEY, sslO);
                    sslO = sslSupport.getKeySize();
                    if (sslO != null)
                        request.setAttribute
                            (SSLSupport.KEY_SIZE_KEY, sslO);
                    sslO = sslSupport.getSessionId();
                    if (sslO != null)
                        request.setAttribute
                            (SSLSupport.SESSION_ID_KEY, sslO);
                    request.setAttribute(SSLSupport.SESSION_MGR, sslSupport);
                }
            } catch (Exception e) {
                log.warn(sm.getString("http11processor.socket.ssl"), e);
            }
        } else if (actionCode == ActionCode.REQ_HOST_ADDR_ATTRIBUTE) {
            // lazily cache the remote address on the wrapper
            if (socketWrapper == null) {
                request.remoteAddr().recycle();
            } else {
                if (socketWrapper.getRemoteAddr() == null) {
                    InetAddress inetAddr = socketWrapper.getSocket().getInetAddress();
                    if (inetAddr != null) {
                        socketWrapper.setRemoteAddr(inetAddr.getHostAddress());
                    }
                }
                request.remoteAddr().setString(socketWrapper.getRemoteAddr());
            }
        } else if (actionCode == ActionCode.REQ_LOCAL_NAME_ATTRIBUTE) {
            if (socketWrapper == null) {
                request.localName().recycle();
            } else {
                if (socketWrapper.getLocalName() == null) {
                    InetAddress inetAddr = socketWrapper.getSocket().getLocalAddress();
                    if (inetAddr != null) {
                        socketWrapper.setLocalName(inetAddr.getHostName());
                    }
                }
                request.localName().setString(socketWrapper.getLocalName());
            }
        } else if (actionCode == ActionCode.REQ_HOST_ATTRIBUTE) {
            if (socketWrapper == null) {
                request.remoteHost().recycle();
            } else {
                if (socketWrapper.getRemoteHost() == null) {
                    InetAddress inetAddr = socketWrapper.getSocket().getInetAddress();
                    if (inetAddr != null) {
                        socketWrapper.setRemoteHost(inetAddr.getHostName());
                    }
                    // fall back to the remote address if reverse lookup failed
                    if (socketWrapper.getRemoteHost() == null) {
                        if (socketWrapper.getRemoteAddr() == null &&
                                inetAddr != null) {
                            socketWrapper.setRemoteAddr(inetAddr.getHostAddress());
                        }
                        if (socketWrapper.getRemoteAddr() != null) {
                            socketWrapper.setRemoteHost(socketWrapper.getRemoteAddr());
                        }
                    }
                }
                request.remoteHost().setString(socketWrapper.getRemoteHost());
            }
        } else if (actionCode == ActionCode.REQ_LOCAL_ADDR_ATTRIBUTE) {
            if (socketWrapper == null) {
                request.localAddr().recycle();
            } else {
                if (socketWrapper.getLocalAddr() == null) {
                    socketWrapper.setLocalAddr(
                            socketWrapper.getSocket().getLocalAddress().getHostAddress());
                }
                request.localAddr().setString(socketWrapper.getLocalAddr());
            }
        } else if (actionCode == ActionCode.REQ_REMOTEPORT_ATTRIBUTE) {
            if (socketWrapper == null) {
                request.setRemotePort(0);
            } else {
                if (socketWrapper.getRemotePort() == -1) {
                    socketWrapper.setRemotePort(socketWrapper.getSocket().getPort());
                }
                request.setRemotePort(socketWrapper.getRemotePort());
            }
        } else if (actionCode == ActionCode.REQ_LOCALPORT_ATTRIBUTE) {
            if (socketWrapper == null) {
                request.setLocalPort(0);
            } else {
                if (socketWrapper.getLocalPort() == -1) {
                    socketWrapper.setLocalPort(socketWrapper.getSocket().getLocalPort());
                }
                request.setLocalPort(socketWrapper.getLocalPort());
            }
        } else if (actionCode == ActionCode.REQ_SSL_CERTIFICATE) {
            if( sslSupport != null) {
                /*
                 * Consume and buffer the request body, so that it does not
                 * interfere with the client's handshake messages
                 */
                InputFilter[] inputFilters = inputBuffer.getFilters();
                ((BufferedInputFilter) inputFilters[Constants.BUFFERED_FILTER])
                    .setLimit(maxSavePostSize);
                inputBuffer.addActiveFilter
                    (inputFilters[Constants.BUFFERED_FILTER]);
                try {
                    // true forces a renegotiation to obtain the client cert
                    Object sslO = sslSupport.getPeerCertificateChain(true);
                    if( sslO != null) {
                        request.setAttribute
                            (SSLSupport.CERTIFICATE_KEY, sslO);
                    }
                } catch (Exception e) {
                    log.warn(sm.getString("http11processor.socket.ssl"), e);
                }
            }
        } else if (actionCode == ActionCode.ASYNC_COMPLETE) {
            socketWrapper.clearDispatches();
            if (asyncStateMachine.asyncComplete()) {
                ((JIoEndpoint) endpoint).processSocket(this.socketWrapper,
                        SocketStatus.OPEN_READ, true);
            }
        } else if (actionCode == ActionCode.ASYNC_SETTIMEOUT) {
            if (param == null) return;
            long timeout = ((Long)param).longValue();
            // if we are not piggy backing on a worker thread, set the timeout
            socketWrapper.setTimeout(timeout);
        } else if (actionCode == ActionCode.ASYNC_DISPATCH) {
            if (asyncStateMachine.asyncDispatch()) {
                ((JIoEndpoint) endpoint).processSocket(this.socketWrapper,
                        SocketStatus.OPEN_READ, true);
            }
        }
    }
    // ------------------------------------------------------ Protected Methods
    @Override
    protected void prepareRequestInternal() {
        // NOOP for BIO
    }
    @Override
    protected boolean prepareSendfile(OutputFilter[] outputFilters) {
        // Should never, ever call this code (sendfile is not supported by BIO)
        Exception e = new Exception();
        log.error(sm.getString("http11processor.neverused"), e);
        return false;
    }
    @Override
    protected AbstractInputBuffer<Socket> getInputBuffer() {
        return inputBuffer;
    }
    @Override
    protected AbstractOutputBuffer<Socket> getOutputBuffer() {
        return outputBuffer;
    }
    /**
     * Set the socket buffer flag.
     */
    @Override
    public void setSocketBuffer(int socketBuffer) {
        super.setSocketBuffer(socketBuffer);
        outputBuffer.setSocketBuffer(socketBuffer);
    }
}
| |
package com.missionhub.people;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemLongClickListener;
import android.widget.ListAdapter;
import com.android.volley.RequestQueue;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.Volley;
import com.missionhub.model.Person;
import com.missionhub.ui.NetworkImageOnScrollListener;
import com.missionhub.ui.widget.SelectableListView;
import com.missionhub.ui.widget.SelectableListView.OnItemCheckedListener;
import com.missionhub.util.LruBitmapCache;
/**
* A generic ListView for displaying people.
*/
public class PeopleListView extends SelectableListView implements OnItemCheckedListener, OnItemClickListener, OnItemLongClickListener, AbsListView.OnScrollListener, NetworkImageOnScrollListener.ImageLoaderProvider {
/**
* The android logging tag
*/
public static final String TAG = PeopleListView.class.getSimpleName();
/**
* Listener used to dispatch person checked events
*/
private OnPersonCheckedListener mOnPersonCheckedListener;
/**
* Listener used to dispatch person click events
*/
private OnPersonClickListener mOnPersonClickListener;
/**
* Listener used to dispatch person long click events
*/
private OnPersonLongClickListener mOnPersonLongClickListener;
/**
* Listener used to dispatch scroll events
*/
private OnScrollListener mOnScrollListener;
/**
* the person list provider
*/
private PeopleListProvider mProvider;
/**
* The Volley Request Queue
*/
private RequestQueue mRequestQueue;
/**
* The Volley Image Loader
*/
private ImageLoader mImageLoader;
    /**
     * Construct a new PeopleListView with default styling.
     *
     * @param context The Context that will determine this widget's theming.
     */
    public PeopleListView(final Context context) {
        this(context, null);
    }
    /**
     * Construct a new PeopleListView with default styling.
     *
     * @param context The Context that will determine this widget's theming.
     * @param attrs   Specification of attributes that should deviate from default styling.
     */
    public PeopleListView(final Context context, final AttributeSet attrs) {
        this(context, attrs, android.R.attr.listViewStyle);
    }
    /**
     * Construct a new PeopleListView with default styling.
     *
     * @param context  The Context that will determine this widget's theming.
     * @param attrs    Specification of attributes that should deviate from default styling.
     * @param defStyle An attribute ID within the active theme containing a reference to the
     *                 default style for this widget. e.g. android.R.attr.listViewStyle.
     */
    public PeopleListView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);
        // This view intercepts all list events itself (via the super.setOn* calls)
        // and relays them through the person-specific listener interfaces; the raw
        // setOn* setters are overridden below to throw.
        super.setOnItemCheckedListener(this);
        super.setOnItemClickListener(this);
        super.setOnItemLongClickListener(this);
        super.setOnScrollListener(this);
    }
/**
* @deprecated Use {@link #setProvider(PeopleListProvider)}
*/
@Override
@Deprecated
public void setAdapter(final ListAdapter adapter) {
throw new RuntimeException("Use PeopleListProvider to provided the list data.");
}
/**
* @deprecated Use {@link #setOnPersonCheckedListener(PeopleListView.OnPersonCheckedListener)}
*/
@Override
@Deprecated
public void setOnItemCheckedListener(final OnItemCheckedListener listener) {
throw new RuntimeException("use setOnPersonCheckedListener");
}
/**
* @deprecated Use {@link #setOnPersonClickListener(PeopleListView.OnPersonClickListener)}
*/
@Override
@Deprecated
public void setOnItemClickListener(final OnItemClickListener listener) {
throw new RuntimeException("use setOnPersonClickListener");
}
/**
* @deprecated Use {@link #setOnPersonLongClickListener(PeopleListView.OnPersonLongClickListener)}
*/
@Override
@Deprecated
public void setOnItemLongClickListener(final OnItemLongClickListener listener) {
throw new RuntimeException("use setOnPersonLongClickListener");
}
/**
* @deprecated Use {@link #setOnPersonCheckedListener(PeopleListView.OnPersonCheckedListener)}
*/
@Override
@Deprecated
public void setOnItemSelectedListener(final OnItemSelectedListener listener) {
throw new RuntimeException("use setOnPersonCheckedListener for selection status");
}
/**
* {@inheritDoc}
*/
@Override
public void setOnScrollListener(final OnScrollListener listener) {
mOnScrollListener = listener;
}
/**
* {@inheritDoc}
*/
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
if (mOnScrollListener != null) {
mOnScrollListener.onScrollStateChanged(view, scrollState);
}
}
/**
* {@inheritDoc}
*/
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
if (mOnScrollListener != null) {
mOnScrollListener.onScroll(view, firstVisibleItem, visibleItemCount, totalItemCount);
}
if (mProvider != null) {
mProvider.onListScroll(this, firstVisibleItem, visibleItemCount, totalItemCount);
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean onItemLongClick(final AdapterView<?> parent, final View view, final int position, final long id) {
if (mOnPersonLongClickListener != null) {
final Object item = parent.getItemAtPosition(position);
if (item instanceof Person) {
return mOnPersonLongClickListener.onPersonLongClick(this, (Person) item, position, id);
}
}
return false;
}
/**
* Register a callback to be invoked when a person in the PeopleListView has been long clicked.
*
* @param listener The callback that will run
*/
public void setOnPersonLongClickListener(final OnPersonLongClickListener listener) {
mOnPersonLongClickListener = listener;
}
/**
* {@inheritDoc}
*/
@Override
public void onItemClick(final AdapterView<?> parent, final View view, final int position, final long id) {
if (mOnPersonClickListener != null) {
final Object item = parent.getItemAtPosition(position);
if (item instanceof Person) {
mOnPersonClickListener.onPersonClick(this, (Person) item, position, id);
}
}
}
/**
* Register a callback to be invoked when a person in the PeopleListView has been clicked.
*
* @param listener The callback that will run
*/
public void setOnPersonClickListener(final OnPersonClickListener listener) {
mOnPersonClickListener = listener;
}
/**
* {@inheritDoc}
*/
@Override
public void onSetItemChecked(final int position, final boolean checked) {
if (mOnPersonCheckedListener != null) {
final Object item = getItemAtPosition(position);
if (item instanceof Person) {
mOnPersonCheckedListener.onPersonChecked(this, (Person) item, position, checked);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void onAllUnchecked() {
if (mOnPersonCheckedListener != null) {
mOnPersonCheckedListener.onAllPeopleUnchecked();
}
}
/**
* Register a callback to be invoked when a contacts in the PeopleListView have been checked or unchecked.
*
* @param listener The callback that will run
*/
public void setOnPersonCheckedListener(final OnPersonCheckedListener listener) {
mOnPersonCheckedListener = listener;
}
/**
* Returns the provider currently associated with this widget
*
* @return The provider used to create this view's content
*/
public PeopleListProvider getProvider() {
return mProvider;
}
/**
* Sets the {@link PeopleListProvider} that provides the data and the views to represent
* the data in this widget.
*
* @param provider The provider to use to create this view's content.
*/
public void setProvider(final PeopleListProvider provider) {
mProvider = provider;
mProvider.setPeopleList(this);
super.setAdapter(mProvider);
}
@Override
public ImageLoader getImageLoader() {
if (mImageLoader == null) {
synchronized (this) {
mImageLoader = new ImageLoader(getVolleyRequestQueue(), LruBitmapCache.getInstance());
}
}
return mImageLoader;
}
@Override
public RequestQueue getVolleyRequestQueue() {
if (mRequestQueue == null) {
synchronized (this) {
mRequestQueue = Volley.newRequestQueue(getContext());
}
}
return mRequestQueue;
}
/**
* Interface definition for a callback to be invoked when a contact is long clicked.
*/
public interface OnPersonLongClickListener {
/**
* Callback method to be invoked when a person has been long clicked.
*
* @param list The PeopleListView where the long click happened
* @param person The person that was long clicked
* @param position The position of the view in the list
* @param id The row id of the item that was long clicked
* @return true if the callback consumed the long click, false otherwise
*/
public boolean onPersonLongClick(PeopleListView list, Person person, int position, long id);
}
/**
* Interface definition for a callback to be invoked when a contact is clicked.
*/
public interface OnPersonClickListener {
/**
* Callback method to be invoked when a person has been clicked.
*
* @param list The PeopleListView where the click happened
* @param person The person that was clicked
* @param position The position of the view in the list
* @param id The row id of the item that was clicked
*/
public void onPersonClick(PeopleListView list, Person person, int position, long id);
}
/**
* Interface definition for callbacks to be invoked when people are checked and unchecked.
*/
public interface OnPersonCheckedListener {
/**
* Callback method to be invoked when a person has been checked or unchecked
*
* @param list The PeopleListView where the click happened
* @param person The person that was clicked
* @param position The position of the view in the list
* @param checked True if the person is checked
*/
public void onPersonChecked(PeopleListView list, Person person, int position, boolean checked);
/**
* Callback method to be invoked when all people in the list have been unchecked
*/
public void onAllPeopleUnchecked();
}
}
// NOTE(review): removed trailing non-Java text ("Subsets and Splits", dataset-viewer
// residue) that was appended after the closing brace; it was not part of this source
// file and would not compile.