repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
mdanielwork/intellij-community | plugins/terminal/src/org/jetbrains/plugins/terminal/vfs/TerminalSessionEditorProvider.java | 1995 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.terminal.vfs;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorPolicy;
import com.intellij.openapi.fileEditor.FileEditorProvider;
import com.intellij.openapi.fileEditor.impl.FileEditorManagerImpl;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.terminal.JBTerminalWidget;
import com.intellij.ui.tabs.TabInfo;
import org.jetbrains.annotations.NotNull;
/**
* @author traff
*/
/**
 * File editor provider that opens terminal sessions represented by
 * {@link TerminalSessionVirtualFileImpl} virtual files.
 */
public class TerminalSessionEditorProvider implements FileEditorProvider, DumbAware {
  @Override
  public boolean accept(@NotNull Project project, @NotNull VirtualFile file) {
    // Only terminal-session virtual files are handled by this provider.
    return file instanceof TerminalSessionVirtualFileImpl;
  }

  @NotNull
  @Override
  public FileEditor createEditor(@NotNull Project project, @NotNull VirtualFile file) {
    TerminalSessionVirtualFileImpl sessionFile = (TerminalSessionVirtualFileImpl)file;
    if (file.getUserData(FileEditorManagerImpl.CLOSING_TO_REOPEN) != null) {
      // The editor is being reopened (e.g. tab drag between windows):
      // reuse the existing virtual file as-is.
      return new TerminalSessionEditor(project, sessionFile);
    }
    // Fresh open: wrap the live terminal widget in a new tab and a new
    // virtual file so the session gets its own editor identity.
    JBTerminalWidget terminalWidget = sessionFile.getTerminalWidget();
    TabInfo tab = new TabInfo(terminalWidget).setText(sessionFile.getName());
    TerminalSessionVirtualFileImpl freshSessionFile =
      new TerminalSessionVirtualFileImpl(tab, terminalWidget, sessionFile.getSettingsProvider());
    tab.setObject(freshSessionFile);
    return new TerminalSessionEditor(project, freshSessionFile);
  }

  @NotNull
  @Override
  public String getEditorTypeId() {
    return "terminal-session-editor";
  }

  @NotNull
  @Override
  public FileEditorPolicy getPolicy() {
    return FileEditorPolicy.HIDE_DEFAULT_EDITOR;
  }
}
| apache-2.0 |
jjYBdx4IL/misc | swing-utils/src/main/java/com/github/jjYBdx4IL/utils/awt/FontScanner.java | 1488 | /*
* Copyright © 2017 jjYBdx4IL (https://github.com/jjYBdx4IL)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.jjYBdx4IL.utils.awt;
//CHECKSTYLE:OFF
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.commons.io.DirectoryWalker;
/**
*
* @author Github jjYBdx4IL Projects
*/
/**
 * Recursively collects TrueType font files ({@code *.ttf}, case-insensitive)
 * below a given start directory.
 *
 * @author Github jjYBdx4IL Projects
 */
public class FontScanner extends DirectoryWalker<String> {

    public FontScanner() {
        super();
    }

    /**
     * Walks the directory tree rooted at {@code startDirectory} and returns
     * the absolute paths of all TrueType font files found.
     *
     * @param startDirectory root of the tree to scan
     * @return absolute paths of all files ending in ".ttf"
     * @throws IOException if the walk fails
     */
    public ArrayList<String> getFontFiles(String startDirectory) throws IOException {
        ArrayList<String> collected = new ArrayList<>();
        walk(new File(startDirectory), collected);
        return collected;
    }

    @Override
    protected void handleFile(File file, int depth, Collection<String> results) throws IOException {
        // Case-insensitive extension match; directories never reach this callback.
        String absolutePath = file.getAbsolutePath();
        boolean isTrueTypeFont = absolutePath.toLowerCase().endsWith(".ttf");
        if (isTrueTypeFont) {
            results.add(absolutePath);
        }
    }
}
| apache-2.0 |
mikes222/BsnsTemplate | app/src/main/java/com/mschwartz/bsnstemplate/example/WizardFragment.java | 1289 | package com.mschwartz.bsnstemplate.example;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.widget.Toast;
import com.mschwartz.bsnstemplate.ui.materialstepper.interfaces.IStepperItem;
import com.mschwartz.bsnstemplate.ui.materialstepper.style.DotStepper;
/**
* Created by Mike on 10/15/2016.
*/
/**
 * Example three-step wizard built on {@link DotStepper}.
 *
 * <p>Created by Mike on 10/15/2016.</p>
 */
public class WizardFragment extends DotStepper {

    /** Factory method for a new wizard instance. */
    public static WizardFragment newInstance() {
        return new WizardFragment();
    }

    /**
     * Creates the step fragment for the given 0-based position.
     *
     * @param position step index, expected in [0, {@link #getTabCount()})
     * @return the fragment for that step, never null
     * @throws IllegalStateException if {@code position} is out of range
     */
    @NonNull
    @Override
    public IStepperItem createTabItem(int position) {
        switch (position) {
            case 0:
                return WizardStep1Fragment.newInstance();
            case 1:
                return WizardStep2Fragment.newInstance();
            case 2:
                return WizardStep3Fragment.newInstance();
            default:
                // BUG FIX: the method is annotated @NonNull but previously fell
                // through to "return null" for unexpected positions, which would
                // surface as an NPE far from the cause. Fail fast instead.
                throw new IllegalStateException("Unexpected step position: " + position);
        }
    }

    @Override
    public int getTabCount() {
        return 3;
    }

    /** Called when the wizard finishes; shows the collected "name" value. */
    @Override
    public void onComplete(Bundle bundle) {
        Toast.makeText(getContext(), "complete " + bundle.getString("name"), Toast.LENGTH_LONG).show();
    }

    @Override
    protected String getFragmentname() {
        return "WizardFragment";
    }
}
| apache-2.0 |
snazy/ohc | ohc-core/src/test/java/org/caffinitas/ohc/chunked/UtilTest.java | 1424 | /*
* Copyright (C) 2014 Robert Stupp, Koeln, Germany, robert-stupp.de
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.caffinitas.ohc.chunked;
import org.testng.Assert;
import org.testng.annotations.Test;
public class UtilTest
{
    /**
     * Verifies {@code Util.bitNum(x)}: for 0 it returns 0, and for powers of
     * two it returns the 1-based index of the set bit.
     */
    @Test
    public void testBitNum()
    {
        // {input, expected} pairs, asserted in the same order as before.
        int[][] inputToExpected = {
            {0, 0}, {1, 1}, {2, 2}, {4, 3}, {8, 4}, {16, 5}, {32, 6},
            {64, 7}, {128, 8}, {256, 9}, {1024, 11}, {65536, 17}
        };
        for (int[] pair : inputToExpected)
        {
            Assert.assertEquals(Util.bitNum(pair[0]), pair[1]);
        }
    }
}
| apache-2.0 |
cflint987/android-basic-samples-master | BasicSamples/app/src/main/java/Game/PathFinding.java | 2015 | package Game;
import java.util.Vector;
import Generation.MapGenerator;
/**
* Created by Nathan on 3/29/2015.
*/
/**
 * Path finding between territories: relaxes distances across same-owner
 * neighbors from a source, then walks back from the destination by always
 * stepping to the minimum-distance neighbor.
 *
 * Created by Nathan on 3/29/2015.
 */
public class PathFinding {

    /** Returns the neighbor of {@code territory} with the smallest distance label. */
    private Territory findMin(Territory territory) {
        Territory t = territory.neighbors.get(0);
        for (int i = 1; i < territory.neighbors.size(); i++) {
            t = t.distance < territory.neighbors.get(i).distance ? t : territory.neighbors.get(i);
        }
        return t;
    }

    /**
     * Euclidean distance between the two territories' map coordinates.
     *
     * BUG FIX: the old code read {@code t1.y} into both y variables and then
     * computed {@code (y1 - y1) * (y1 - y2)}, so the y-axis contribution was
     * always 0 and "distance" degenerated to |x1 - x2|.
     */
    private double getDistance(Territory t1, Territory t2) {
        float dx = t1.x - t2.x;
        float dy = t1.y - t2.y;
        return Math.sqrt(dx * dx + dy * dy);
    }

    /**
     * Recursively propagates distance labels from {@code territory} to all
     * reachable territories, only relaxing edges between same-owner territories.
     *
     * NOTE(review): this marks nodes visited on first touch rather than when
     * their distance is final, so it is not strict Dijkstra; labels may not be
     * globally optimal on some graphs — confirm against gameplay expectations.
     */
    private void setNeighbors(Territory territory) {
        territory.visited = true;
        for (int i = 0; i < territory.neighbors.size(); i++) {
            Territory t = territory.neighbors.get(i);
            if (territory.owner == t.owner) {
                double dist = getDistance(territory, t) + territory.distance;
                t.distance = dist < t.distance ? dist : t.distance;
            }
            if (!t.visited) {
                setNeighbors(t);
            }
        }
    }

    /**
     * Computes a path from {@code source} to {@code destination}.
     *
     * @return the path in destination-to-source order, or {@code null} when the
     *         destination is unreachable through same-owner territory
     */
    public Vector<Territory> getPath(Territory source, Territory destination) {
        Vector<Territory> path = new Vector<>();
        // Reset every territory's label before relaxing (1000000 == "infinity").
        for (int i = 0; i < MapGenerator.mapData.territories.size(); i++) {
            Territory territory = MapGenerator.mapData.territories.get(i);
            territory.distance = 1000000;
            territory.visited = false;
        }
        source.distance = 0;
        source.visited = true;
        setNeighbors(source);
        if (destination.distance == 1000000) {
            return null;
        }
        // Walk back from the destination along minimum-distance neighbors.
        Territory current = destination;
        path.add(destination);
        while (current != source) {
            current = findMin(current);
            path.add(current);
        }
        return path;
    }
}
| apache-2.0 |
rewayaat/rewayaat | src/main/java/com/rewayaat/loader/kitab_al_tawheed/KitabAlTawheedWorker.java | 13702 | package com.rewayaat.loader.kitab_al_tawheed;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.io.Files;
import com.rewayaat.config.ESClientProvider;
import com.rewayaat.core.data.HadithObject;
import com.rewayaat.loader.LoaderUtil;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.text.PDFTextStripper;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
/**
 * Worker thread that extracts hadith from a local PDF of Kitab Al-Tawhid
 * (pages {@code start} to {@code end}), parses them into {@link HadithObject}s
 * and indexes each one into Elasticsearch. Progress and errors are appended to
 * a per-range operation log file.
 *
 * Must be executed on linux machine with the pdftopom package installed.
 */
public class KitabAlTawheedWorker extends Thread {
    // Chapter titles indexed by chapter number (index 0 is a placeholder).
    // Transcribed from the book; spelling quirks are preserved intentionally.
    private String[] chapterNamesArray = {
            "n/a",
            "Reward for the Monotheists and Gnostics",
            "Divine Unity and Negation of Anthropomorphism",
            "The Definition of One, Divine Unity, and the Believer in Divine Unity",
            "The Commentary of Chapter 112 the Unity",
            "The Meaning of Divine Unity and Divine Justice",
            "The Mighty and High is Devoid of both Body and Image",
            "The Blessed and Exalted is a Thing",
            "What is Related Regarding the Vision",
            "Al-Qudrah Omnipotence",
            "Al-Ilm Knowledge",
            "Attributes of Essence and Attributes of Actions",
            "The Commentary of verse 88 of Chapter 28 the Narrative [al-Qasas] 'Everything is perishable but He'",
            "The Commentary of Verse 75 of Chapter 38 Sad O Iblis! What prevented you that you should do obeisance to him whom I created with My Two Hands?",
            "The Commentary of Verse 42 of Chapter 68 Qalam - On the Day when there shall " +
                    "be a severe affliction, and they shall be called upon to make obeisance",
            "The Commentary of Verse 35 of Chapter 24 the Light [al-Nur]",
            "The Commentary of Verse 67 of Chapter 9 the Repentance [al-Tawbah] They have " +
                    "forsaken Allah, so He has forsaken them",
            "The Commentary of Verse 67 of Chapter 39 the Companies [al-Zumar] And the " +
                    "whole Earth shall be in His Grip on the Day of Resurrection and the Heavens rolled up in his Right " +
                    "Hand",
            "The Commentary of Verse 15 of Chapter 83 the Defrauders [al-Mutaffifin] Nay! " +
                    "Most surely they shall on that day be debarred from their Lord",
            "The Commentary of Verse 22 of Chapter 89 the Daybreak [al-Fajr] And your Lord " +
                    "comes and (also) the angels in ranks",
            "The Commentary of Verse 210 of Chapter 2 the Cow [al-Baqarah] They do not wait " +
                    "aught but that Allah should come to them in the shadow of clouds along with the angels.",
            "Meaning of Scoffing, Mockery, Planning and Deception of Allah",
            "The Meaning of Allah’s Side",
            "The Meaning of the Waistband",
            "The Meaning of the Eye, the Ear, and the " +
                    "Tongue of Allah",
            "The Meaning of Allah’s Hand is Tied Up",
            "The Meaning of His Pleasure and His Anger",
            "The Meaning of Allah’s Breathing of Spirit And " +
                    "I breathed into him of My Sprit",
            "Negation of Space, Time, Stillness, Motion, Descending, Ascending, " +
                    "and Transference from Allah",
            "The Names of Allah, the Exalted, and the Difference between their Meanings and the " +
                    "Meaning of the Names of Creation",
            "What is the Qur’an?",
            "The Meaning of “In the Name of Allah, " +
                    "the Most Compassionate, the Most Merciful”",
            "The Explanation of the Letters of the Alphabet",
            "The Explanation of the Letters of the Alphabet " +
                    "According to Their Numerical Value",
            "The Explanation of the Words of the Calls " +
                    "to Prayer",
            "The " +
                    "Commentary of Guidance, Misguidance, Direction, and Forsaking is from Allah, the Exalted",
            "The Refutation of the Dualists & " +
                    "the Atheists",
            "The Refutation of the Ones who Say that Allah is the Third of the Three: There is no god " +
                    "but the One God",
            "The Remembrance of Allah’s Greatness, " +
                    "Mighty be His Glory",
            "The Subtlety of Allah, the Blessed, the " +
                    "Exalted",
            "The Least Required for Recognizing Diving " +
                    "Unity",
            "He, the Mighty and High, in not " +
                    "Recognized, Except by Himself",
            "The Assertion of the Emergence of the Universe",
            "The Tradition of Dhi`lib",
            "The Tradition of Subakht, the Jewish Man",
            "The Meaning of “Glory be to Allah”",
            "The Meaning of Allah is the Greatest",
            "The Meaning of the First and the Last",
            "The Meaning of Allah`s " +
                    "Word: the Most Compassionate is Firm on the Empyrean",
            "The Meaning of Allah’s Word: " +
                    "His Empyrean was on the Water",
            "The Empyrean and Its Description",
            "The Empyrean was Created in Quarters",
            "The Meaning of 'His " +
                    "Knowledge Extend over the Heavens and the Earth'",
            "Allah made the Nature of " +
                    "the Creation upon Divine Unity",
            "Al-Bada’ The Appearance",
            "The Will and the Intent",
            "Al-Istita`ah Capability",
            "The Trial and the Test",
            "Privilege and Adversity",
            "The Negation of Determinism and Relinquishment",
            "Predestination, Divine Decree, Trials, Means of Sustenance Rates, and Restricted Powers",
            "Fihim Children and Allah’s Justice Concerning Them",
            "Allah Only Des What is " +
                    "Best for His Servant",
            "Command, Prohibition, Promise " +
                    "and Threat",
            "Recognition, Explanation, " +
                    "Evidence, and Guidance",
            "A Session of Imam Rida (AS) with Theologians from Among the Rhetoricians and Various " +
                    "Religions about Unity in the presence of al-Ma`mun",
            "A Session of Imam Rida (AS) with Sulayman al-Marwazi, the Theologian of Khurasan, in the " +
                    "Presece of al-Ma`mun concerning the Subject of Divine Unity",
            "The Prohibition of " +
                    "Discussing, Debating, and Arguing about Allah"
    };
    // Current chapter title; seeded at index 29, presumably because this worker
    // was last run starting mid-book — TODO confirm before reuse.
    private String chapter = this.chapterNamesArray[29];
    // Book title attached to every indexed hadith.
    private String book = "Kitab Al-Tawhid | كتاب التوحيد";
    // Hadith accumulated while parsing; oldest entries are flushed via saveHadith().
    private List<HadithObject> hadithObjects = new ArrayList<HadithObject>();
    // Inclusive first PDF page and exclusive last PDF page processed by this worker.
    private int start;
    private int end;

    /**
     * @param start first PDF page to process (inclusive)
     * @param end   page to stop before (exclusive)
     */
    public KitabAlTawheedWorker(int start, int end) {
        this.start = start;
        this.end = end;
    }

    /**
     * Extracts text page-by-page with PDFBox and runs a line-oriented state
     * machine: chapter headers reset {@link #chapter}, numbered lines start a
     * new hadith, other non-Arabic lines are appended to the current hadith.
     * Completed hadith are pushed to Elasticsearch via {@link #saveHadith()}.
     *
     * NOTE(review): if opening the log file fails, {@code writer} stays null
     * and the loop below throws NPE; the PDDocument is also never closed —
     * both are worth fixing in a follow-up.
     */
    @Override
    public void run() {
        PrintWriter writer = null;
        try {
            writer = new PrintWriter((new BufferedWriter(new FileWriter(
                    "/home/zir0/git/rewayaatv2/rewayaat/src/main/java/com/rewayaat/loader/kitab_al_tawheed/operationLog_"
                            + start + "-" + end + ".txt",
                    true))));
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        File myTempDir = Files.createTempDir();
        PDDocument document = null;
        String pdfLocation = "/ssd/onedrive/Documents/Books/Tawhid.pdf";
        try {
            document = PDDocument.load(new File(pdfLocation));
        } catch (InvalidPasswordException e1) {
            e1.printStackTrace();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        PDFTextStripper reader = null;
        try {
            reader = new PDFTextStripper();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        for (int i = start; i < end; i++) {
            writer.println("Processing page: " + i);
            try {
                // Restrict the stripper to a single page per iteration.
                reader.setStartPage(i);
                reader.setEndPage(i);
                String st = reader.getText(document);
                String[] lines = st.split("\n");
                for (int j = 0; j < lines.length; j++) {
                    String line = lines[j];
                    // Arabic lines are skipped; only the English translation is kept.
                    if (!LoaderUtil.containsArabic(line)) {
                        if (!line.trim().isEmpty()) {
                            // Footnote markers end the page; bare page numbers are skipped.
                            if (line.contains("Translator’s Note")
                                    || line.contains("Editor’s Note")
                                    || line.trim().matches("[0-9]+")) {
                                if (line.trim().matches("[0-9]+")) {
                                    continue;
                                } else {
                                    break;
                                }
                            } else if (line.toUpperCase().trim().startsWith("CHAPTER ")) {
                                // New chapter: flush any pending hadith first.
                                if (!hadithObjects.isEmpty()) {
                                    saveHadith();
                                }
                                chapter = "";
                                chapter += line.trim();
                                // Chapter titles may wrap; absorb following lines.
                                while (!lines[j + 1].trim().isEmpty() && !LoaderUtil.containsArabic(lines[j + 1])) {
                                    chapter += " " + lines[j + 1].trim();
                                    j++;
                                }
                                // "Chapter twelve: ..." -> "Chapter 12 - <canonical title>".
                                if (chapter.contains(":")) {
                                    String wordNumber = chapter.substring(0, chapter.indexOf(":")).toLowerCase().replaceAll("chapter", "").trim();
                                    int chapterInteger = (int) LoaderUtil.convertWordToInteger(wordNumber);
                                    chapter = "Chapter " + String.valueOf(chapterInteger) + " - " + this.chapterNamesArray[chapterInteger].trim();
                                }
                                chapter = LoaderUtil.cleanupText(chapter).trim();
                                writer.println(chapter);
                                writer.flush();
                            } else if (line.trim().matches("^[0-9]+\\..*$")) {
                                // "12. text..." starts a new numbered hadith.
                                if (!hadithObjects.isEmpty()) {
                                    saveHadith();
                                }
                                setupNewHadithObj();
                                getNewestHadith().setNumber(line.trim().substring(0, line.trim().indexOf(".")));
                                getNewestHadith().insertEnglishText(
                                        line.trim().substring(line.trim().indexOf(".") + 1).trim() + " ");
                            } else {
                                // Continuation line of the current hadith, if any.
                                if (!hadithObjects.isEmpty()) {
                                    getNewestHadith().insertEnglishText(line.trim() + " ");
                                }
                            }
                        }
                    }
                }
            } catch (Exception e) {
                // Log and move on to the next page; the partial hadith stays queued.
                writer.println("Error while processing current Hadith:\n" + getNewestHadith().toString() + "\n");
                writer.flush();
                e.printStackTrace(writer);
                continue;
            }
            writer.println("Finished Processing page: " + i);
            writer.flush();
        }
        // Flush the final hadith left in the queue after the last page.
        if (!hadithObjects.isEmpty()) {
            try {
                saveHadith();
            } catch (JsonProcessingException e) {
                e.printStackTrace();
            }
        }
        writer.close();
        myTempDir.delete();
    }

    /**
     * Serializes the oldest queued hadith to JSON and indexes it into
     * Elasticsearch, retrying up to 8 times on connectivity errors.
     */
    public void saveHadith() throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        HadithObject completedHadith = completeOldestHadith();
        // cleanupText is applied again here (also done in completeOldestHadith);
        // harmless but redundant.
        completedHadith.setEnglish(LoaderUtil.cleanupText(completedHadith.getEnglish()));
        byte[] json = mapper.writeValueAsBytes(completedHadith);
        boolean successful = false;
        int tries = 0;
        while (successful == false && tries < 8) {
            try {
                ESClientProvider.instance().getClient().prepareIndex(ESClientProvider.INDEX, "_doc")
                        .setSource(json).get();
                successful = true;
            } catch (NoNodeAvailableException | UnknownHostException e) {
                tries++;
                continue;
            }
        }
    }

    /** Queues a fresh hadith pre-populated with the current chapter and book. */
    public void setupNewHadithObj() {
        HadithObject currentHadith = new HadithObject();
        if (chapter != null) {
            currentHadith.setChapter(chapter);
        }
        currentHadith.setBook(book);
        hadithObjects.add(currentHadith);
    }

    // NOTE(review): appears unused — completeOldestHadith() is used instead.
    private HadithObject getOldestHadith() {
        return hadithObjects.get(0);
    }

    /** Returns the hadith currently being built (last in the queue). */
    private HadithObject getNewestHadith() {
        return hadithObjects.get(hadithObjects.size() - 1);
    }

    /** Removes the oldest queued hadith, cleaning its English text first. */
    private HadithObject completeOldestHadith() {
        HadithObject hadith = hadithObjects.get(0);
        hadith.setEnglish(LoaderUtil.cleanupText(hadith.getEnglish()));
        hadithObjects.remove(0);
        return hadith;
    }
} | apache-2.0 |
boylesoftware/thymes2 | src/main/java/org/bsworks/x2/resource/impl/FilterConditionImpl.java | 8360 | package org.bsworks.x2.resource.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.Iterator;
import java.util.Set;
import org.bsworks.x2.resource.AggregatePropertyHandler;
import org.bsworks.x2.resource.DependentRefPropertyHandler;
import org.bsworks.x2.resource.FilterCondition;
import org.bsworks.x2.resource.FilterConditionOperandType;
import org.bsworks.x2.resource.FilterConditionType;
import org.bsworks.x2.resource.InvalidResourceDataException;
import org.bsworks.x2.resource.InvalidSpecificationException;
import org.bsworks.x2.resource.PropertyValueFunction;
import org.bsworks.x2.resource.RefPropertyHandler;
import org.bsworks.x2.resource.ResourcePropertyHandler;
import org.bsworks.x2.resource.ResourcePropertyValueHandler;
/**
* Filter condition implementation.
*
* @author Lev Himmelfarb
*/
class FilterConditionImpl
	implements FilterCondition {

	/**
	 * Condition type.
	 */
	private final FilterConditionType type;

	/**
	 * Tells if negated.
	 */
	private final boolean negated;

	/**
	 * Property path (with any trailing "/id" or "/key" suffix stripped off).
	 */
	private final String propPath;

	/**
	 * Property value type (VALUE, or ID/KEY when the path ended in "/id"/"/key").
	 */
	private final FilterConditionOperandType propValueType;

	/**
	 * Property value transformation function.
	 */
	private final PropertyValueFunction valueFunc;

	/**
	 * Property value transformation function parameters (never {@code null},
	 * empty when the function takes no parameters).
	 */
	private final Object[] valueFuncParams;

	/**
	 * Property chain.
	 */
	private final Deque<? extends ResourcePropertyHandler> propChain;

	/**
	 * Condition operands (unmodifiable).
	 */
	private final Collection<FilterConditionOperandImpl> operands;

	/**
	 * Create new condition.
	 *
	 * <p>Validates the whole specification up front: resolves the property
	 * chain, rejects aggregates, checks the operand count against the
	 * condition type, parses string operands with the appropriate value
	 * handler and collects all participating persistent resource classes.</p>
	 *
	 * @param resources Application resources manager.
	 * @param type Condition type.
	 * @param valueFunc Property value transformation function.
	 * @param valueFuncParams Property value transformation function parameters.
	 * May be {@code null} if the function takes no parameters.
	 * @param negated {@code true} if negated.
	 * @param prsrcHandler Root persistent resource handler.
	 * @param propPath Property path.
	 * @param operands Condition operands. Cannot be {@code null}, but can be
	 * empty.
	 * @param prsrcClasses Set, to which to add any participating persistent
	 * resource classes.
	 *
	 * @throws InvalidSpecificationException If condition specification is
	 * invalid.
	 */
	FilterConditionImpl(final ResourcesImpl resources,
			final FilterConditionType type,
			final PropertyValueFunction valueFunc,
			final Object[] valueFuncParams, final boolean negated,
			final PersistentResourceHandlerImpl<?> prsrcHandler,
			final String propPath, final Object[] operands,
			final Set<Class<?>> prsrcClasses) {

		this.type = type;
		this.valueFunc = valueFunc;
		// normalize null parameters to an empty array so callers of
		// getValueFunctionParams() never need a null check
		this.valueFuncParams = (valueFuncParams != null ?
				valueFuncParams : new Object[0]);
		this.negated = negated;

		// get property path and tested value operand type:
		// a "/id" suffix tests a reference's target id, a "/key" suffix tests
		// a map key, otherwise the property value itself is tested
		if (propPath.endsWith("/id")) {
			this.propPath = propPath.substring(0,
					propPath.length() - "/id".length());
			this.propValueType = FilterConditionOperandType.ID;
		} else if (propPath.endsWith("/key")) {
			this.propPath = propPath.substring(0,
					propPath.length() - "/key".length());
			this.propValueType = FilterConditionOperandType.KEY;
		} else {
			this.propPath = propPath;
			this.propValueType = FilterConditionOperandType.VALUE;
		}

		// get property path chain
		this.propChain = prsrcHandler.getPersistentPropertyChain(this.propPath);

		// get handler of the property at the end of the chain
		final AbstractResourcePropertyHandlerImpl propHandler =
			(AbstractResourcePropertyHandlerImpl) this.propChain.getLast();

		// cannot use aggregates in filters
		if (propHandler instanceof AggregatePropertyHandler)
			throw new InvalidSpecificationException(
					"Cannot use aggregate properties in filters.");

		// get property value handlers (top and leaf)
		final AbstractResourcePropertyValueHandlerImpl propTopValueHandler =
			propHandler.getValueHandler();
		final AbstractResourcePropertyValueHandlerImpl propLeafValueHandler =
			propHandler.getValueHandler().getLastInChain();

		// determine if the test is a presence check, validate operands number:
		// EMPTY/NOT_EMPTY take no operands, every other type needs at least one
		final boolean presenceCheck = ((this.type == FilterConditionType.EMPTY)
				|| (this.type == FilterConditionType.NOT_EMPTY));
		if (presenceCheck && (operands.length > 0))
			throw new InvalidSpecificationException("This type of filter"
					+ " condition does not use operands.");
		if (!presenceCheck && (operands.length == 0))
			throw new InvalidSpecificationException("This type of filter"
					+ " condition requires at least one operand.");

		// get value handler for the operand, depending on what is tested
		final ResourcePropertyValueHandler opValueHandler;
		switch (this.propValueType) {
		case ID:

			// make sure the property is a reference
			if (!propLeafValueHandler.isRef())
				throw new InvalidSpecificationException("Property " + propPath
						+ " is not a reference and its id cannot be tested.");

			// use target resource id property value handler for operand values
			opValueHandler = resources.getPersistentResourceHandler(
					propHandler.getValueHandler().getLastInChain()
						.getRefTargetClass())
					.getIdProperty().getValueHandler();
			break;

		case KEY:

			// make sure the property is a map
			if (propTopValueHandler.getType() != ResourcePropertyValueType.MAP)
				throw new InvalidSpecificationException("Property " + propPath
						+ " is not a map and does not have a key.");

			// use key value handler for operand values
			opValueHandler = propHandler.getKeyValueHandler();
			break;

		default: // VALUE

			// make sure the property has simple value to test
			if (!(propLeafValueHandler
					instanceof SimpleResourcePropertyValueHandler))
				throw new InvalidSpecificationException("Property " + propPath
						+ " does not have simple value.");

			// use property value handler for operand values
			opValueHandler = propHandler.getValueHandler();
		}

		// gather all operands using appropriate value handler; string operands
		// are parsed into typed values, anything else is taken as-is
		try {
			final Collection<FilterConditionOperandImpl> operandsCol =
				new ArrayList<>(operands.length > 10 ? operands.length : 10);
			for (final Object op : operands) {
				if (op == null)
					throw new InvalidSpecificationException(
							"Filter condition operands may not be null.");
				operandsCol.add(new FilterConditionOperandImpl(
						op instanceof String ?
								opValueHandler.valueOf((String) op) : op));
			}
			this.operands = Collections.unmodifiableCollection(operandsCol);
		} catch (final InvalidResourceDataException e) {
			throw new InvalidSpecificationException("Invalid operand value.",
					e);
		}

		// save participating persistent resource classes from the chain;
		// a reference at the end of the chain is not followed (only its
		// id/value is tested), but dependent references always are
		for (final Iterator<? extends ResourcePropertyHandler> i =
				this.propChain.iterator(); i.hasNext();) {
			final ResourcePropertyHandler prop = i.next();
			if ((prop instanceof RefPropertyHandler) && i.hasNext())
				prsrcClasses.add(((RefPropertyHandler) prop)
						.getReferredResourceClass());
			else if (prop instanceof DependentRefPropertyHandler)
				prsrcClasses.add(((DependentRefPropertyHandler) prop)
						.getReferredResourceClass());
		}
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public FilterConditionType getType() {
		return this.type;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public boolean isNegated() {
		return this.negated;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public String getPropertyPath() {
		return this.propPath;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public FilterConditionOperandType getPropertyValueType() {
		return this.propValueType;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public PropertyValueFunction getValueFunction() {
		return this.valueFunc;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public Object[] getValueFunctionParams() {
		return this.valueFuncParams;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public Deque<? extends ResourcePropertyHandler> getPropertyChain() {
		return this.propChain;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public Collection<FilterConditionOperandImpl> getOperands() {
		return this.operands;
	}
}
| apache-2.0 |
srinikandula/takewebsh | src/main/java/com/web/sruthijava/tressSetExample.java | 5032 | package com.web.sruthijava;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
/**
* Created by chsru on 7/8/2017.
*/
/**
 * Console-driven demo of simple bank-account management backed by a
 * {@link TreeSet} ordered by {@code AccountComparator}. All lookups scan the
 * set linearly by account id.
 *
 * Created by chsru on 7/8/2017.
 */
public class tressSetExample {

    // Shared stdin reader for all menu prompts; lives for the whole program.
    static BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));

    // Accounts, kept in comparator order for the "Order Account" listing.
    private static Set<Account> treeSet = new TreeSet<Account>(new AccountComparator());

    /** Menu loop: dispatches to the account operations until 0 is entered. */
    public static void main(String[] args) throws IOException, ClassNotFoundException {
        int option = 0;
        do {
            System.out.println("1. Create Account");
            System.out.println("2. Find Balance");
            System.out.println("3. Deposit");
            System.out.println("4. Withdraw");
            System.out.println("5. Delete Account");
            System.out.println("6. Order Account");
            System.out.println("0. Exit");
            System.out.println("please enter your option");
            option = Integer.parseInt(bufferedReader.readLine());
            switch (option) {
                case 1:
                    createNewAccount();
                    break;
                case 2:
                    findBalance();
                    break;
                case 3:
                    deposit();
                    break;
                case 4:
                    withdraw();
                    break;
                case 5:
                    deleteAccount();
                    break;
                case 6:
                    orderAccount();
                    break;
            }
        } while (option != 0);
    }

    /**
     * Prompts for the account details and adds a new account with a random id.
     * NOTE(review): Math.random()*10000 can collide with an existing id —
     * consider a counter or uniqueness check.
     */
    public static void createNewAccount() throws IOException {
        Account newAccount = new Account();
        System.out.println("Please enter name");
        newAccount.setName(bufferedReader.readLine());
        System.out.println("Please enter your ssn");
        newAccount.setSsn(Long.valueOf(bufferedReader.readLine()));
        System.out.println("Please enter deposit balance");
        newAccount.setBalance(Double.valueOf(bufferedReader.readLine()));
        newAccount.setAccountId((int) (Math.random() * 10000));
        System.out.println("Account Number:" + newAccount.getAccountId());
        treeSet.add(newAccount);
    }

    /** Prints the balance of the account with the entered id, if present. */
    public static void findBalance() throws IOException {
        System.out.println("Please enter your account number");
        int accountNumber = Integer.parseInt(bufferedReader.readLine());
        Iterator<Account> itr = treeSet.iterator();
        while (itr.hasNext()) {
            Account check = itr.next();
            if (check.getAccountId() == accountNumber) {
                System.out.println("Your current Balance:" + check.getBalance());
            }
        }
    }

    /** Deletes the account with the entered id, if present. */
    public static void deleteAccount() throws IOException {
        System.out.println("Please enter your account number");
        int accountNumber = Integer.parseInt(bufferedReader.readLine());
        Iterator<Account> itr = treeSet.iterator();
        while (itr.hasNext()) {
            Account check = itr.next();
            if (check.getAccountId() == accountNumber) {
                // FIX: remove through the iterator. The old Set.remove() during
                // iteration only avoided ConcurrentModificationException because
                // of the immediate break; Iterator.remove() is safe by contract.
                itr.remove();
                System.out.println("account deleted successfully");
                break;
            }
        }
    }

    /** Adds the entered amount to the balance of the matching account. */
    public static void deposit() throws IOException {
        System.out.println("Please enter your account number");
        int accountNumber = Integer.parseInt(bufferedReader.readLine());
        System.out.println("Please enter deposit amount");
        int depositAmount = Integer.parseInt(bufferedReader.readLine());
        Iterator<Account> itr = treeSet.iterator();
        while (itr.hasNext()) {
            Account check = itr.next();
            if (check.getAccountId() == accountNumber) {
                check.setBalance(check.getBalance() + depositAmount);
                System.out.println("New balance " + check.getBalance());
            }
        }
    }

    /**
     * Subtracts the entered amount from the matching account's balance.
     * NOTE(review): no overdraft check — the balance may go negative.
     */
    public static void withdraw() throws IOException {
        System.out.println("Please enter your account number");
        int accountNumber = Integer.parseInt(bufferedReader.readLine());
        System.out.println("Please enter withdraw amount");
        int withdrawAmount = Integer.parseInt(bufferedReader.readLine());
        Iterator<Account> itr = treeSet.iterator();
        while (itr.hasNext()) {
            Account check = itr.next();
            if (check.getAccountId() == accountNumber) {
                check.setBalance(check.getBalance() - withdrawAmount);
                System.out.println("New balance " + check.getBalance());
            }
        }
    }

    /** Lists all accounts in the TreeSet's comparator order. */
    public static void orderAccount() throws IOException {
        Iterator<Account> itr = treeSet.iterator();
        while (itr.hasNext()) {
            Account temp = itr.next();
            System.out.println("Account No : " + temp.getAccountId() + "Account Balance : " + temp.getBalance());
        }
    }
}
| apache-2.0 |
esoco/objectrelations | src/test/java/de/esoco/lib/json/JsonUtilTest.java | 3122 | //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'objectrelations' project.
// Copyright 2016 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.lib.json;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/********************************************************************
* Test of {@link Json}
*
* @author eso
*/
public class JsonUtilTest
{
	//~ Methods ----------------------------------------------------------------

	/***************************************
	 * Test of {@link Json#escape(String)}: JSON two-character escapes, the
	 * \\uXXXX ranges that must be escaped (control characters and
	 * U+2000..U+20FF), and characters that must pass through unchanged.
	 */
	@Test
	public void testEscape()
	{
		// the eight standard JSON two-character escapes
		assertEquals("\\\"", Json.escape("\""));
		assertEquals("\\\\", Json.escape("\\"));
		assertEquals("\\/", Json.escape("/"));
		assertEquals("\\b", Json.escape("\b"));
		assertEquals("\\f", Json.escape("\f"));
		assertEquals("\\n", Json.escape("\n"));
		assertEquals("\\r", Json.escape("\r"));
		assertEquals("\\t", Json.escape("\t"));
		// ranges escaped as \\uXXXX (note: hex digits come out upper-case)
		assertEquals("\\u0000-\\u001F", Json.escape("\u0000-\u001f"));
		assertEquals("\\u007F-\\u009F", Json.escape("\u007f-\u009f"));
		assertEquals("\\u2000-\\u20FF", Json.escape("\u2000-\u20ff"));
		// characters outside those ranges pass through unchanged
		assertEquals("\u2100,\u21FF", Json.escape("\u2100,\u21ff"));
		assertEquals("\uFFFF", Json.escape("\uffff"));
		// combined inputs
		assertEquals("\\\"\\\\\\/\\b\\f\\n\\r\\t\\\"",
					 Json.escape("\"\\/\b\f\n\r\t\""));
		assertEquals(" \\\" \\\\ \\/ \\b \\f \\n \\r \\t \\\" ",
					 Json.escape(" \" \\ / \b \f \n \r \t \" "));
	}

	/***************************************
	 * Test of {@link Json#restore(String)}: the inverse of testEscape, mapping
	 * each JSON escape sequence back to the original character.
	 */
	@Test
	public void testRestore()
	{
		// the eight standard JSON two-character escapes
		assertEquals("\"", Json.restore("\\\""));
		assertEquals("\\", Json.restore("\\\\"));
		assertEquals("/", Json.restore("\\/"));
		assertEquals("\b", Json.restore("\\b"));
		assertEquals("\f", Json.restore("\\f"));
		assertEquals("\n", Json.restore("\\n"));
		assertEquals("\r", Json.restore("\\r"));
		assertEquals("\t", Json.restore("\\t"));
		// \\uXXXX sequences restored to their code points
		assertEquals("\u0000-\u001F", Json.restore("\\u0000-\\u001f"));
		assertEquals("\u007F-\u009F", Json.restore("\\u007f-\\u009f"));
		assertEquals("\u2000-\u20FF", Json.restore("\\u2000-\\u20ff"));
		// unescaped characters pass through unchanged
		assertEquals("\u2100,\u21FF", Json.restore("\u2100,\u21ff"));
		assertEquals("\uFFFF", Json.restore("\uffff"));
		// combined inputs
		assertEquals("\"\\/\b\f\n\r\t\"",
					 Json.restore("\\\"\\\\\\/\\b\\f\\n\\r\\t\\\""));
		assertEquals(" \" \\ / \b \f \n \r \t \" ",
					 Json.restore(" \\\" \\\\ \\/ \\b \\f \\n \\r \\t \\\" "));
	}
}
| apache-2.0 |
kokog78/ibello-api | src/main/java/hu/ibello/bdd/ExamplesHandler.java | 2671 | /*
* Ark-Sys Kft. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package hu.ibello.bdd;
import java.util.List;
import java.util.regex.Pattern;
import hu.ibello.bdd.model.Examples;
/**
* With the methods of this class we can load and save Cucumber example files.
* The files will be loaded from the current ibello project.
* <p>
* The return values are in ibello's own format. The {@link Examples} class describes a Cucumber example
* with scenarios and steps.
* </p>
* @author Kornél Simon
*/
public interface ExamplesHandler {

    /**
     * Loads a single Cucumber example file.
     * @param relativePath the path to the file, relative to the "examples" directory of the project
     * @return the loaded Cucumber example data
     * @throws BDDException if there was an error during loading the file
     */
    Examples loadExamples(String relativePath) throws BDDException;

    /**
     * Loads multiple Cucumber example files.
     * The method uses a regular expression to find the files:
     * if a file's relative path matches the regular expression, it will be loaded.
     * @param relativePath regular expression matched against each file path (which is relative to the "examples" directory of the project)
     * @return the list of the loaded Cucumber examples
     * @throws BDDException if there was an error during loading a file
     */
    List<Examples> loadExamples(Pattern relativePath) throws BDDException;

    /**
     * Checks if an examples file exists.
     * @param relativePath the path to the file, relative to the "examples" directory of the project
     * @return <code>true</code> if the given file exists, <code>false</code> otherwise
     */
    boolean existsExamplesFile(String relativePath);

    /**
     * Saves the given examples into a gherkin feature file, overwriting any existing file.
     * The target file path must be specified beforehand with the {@link Examples#setFullPath(String)}
     * or {@link Examples#setRelativePath(String)} method.
     * @param examples the examples we want to save
     * @throws BDDException if there was an error during saving the file or the file path is not specified
     */
    void saveExamples(Examples examples) throws BDDException;
}
| apache-2.0 |
mebigfatguy/java-driver | driver-core/src/test/java/com/datastax/driver/core/CustomPayloadTest.java | 12641 | /*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import com.datastax.driver.core.exceptions.UnsupportedFeatureException;
import com.datastax.driver.core.utils.CassandraVersion;
import com.google.common.collect.ImmutableMap;
import org.apache.log4j.Logger;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import static com.datastax.driver.core.ProtocolVersion.V3;
import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;
import static com.datastax.driver.core.querybuilder.QueryBuilder.select;
import static org.apache.log4j.Level.TRACE;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Fail.fail;
/**
 * Integration tests for native-protocol custom payloads (protocol V4+).
 * The cluster is started with Cassandra's {@code CustomPayloadMirroringQueryHandler}
 * (see the {@code @CCMConfig} JVM argument below), which echoes every outgoing
 * payload back to the client, so each test can assert that the incoming payload
 * equals the one it sent.
 */
@CassandraVersion(major = 2.2)
@CCMConfig(jvmArgs = "-Dcassandra.custom_query_handler_class=org.apache.cassandra.cql3.CustomPayloadMirroringQueryHandler")
public class CustomPayloadTest extends CCMTestsSupport {

    // Two distinct sample payloads, rebuilt before every test method.
    private Map<String, ByteBuffer> payload1;
    private Map<String, ByteBuffer> payload2;

    @BeforeMethod(groups = {"short", "unit"})
    public void initPayloads() {
        payload1 = ImmutableMap.of(
                "k1", ByteBuffer.wrap(new byte[]{1, 2, 3}),
                "k2", ByteBuffer.wrap(new byte[]{4, 5, 6})
        );
        payload2 = ImmutableMap.of(
                "k2", ByteBuffer.wrap(new byte[]{1, 2}),
                "k3", ByteBuffer.wrap(new byte[]{3, 4})
        );
    }

    // execute

    @Test(groups = "short")
    public void should_echo_custom_payload_when_executing_statement() throws Exception {
        Statement statement = new SimpleStatement("SELECT c2 FROM t1 where c1 = ?", 1);
        statement.setOutgoingPayload(payload1);
        ResultSet rows = session().execute(statement);
        Map<String, ByteBuffer> actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload1);
    }

    @Test(groups = "short")
    public void should_echo_custom_payload_when_executing_batch_statement() throws Exception {
        Statement statement = new BatchStatement().add(new SimpleStatement("INSERT INTO t1 (c1, c2) values (1, 'foo')"));
        statement.setOutgoingPayload(payload1);
        ResultSet rows = session().execute(statement);
        Map<String, ByteBuffer> actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload1);
    }

    @Test(groups = "short")
    public void should_echo_custom_payload_when_building_statement() throws Exception {
        Statement statement = select("c2").from("t1").where(eq("c1", 1)).setOutgoingPayload(payload1);
        ResultSet rows = session().execute(statement);
        Map<String, ByteBuffer> actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload1);
    }

    // prepare

    /**
     * Ensures that an incoming payload is propagated from prepared to bound statements.
     *
     * @throws Exception
     */
    @Test(groups = "short")
    public void should_propagate_incoming_payload_to_bound_statement() throws Exception {
        RegularStatement statement = new SimpleStatement("SELECT c2 as col1 FROM t1 where c1 = ?");
        statement.setOutgoingPayload(payload1);
        PreparedStatement ps = session().prepare(statement);
        // Prepared statement should inherit outgoing payload
        assertThat(ps.getOutgoingPayload()).isEqualTo(payload1);
        // Prepared statement should receive incoming payload
        assertThat(ps.getIncomingPayload()).isEqualTo(payload1);
        ps.setOutgoingPayload(null); // unset outgoing payload
        // bound statement should inherit from prepared statement's incoming payload
        BoundStatement bs = ps.bind(1);
        ResultSet rows = session().execute(bs);
        Map<String, ByteBuffer> actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload1);
        // same check with an unbound statement whose value is set afterwards
        bs = ps.bind();
        bs.setInt(0, 1);
        rows = session().execute(bs);
        actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload1);
    }

    /**
     * Ensures that an incoming payload is overridden by an explicitly set outgoing payload
     * when propagated to bound statements.
     *
     * @throws Exception
     */
    @Test(groups = "short")
    public void should_override_incoming_payload_when_outgoing_payload_explicitly_set_on_preparing_statement() throws Exception {
        RegularStatement statement = new SimpleStatement("SELECT c2 as col2 FROM t1 where c1 = ?");
        statement.setOutgoingPayload(payload1);
        PreparedStatement ps = session().prepare(statement);
        // Prepared statement should inherit outgoing payload
        assertThat(ps.getOutgoingPayload()).isEqualTo(payload1);
        // Prepared statement should receive incoming payload
        assertThat(ps.getIncomingPayload()).isEqualTo(payload1);
        ps.setOutgoingPayload(payload2); // override outgoing payload
        // bound statement should inherit from prepared statement's outgoing payload
        BoundStatement bs = ps.bind(1);
        ResultSet rows = session().execute(bs);
        Map<String, ByteBuffer> actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload2);
        // same check with an unbound statement whose value is set afterwards
        bs = ps.bind();
        bs.setInt(0, 1);
        rows = session().execute(bs);
        actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload2);
    }

    /**
     * Ensures that payloads can still be set individually on bound statements
     * if the prepared statement does not have a default payload.
     *
     * @throws Exception
     */
    @Test(groups = "short")
    public void should_not_set_any_payload_on_bound_statement() throws Exception {
        RegularStatement statement = new SimpleStatement("SELECT c2 as col3 FROM t1 where c1 = ?");
        PreparedStatement ps = session().prepare(statement);
        assertThat(ps.getOutgoingPayload()).isNull();
        assertThat(ps.getIncomingPayload()).isNull();
        // bound statement should not have outgoing payload
        BoundStatement bs = ps.bind(1);
        assertThat(bs.getOutgoingPayload()).isNull();
        // explicitly set a payload for this bound statement only
        bs.setOutgoingPayload(payload1);
        ResultSet rows = session().execute(bs);
        Map<String, ByteBuffer> actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload1);
        // a second bound statement should not have any payload
        bs = ps.bind();
        assertThat(bs.getOutgoingPayload()).isNull();
        bs.setInt(0, 1);
        rows = session().execute(bs);
        actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isNull();
    }

    // pagination

    /**
     * Ensures that a custom payload is propagated throughout pages.
     *
     * @throws Exception
     */
    @Test(groups = "short")
    public void should_echo_custom_payload_when_paginating() throws Exception {
        session().execute("INSERT INTO t1 (c1, c2) VALUES (1, 'a')");
        session().execute("INSERT INTO t1 (c1, c2) VALUES (1, 'b')");
        Statement statement = new SimpleStatement("SELECT c2 FROM t1 where c1 = 1");
        // fetch size 1 forces one server round trip (and thus one payload echo) per row
        statement.setFetchSize(1);
        statement.setOutgoingPayload(payload1);
        ResultSet rows = session().execute(statement);
        rows.all();
        assertThat(rows.getAllExecutionInfo()).extracting("incomingPayload").containsOnly(payload1);
    }

    // TODO retries, spec execs

    // edge cases

    @Test(groups = "short")
    public void should_encode_null_values() throws Exception {
        // null values must be encoded with the dedicated sentinel, not Java null
        Map<String, ByteBuffer> payload = new HashMap<String, ByteBuffer>();
        payload.put("k1", Statement.NULL_PAYLOAD_VALUE);
        Statement statement = new SimpleStatement("SELECT c2 FROM t1 where c1 = ?", 1);
        statement.setOutgoingPayload(payload);
        ResultSet rows = session().execute(statement);
        Map<String, ByteBuffer> actual = rows.getExecutionInfo().getIncomingPayload();
        assertThat(actual).isEqualTo(payload);
    }

    @Test(groups = "unit", expectedExceptions = NullPointerException.class)
    public void should_throw_npe_when_null_key_on_regular_statement() throws Exception {
        Map<String, ByteBuffer> payload = new HashMap<String, ByteBuffer>();
        payload.put(null, ByteBuffer.wrap(new byte[]{1}));
        new SimpleStatement("SELECT c2 FROM t1 where c1 = ?", 1).setOutgoingPayload(payload);
    }

    @Test(groups = "unit", expectedExceptions = NullPointerException.class)
    public void should_throw_npe_when_null_value_on_regular_statement() throws Exception {
        Map<String, ByteBuffer> payload = new HashMap<String, ByteBuffer>();
        payload.put("k1", null);
        new SimpleStatement("SELECT c2 FROM t1 where c1 = ?", 1).setOutgoingPayload(payload);
    }

    @Test(groups = "short", expectedExceptions = NullPointerException.class)
    public void should_throw_npe_when_null_key_on_prepared_statement() throws Exception {
        Map<String, ByteBuffer> payload = new HashMap<String, ByteBuffer>();
        payload.put(null, ByteBuffer.wrap(new byte[]{1}));
        session().prepare(new SimpleStatement("SELECT c2 FROM t1 where c1 = 1")).setOutgoingPayload(payload);
    }

    @Test(groups = "short", expectedExceptions = NullPointerException.class)
    public void should_throw_npe_when_null_value_on_prepared_statement() throws Exception {
        Map<String, ByteBuffer> payload = new HashMap<String, ByteBuffer>();
        payload.put("k1", null);
        session().prepare(new SimpleStatement("SELECT c2 FROM t1 where c1 = 2")).setOutgoingPayload(payload);
    }

    @Test(groups = "short")
    public void should_throw_ufe_when_protocol_version_lesser_than_4() throws Exception {
        // custom payloads require protocol V4; a V3 connection must reject them
        try {
            Cluster v3cluster = register(Cluster.builder()
                    .addContactPoints(getContactPoints())
                    .withPort(ccm().getBinaryPort())
                    .withProtocolVersion(V3)
                    .build())
                    .init();
            Session v3session = v3cluster.connect();
            Statement statement = new SimpleStatement("SELECT c2 FROM t1 where c1 = ?", 1);
            statement.setOutgoingPayload(payload1);
            v3session.execute(statement);
            fail("Should not send custom payloads with protocol V3");
        } catch (UnsupportedFeatureException e) {
            assertThat(e.getMessage()).isEqualTo(
                    "Unsupported feature with the native protocol V3 (which is currently in use): Custom payloads are only supported since native protocol V4");
        }
    }

    // log messages

    /**
     * Ensures that when debugging custom payloads, the driver will print appropriate log messages.
     *
     * @throws Exception
     */
    @Test(groups = "short")
    public void should_print_log_message_when_level_trace() throws Exception {
        Logger logger = Logger.getLogger(Message.logger.getName());
        MemoryAppender appender = new MemoryAppender();
        try {
            logger.setLevel(TRACE);
            logger.addAppender(appender);
            Statement statement = new SimpleStatement("SELECT c2 FROM t1 where c1 = ?", 1);
            statement.setOutgoingPayload(payload1);
            session().execute(statement);
            String logs = appender.waitAndGet(10000);
            assertThat(logs)
                    .contains("Sending payload: {k1:0x010203, k2:0x040506} (20 bytes total)")
                    .contains("Received payload: {k1:0x010203, k2:0x040506} (20 bytes total)");
        } finally {
            // restore logger state so other tests are not affected
            logger.setLevel(null);
            logger.removeAppender(appender);
        }
    }

    // Creates the table shared by all tests in this class.
    @Override
    public void onTestContextInitialized() {
        execute("CREATE TABLE t1 (c1 int, c2 text, PRIMARY KEY (c1, c2))");
    }
}
| apache-2.0 |
jasobrown/barker | src/main/java/jmh/barker/MultipleInstanceOfChecks.java | 1534 | package jmh.barker;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
/**
 * JMH micro-benchmark measuring the cost of reaching an {@code instanceof}
 * match after zero, one or two preceding failed {@code instanceof} checks.
 * All three benchmarks end up matching {@code Integer} and returning the
 * same shared value; only the number of failed checks before the hit differs.
 */
@State(Scope.Thread)
@Warmup(iterations = 4, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 8, time = 4, timeUnit = TimeUnit.SECONDS)
@Fork(value = 1,jvmArgsAppend = "-Xmx512M")
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Mode.SampleTime)
public class MultipleInstanceOfChecks
{
    // Integer.valueOf replaces the deprecated new Integer(int) constructor;
    // the instanceof results are identical regardless of instance identity.
    private static final Object i = Integer.valueOf(42);

    /** Baseline: the first instanceof check matches immediately. */
    @Benchmark
    public Object firstCheck()
    {
        if (i instanceof Integer)
            return i;
        return null;
    }

    /** One failed check (String) before the matching Integer check. */
    @Benchmark
    public Object secondCheck()
    {
        if (i instanceof String)
            return "sdfsdf";
        if (i instanceof Integer)
            return i;
        return null;
    }

    /**
     * Two failed checks (String, ReentrantLock) before the matching Integer
     * check. (Method name typo "thrid" is kept: renaming would change the
     * benchmark id reported by JMH.)
     */
    @Benchmark
    public Object thridCheck()
    {
        if (i instanceof String)
            return "sdfsdf";
        if (i instanceof ReentrantLock)
            return new CountDownLatch(1);
        if (i instanceof Integer)
            return i;
        return null;
    }
}
| apache-2.0 |
shopizer-ecommerce/shopizer | sm-shop/src/main/java/com/salesmanager/shop/store/controller/currency/facade/CurrencyFacadeImpl.java | 1123 | package com.salesmanager.shop.store.controller.currency.facade;
import com.salesmanager.core.business.services.reference.currency.CurrencyService;
import com.salesmanager.core.model.reference.currency.Currency;
import com.salesmanager.shop.store.api.exception.ResourceNotFoundException;
import com.salesmanager.shop.store.controller.currency.facade.CurrencyFacade;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.inject.Inject;
import org.springframework.stereotype.Service;
@Service
public class CurrencyFacadeImpl implements CurrencyFacade {

    @Inject
    private CurrencyService currencyService;

    /**
     * Returns all currencies known to the system, sorted in ascending
     * order of their ISO currency code.
     *
     * @return the sorted list of currencies
     * @throws ResourceNotFoundException if no currency is configured
     */
    @Override
    public List<Currency> getList() {
        List<Currency> currencyList = currencyService.list();
        if (currencyList.isEmpty()) {
            // Bug fix: the original message said "No languages found",
            // apparently copy-pasted from the language facade.
            throw new ResourceNotFoundException("No currencies found");
        }
        // Sort in place by currency code for a stable, predictable ordering.
        Collections.sort(currencyList, new Comparator<Currency>() {
            public int compare(Currency o1, Currency o2) {
                return o1.getCode().compareTo(o2.getCode());
            }
        });
        return currencyList;
    }
}
| apache-2.0 |
devetude/BOJ-PSJ | src/boj_9546/Main.java | 834 | package boj_9546;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
* 백준 온라인 저지 9546번 (3000번 버스) 문제풀이
*
* @see https://www.acmicpc.net/problem/9546
* @author devetude
*/
/**
 * Solution for Baekjoon Online Judge problem 9546 ("Bus 3000").
 * For each test case the answer is 2^n - 1.
 *
 * @see https://www.acmicpc.net/problem/9546
 * @author devetude
 */
public class Main {
    // Line separator appended after each answer.
    private static final String NEW_LINE = "\n";

    public static void main(String args[]) throws Exception {
        // Read all input through a buffered reader.
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int T = Integer.parseInt(br.readLine());
        // Accumulate all answers and print them in one go.
        StringBuilder sb = new StringBuilder();
        while (T-- != 0) {
            sb.append(answer(Integer.parseInt(br.readLine()))).append(NEW_LINE);
        }
        br.close();
        System.out.println(sb.toString());
    }

    /**
     * Computes 2^n - 1 exactly with integer arithmetic.
     * Bug fix: the original used {@code (int) Math.pow(2, n) - 1}, which goes
     * through floating point and overflows/saturates for n >= 31; a long bit
     * shift is exact for 0 <= n <= 62.
     *
     * @param n the exponent read from the input
     * @return 2^n - 1
     */
    static long answer(int n) {
        return (1L << n) - 1;
    }
}
atomfrede/QRGenerator | app/src/main/java/com/mEmoZz/qrgen/Tabs/VCardFragment.java | 1892 | /*
* Copyright (C) 2015 Mohamed Fathy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mEmoZz.qrgen.Tabs;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.Toast;
import com.mEmoZz.qrgen.MainActivity;
import com.mEmoZz.qrgen.R;
/**
* Created by Mohamed Fathy on 5/12/15.
*/
public class VCardFragment extends Fragment {

    /**
     * Inflates the vCard tab layout and wires its button to the vCard
     * generation logic in the hosting {@link MainActivity}.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.vcard_fragment, container, false);
        Button generateButton = (Button) root.findViewById(R.id.vcardBtn);
        generateButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                generateVCard();
            }
        });
        return root;
    }

    /**
     * Delegates vCard generation to the hosting activity; shows a toast if
     * the activity's core methods throw (e.g. because text fields are empty).
     */
    private void generateVCard() {
        try {
            ((MainActivity) getActivity()).vcardCore();
            ((MainActivity) getActivity()).mCore();
        } catch (Exception e) {
            Toast.makeText(getActivity(),
                    "Text fields cannot be empty!", Toast.LENGTH_SHORT)
                    .show();
        }
    }
}
| apache-2.0 |
zhiaixinyang/LightThink | greatbook/src/main/java/com/example/greatbook/widght/itemdecoration/PowerfulStickyDecoration.java | 5509 | package com.example.greatbook.widght.itemdecoration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;
import com.example.greatbook.widght.itemdecoration.listener.PowerGroupListener;
/**
* Created MDove 2017/10/3
*/
public class PowerfulStickyDecoration extends RecyclerView.ItemDecoration {
    private PowerGroupListener mGroupListener;
    private int mGroupHeight = 80; // height of the sticky group header, in pixels
    private boolean isAlignLeft = true; // true: header drawn left-aligned (default); false: right-aligned

    private PowerfulStickyDecoration(PowerGroupListener groupListener) {
        this.mGroupListener = groupListener;
    }

    @Override
    public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
        super.getItemOffsets(outRect, view, parent, state);
        int pos = parent.getChildAdapterPosition(view);
        String groupId = getGroupName(pos);
        if (groupId == null) return;
        // Only the first item of each group reserves extra top space for the header.
        if (pos == 0 || isFirstInGroup(pos)) {
            outRect.top = mGroupHeight;
        }
    }

    @Override
    public void onDrawOver(Canvas c, RecyclerView parent, RecyclerView.State state) {
        super.onDrawOver(c, parent, state);
        int itemCount = state.getItemCount();
        int childCount = parent.getChildCount();
        int left = parent.getPaddingLeft();
        int right = parent.getWidth() - parent.getPaddingRight();
        String preGroupName;
        String currentGroupName = null;
        for (int i = 0; i < childCount; i++) {
            View view = parent.getChildAt(i);
            int position = parent.getChildAdapterPosition(view);
            preGroupName = currentGroupName;
            currentGroupName = getGroupName(position);
            // Skip items that belong to the same group as the previous child.
            if (currentGroupName == null || TextUtils.equals(currentGroupName, preGroupName))
                continue;
            int viewBottom = view.getBottom();
            // 'top' determines where the currently floating group header is drawn.
            int top = Math.max(mGroupHeight, view.getTop());
            if (position + 1 < itemCount) {
                // Look up the next item's group name.
                String nextGroupName = getGroupName(position + 1);
                // The next group's first view is approaching the top: push the
                // floating header upwards so the two headers do not overlap.
                if (!currentGroupName.equals(nextGroupName) && viewBottom < top) {
                    top = viewBottom;
                }
            }
            // Obtain the header view for this position from the listener.
            View groupView = getGroupView(position);
            if (groupView == null) return;
            ViewGroup.LayoutParams layoutParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, mGroupHeight);
            groupView.setLayoutParams(layoutParams);
            groupView.setDrawingCacheEnabled(true);
            groupView.measure(
                    View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED),
                    View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));
            // Lay out the header view with the fixed width and height, then
            // render it to a bitmap via the drawing cache.
            groupView.layout(0, 0, right, mGroupHeight);
            groupView.buildDrawingCache();
            Bitmap bitmap = groupView.getDrawingCache();
            int marginLeft = isAlignLeft ? 0 : right - groupView.getMeasuredWidth();
            c.drawBitmap(bitmap, left + marginLeft, top - mGroupHeight, null);
        }
    }

    /**
     * Returns whether the item at the given position is the first of its group,
     * determined by comparing its group name with the previous item's.
     */
    private boolean isFirstInGroup(int pos) {
        if (pos == 0) {
            return true;
        } else {
            String prevGroupId = getGroupName(pos - 1);
            String groupId = getGroupName(pos);
            return !TextUtils.equals(prevGroupId, groupId);
        }
    }

    /**
     * Returns the group name for the given adapter position,
     * or null if no listener is set.
     *
     * @param position adapter position
     * @return group name, or null
     */
    private String getGroupName(int position) {
        if (mGroupListener != null) {
            return mGroupListener.getGroupName(position);
        } else {
            return null;
        }
    }

    /**
     * Returns the header view for the given adapter position,
     * or null if no listener is set.
     *
     * @param position adapter position
     * @return header view, or null
     */
    private View getGroupView(int position) {
        if (mGroupListener != null) {
            return mGroupListener.getGroupView(position);
        } else {
            return null;
        }
    }

    /** Fluent builder; the only way to construct a {@link PowerfulStickyDecoration}. */
    public static class Builder {
        PowerfulStickyDecoration mDecoration;

        private Builder(PowerGroupListener listener) {
            mDecoration = new PowerfulStickyDecoration(listener);
        }

        public static Builder init(PowerGroupListener listener) {
            return new Builder(listener);
        }

        /**
         * Sets the group header height.
         * @param groutHeight header height in pixels
         * @return this
         */
        public Builder setGroupHeight(int groutHeight) {
            mDecoration.mGroupHeight = groutHeight;
            return this;
        }

        /**
         * Sets the header alignment:
         * true = left-aligned (default), false = right-aligned.
         * @param b alignment flag
         * @return this
         */
        public Builder isAlignLeft(boolean b) {
            mDecoration.isAlignLeft = b;
            return this;
        }

        public PowerfulStickyDecoration build() {
            return mDecoration;
        }
    }
}
} | apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-chimesdkidentity/src/main/java/com/amazonaws/services/chimesdkidentity/model/transform/ListAppInstanceUserEndpointsRequestMarshaller.java | 2859 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.chimesdkidentity.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.chimesdkidentity.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * ListAppInstanceUserEndpointsRequestMarshaller
 *
 * <p>Marshals a {@code ListAppInstanceUserEndpointsRequest} into protocol form:
 * the user ARN goes into the request path, the paging parameters into query
 * parameters. This class is code-generated (see {@code @Generated}); do not
 * edit it by hand.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListAppInstanceUserEndpointsRequestMarshaller {

    // Path parameter: the ARN of the AppInstanceUser whose endpoints are listed.
    private static final MarshallingInfo<String> APPINSTANCEUSERARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PATH).marshallLocationName("appInstanceUserArn").build();
    // Query parameter: maximum number of endpoints to return per page.
    private static final MarshallingInfo<Integer> MAXRESULTS_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("max-results").build();
    // Query parameter: pagination token from a previous response.
    private static final MarshallingInfo<String> NEXTTOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("next-token").build();

    private static final ListAppInstanceUserEndpointsRequestMarshaller instance = new ListAppInstanceUserEndpointsRequestMarshaller();

    public static ListAppInstanceUserEndpointsRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(ListAppInstanceUserEndpointsRequest listAppInstanceUserEndpointsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listAppInstanceUserEndpointsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listAppInstanceUserEndpointsRequest.getAppInstanceUserArn(), APPINSTANCEUSERARN_BINDING);
            protocolMarshaller.marshall(listAppInstanceUserEndpointsRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listAppInstanceUserEndpointsRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
deleidos/digitaledge-platform | ingest/src/main/java/com/deleidos/rtws/core/framework/factory/Cache.java | 14543 | /**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.core.framework.factory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
/**
 * A simple thread-safe cache of named objects. Provides a callback interface to create new
 * objects for the cache when a request is made for a key that is not already in the cache.
 *
 * This is a first cut implementation. Should consider polishing it by merging it with the
 * factory abstractions, or perhaps replacing with ehcache.
 *
 * @param <K>
 *            The cache key object type.
 * @param <E>
 *            The cache entry object type.
 */
public class Cache<K, E> {

    /** The entries currently stored in this cache. */
    private final HashMap<K, E> entries = new HashMap<K, E>();

    /** Delegate used to instantiate new cache entries when requested. */
    private final Instantiator<K, E> instantiator;

    /** Concurrency lock for read only access. */
    private final ReadLock readLock;

    /** Concurrency lock for cache updates. */
    private final WriteLock writeLock;

    /**
     * Constructor.
     *
     * @param instantiator
     *            Delegate used to instantiate new cache entries when requested.
     */
    public Cache(Instantiator<K, E> instantiator) {
        super();
        this.instantiator = instantiator;
        ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
        readLock = lock.readLock();
        writeLock = lock.writeLock();
    }

    /**
     * Retrieves the object stored under the given key, creating and caching it via the
     * instantiator if it is not already present.
     *
     * Uses the standard read-lock / write-lock / downgrade pattern: a read lock cannot be
     * upgraded, so on a miss the read lock is released, the write lock is taken, and the
     * presence check is repeated before creating the entry.
     *
     * @param key the cache key; must be usable as a {@link HashMap} key
     * @return the cached (possibly freshly created) entry for the key
     */
    public E getInstance(K key) {
        readLock.lock();
        try {
            if (!entries.containsKey(key)) {
                readLock.unlock();
                writeLock.lock();
                try {
                    // Re-check under the write lock: another thread may have
                    // populated the entry between the unlock and lock above.
                    if (!entries.containsKey(key)) {
                        entries.put(key, instantiator.create(key));
                    }
                } finally {
                    // Downgrade: re-acquire the read lock before releasing the
                    // write lock so the subsequent get() stays protected.
                    readLock.lock();
                    writeLock.unlock();
                }
            }
            return entries.get(key);
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Retrieves a snapshot of the current cache contents.
     *
     * Fixed to acquire the read lock rather than the write lock: copying the values is a
     * read-only operation, and taking the write lock here would deadlock any thread that
     * already holds the read lock (and needlessly blocks concurrent readers).
     *
     * @return a new collection containing the entries at the time of the call
     */
    public Collection<E> getEntries() {
        readLock.lock();
        try {
            return new ArrayList<E>(entries.values());
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Interface defining an object that can create new cache entries. Users of a cache must
     * provide an appropriate implementation in order to self-populate the cache.
     *
     * @param <K>
     *            The cache key object type.
     * @param <E>
     *            The cache entry object type.
     */
    public static interface Instantiator<K, E> {

        /** Creates the cache entry for the given key. */
        public E create(K name);
    }
}
| apache-2.0 |
zhgxun/cNotes | java/banana/src/main/java/github/banana/design/proxy/simple/UserDao.java | 206 | package github.banana.design.proxy.simple;
public class UserDao implements Dao {

    /**
     * Persists the given user information. In this simple implementation the
     * record is just echoed to standard output.
     */
    @Override
    public void save(String message) {
        String line = "保存用户信息: " + message;
        System.out.println(line);
    }
}
| apache-2.0 |
shunghsiyu/trivial-work | computer-programming-2/src/programming2/ch10/pet/Kennel.java | 943 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package programming2.ch10.pet;
import java.util.ArrayList;
/**
 * AP Pet
 *
 * A simple collection of pets that can report each pet speaking on its own line.
 *
 * @author Shung-Hsi Yu <syu07@nyit.edu> ID#0906172
 * @version Apr 27, 2014
 */
public class Kennel {

    /** The pets currently housed in this kennel (typed; was a raw ArrayList). */
    private final ArrayList<Pet> petList;

    /** Creates an empty kennel. */
    public Kennel() {
        petList = new ArrayList<Pet>();
    }

    /** Adds a pet to this kennel. */
    public void addPet(Pet pet) {
        petList.add(pet);
    }

    /** Prints every pet's name and utterance, one per line. */
    public void allSpeak() {
        System.out.println(this.toString());
    }

    /**
     * Returns one "name: utterance" line per pet, without a trailing newline.
     *
     * Bug fixes: the original called deleteCharAt(length()-1) unconditionally,
     * which threw StringIndexOutOfBoundsException for an empty kennel, and it
     * removed only a single character even though the %n separator may be the
     * two-character "\r\n" on some platforms.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (Pet pet : petList) {
            sb.append(String.format("%s: %s%n", pet.getName(), pet.speak()));
        }
        String separator = String.format("%n");
        if (sb.length() >= separator.length()) {
            sb.setLength(sb.length() - separator.length());
        }
        return sb.toString();
    }
}
| apache-2.0 |
FLVC/fcrepo-src-3.4.2 | fcrepo-security/fcrepo-security-pep/src/main/java/org/fcrepo/server/security/xacml/pep/ws/operations/ModifyDatastreamByReferenceHandler.java | 6953 | /*
* File: ModifyDatastreamByReferenceHandler.java
*
* Copyright 2007 Macquarie E-Learning Centre Of Excellence
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.server.security.xacml.pep.ws.operations;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.sun.xacml.attr.AnyURIAttribute;
import com.sun.xacml.attr.AttributeValue;
import com.sun.xacml.attr.StringAttribute;
import com.sun.xacml.ctx.RequestCtx;
import org.apache.axis.AxisFault;
import org.apache.axis.MessageContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.fcrepo.common.Constants;
import org.fcrepo.server.security.xacml.pdp.data.FedoraPolicyStore;
import org.fcrepo.server.security.xacml.pep.PEPException;
import org.fcrepo.server.security.xacml.util.LogUtil;
/**
 * Operation handler for the API-M modifyDatastreamByReference call: extracts
 * the SOAP request parameters and builds the XACML authorization request that
 * the PEP evaluates before the operation is allowed to proceed.
 *
 * @author nishen@melcoe.mq.edu.au
 */
public class ModifyDatastreamByReferenceHandler
        extends AbstractOperationHandler {

    private static final Logger logger =
            LoggerFactory.getLogger(ModifyDatastreamByReferenceHandler.class);

    public ModifyDatastreamByReferenceHandler()
            throws PEPException {
        super();
    }

    /** No response-side authorization is performed for this operation. */
    public RequestCtx handleResponse(MessageContext context)
            throws OperationHandlerException {
        return null;
    }

    /**
     * Builds a XACML request from the modifyDatastreamByReference SOAP call.
     *
     * The SOAP body parameters are positional: pid, dsID, altIDs, dsLabel,
     * mimeType, formatURI, dsLocation, checksumType, checksum, logMessage,
     * force (the commented-out entries below document the unused positions).
     *
     * @param context the Axis message context carrying the SOAP request
     * @return the XACML request context for the PDP
     * @throws OperationHandlerException if the SOAP objects or parameters
     *             cannot be extracted, or the request cannot be built
     */
    public RequestCtx handleRequest(MessageContext context)
            throws OperationHandlerException {
        logger.debug("ModifyDatastreamByReferenceHandler/handleRequest!");

        RequestCtx req = null;
        List<Object> oMap = null;
        String pid = null;
        String dsID = null;
        // String[] altIDs = null;
        // String dsLabel = null;
        String mimeType = null;
        String formatURI = null;
        String dsLocation = null;
        String checksumType = null;
        String checksum = null;
        // String logMessage = null;
        // Boolean force = null;

        try {
            oMap = getSOAPRequestObjects(context);
            logger.debug("Retrieved SOAP Request Objects");
        } catch (AxisFault af) {
            logger.error("Error obtaining SOAP Request Objects", af);
            throw new OperationHandlerException("Error obtaining SOAP Request Objects",
                                                af);
        }

        try {
            // Positional extraction; unused positions are skipped (see above).
            pid = (String) oMap.get(0);
            dsID = (String) oMap.get(1);
            // altIDs = (String[]) oMap.get(2);
            // dsLabel = (String) oMap.get(3);
            mimeType = (String) oMap.get(4);
            formatURI = (String) oMap.get(5);
            dsLocation = (String) oMap.get(6);
            checksumType = (String) oMap.get(7);
            checksum = (String) oMap.get(8);
            // logMessage = (String) oMap.get(9);
            // force = (Boolean) oMap.get(10);
        } catch (Exception e) {
            logger.error("Error obtaining parameters", e);
            throw new OperationHandlerException("Error obtaining parameters.",
                                                e);
        }

        logger.debug("Extracted SOAP Request Objects");

        Map<URI, AttributeValue> actions = new HashMap<URI, AttributeValue>();
        Map<URI, AttributeValue> resAttr = new HashMap<URI, AttributeValue>();

        try {
            // Non-empty parameters become XACML resource attributes. The pid
            // is set twice on purpose: once as the Fedora PID attribute and
            // once as the standard XACML resource-id.
            if (pid != null && !"".equals(pid)) {
                resAttr.put(Constants.OBJECT.PID.getURI(),
                            new StringAttribute(pid));
            }
            if (pid != null && !"".equals(pid)) {
                resAttr.put(new URI(XACML_RESOURCE_ID),
                            new AnyURIAttribute(new URI(pid)));
            }
            if (dsID != null && !"".equals(dsID)) {
                resAttr.put(Constants.DATASTREAM.ID.getURI(),
                            new StringAttribute(dsID));
            }
            if (mimeType != null && !"".equals(mimeType)) {
                resAttr.put(Constants.DATASTREAM.NEW_MIME_TYPE.getURI(),
                            new StringAttribute(mimeType));
            }
            if (formatURI != null && !"".equals(formatURI)) {
                resAttr.put(Constants.DATASTREAM.NEW_FORMAT_URI.getURI(),
                            new AnyURIAttribute(new URI(formatURI)));
            }
            if (dsLocation != null && !"".equals(dsLocation)) {
                resAttr.put(Constants.DATASTREAM.NEW_LOCATION.getURI(),
                            new AnyURIAttribute(new URI(dsLocation)));
            }
            if (checksumType != null && !"".equals(checksumType)) {
                resAttr.put(Constants.DATASTREAM.NEW_CHECKSUM_TYPE.getURI(),
                            new StringAttribute(checksumType));
            }
            if (checksum != null && !"".equals(checksum)) {
                resAttr.put(Constants.DATASTREAM.NEW_CHECKSUM.getURI(),
                            new StringAttribute(checksum));
            }

            actions
                    .put(Constants.ACTION.ID.getURI(),
                         new StringAttribute(Constants.ACTION.MODIFY_DATASTREAM_BY_REFERENCE
                                 .getURI().toASCIIString()));
            actions.put(Constants.ACTION.API.getURI(),
                        new StringAttribute(Constants.ACTION.APIM.getURI()
                                .toASCIIString()));

            // modifying the FeSL policy datastream requires policy management permissions
            if (dsID != null && dsID.equals(FedoraPolicyStore.POLICY_DATASTREAM)) {
                actions.put(Constants.ACTION.ID.getURI(),
                            new StringAttribute(Constants.ACTION.MANAGE_POLICIES.getURI().toASCIIString()));
            }

            req =
                    getContextHandler().buildRequest(getSubjects(context),
                                                     actions,
                                                     resAttr,
                                                     getEnvironment(context));

            LogUtil.statLog(context.getUsername(),
                            Constants.ACTION.MODIFY_DATASTREAM_BY_REFERENCE
                                    .getURI().toASCIIString(),
                            pid,
                            dsID);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new OperationHandlerException(e.getMessage(), e);
        }

        return req;
    }
}
| apache-2.0 |
oakhole/journal | iecp/ums/src/main/java/com/oakhole/packet/cmpp/CMPP_QUERY_RESP.java | 2786 | package com.oakhole.packet.cmpp;
import java.io.IOException;
/**
 * CMPP "query response" packet (CMPP_QUERY_RESP).
 *
 * Holds the fields of the response body and serializes/deserializes them in
 * wire order via {@link #pack()} and {@link #unpack()}. Field semantics
 * (message totals, success/waiting/failure counters for MT and MO traffic)
 * should be confirmed against the CMPP protocol specification.
 */
public class CMPP_QUERY_RESP extends CMPPPacket {

    public CMPP_QUERY_RESP() {
    }

    public CMPP_QUERY_RESP(CMPPPacket packet) {
        super(packet);
    }

    // Query time, written as an 8-byte string on the wire
    // (presumably "yyyyMMdd" -- TODO confirm against the CMPP spec).
    private String Time;
    // Query type flag, a single byte on the wire.
    private byte Query_Type;
    // Query code, written as a 10-byte string on the wire.
    private String Query_Code;
    // The counters below are each 4-byte integers on the wire; names suggest
    // MT/MO totals and success (Scs), waiting (WT), failure (FL) counts --
    // TODO confirm exact semantics against the CMPP spec.
    private int MT_TLMsg;
    private int MT_Tlusr;
    private int MT_Scs;
    private int MT_WT;
    private int MT_FL;
    private int MO_Scs;
    private int MO_WT;
    private int MO_FL;

    public String getTime() {
        return Time;
    }

    public void setTime(String time) {
        Time = time;
    }

    public byte getQuery_Type() {
        return Query_Type;
    }

    public void setQuery_Type(byte query_Type) {
        Query_Type = query_Type;
    }

    public String getQuery_Code() {
        return Query_Code;
    }

    public void setQuery_Code(String query_Code) {
        Query_Code = query_Code;
    }

    public int getMT_TLMsg() {
        return MT_TLMsg;
    }

    public void setMT_TLMsg(int mT_TLMsg) {
        MT_TLMsg = mT_TLMsg;
    }

    public int getMT_Tlusr() {
        return MT_Tlusr;
    }

    public void setMT_Tlusr(int mT_Tlusr) {
        MT_Tlusr = mT_Tlusr;
    }

    public int getMT_Scs() {
        return MT_Scs;
    }

    public void setMT_Scs(int mT_Scs) {
        MT_Scs = mT_Scs;
    }

    public int getMT_WT() {
        return MT_WT;
    }

    public void setMT_WT(int mT_WT) {
        MT_WT = mT_WT;
    }

    public int getMT_FL() {
        return MT_FL;
    }

    public void setMT_FL(int mT_FL) {
        MT_FL = mT_FL;
    }

    public int getMO_Scs() {
        return MO_Scs;
    }

    public void setMO_Scs(int mO_Scs) {
        MO_Scs = mO_Scs;
    }

    public int getMO_WT() {
        return MO_WT;
    }

    public void setMO_WT(int mO_WT) {
        MO_WT = mO_WT;
    }

    public int getMO_FL() {
        return MO_FL;
    }

    public void setMO_FL(int mO_FL) {
        MO_FL = mO_FL;
    }

    /**
     * Serializes the packet: the base header via super.pack(), then the body
     * fields in wire order (Time, Query_Type, Query_Code, then the counters).
     *
     * @throws IOException if writing to the underlying stream fails
     */
    @Override
    public void pack() throws IOException {
        super.pack();
        this.writeString(dos, this.Time, 8);
        this.dos.writeByte(this.Query_Type);
        this.writeString(dos, this.Query_Code, 10);
        this.dos.writeInt(this.MT_TLMsg);
        this.dos.writeInt(this.MT_Tlusr);
        this.dos.writeInt(this.MT_Scs);
        this.dos.writeInt(this.MT_WT);
        this.dos.writeInt(this.MT_FL);
        this.dos.writeInt(this.MO_Scs);
        this.dos.writeInt(this.MO_WT);
        this.dos.writeInt(this.MO_FL);
    }

    /**
     * Deserializes the packet; reads must occur in exactly the same order and
     * with the same widths as {@link #pack()} writes them.
     *
     * @throws IOException if reading from the underlying stream fails
     */
    @Override
    public void unpack() throws IOException {
        super.unpack();
        this.Time = this.readString(dis, 8);
        this.Query_Type = this.dis.readByte();
        this.Query_Code = this.readString(dis, 10);
        this.MT_TLMsg = this.dis.readInt();
        this.MT_Tlusr = this.dis.readInt();
        this.MT_Scs = this.dis.readInt();
        this.MT_WT = this.dis.readInt();
        this.MT_FL = this.dis.readInt();
        this.MO_Scs = this.dis.readInt();
        this.MO_WT = this.dis.readInt();
        this.MO_FL = this.dis.readInt();
    }
}
| apache-2.0 |
eSDK/esdk_storage_native_java | source/OpenApi/src/main/java/com/huawei/ism/openapi/nas/homedir/HomeDirIterator.java | 2989 | package com.huawei.ism.openapi.nas.homedir;
import java.util.List;
import java.util.Map;
import com.huawei.ism.openapi.common.batchquery.ApiIterator;
import com.huawei.ism.openapi.common.commu.rest.RestManager;
import com.huawei.ism.openapi.common.commu.rest.RestRequestHandler;
import com.huawei.ism.openapi.common.exception.ApiException;
import com.huawei.ism.openapi.common.keydeifines.TLVNAS.HOMEDIR;
import com.huawei.ism.openapi.common.utils.OpenApiUtils;
/**
 * Iterator for batch-querying HomeDir objects over the device's REST API.
 *
 * @author gWX183785
 * @version V100R001C10
 */
public class HomeDirIterator extends ApiIterator<HomeDirMO, HomeDirQuery>
{
    /** Property names used as the batch-query filter header. */
    private static final String[] HOMEDIRFILTER =
        {HOMEDIR.ID, HOMEDIR.TENANCYID};

    /**
     * Constructor.
     *
     * @param restRequestHandler REST request handler
     * @param deviceID device identifier
     * @param qCondition batch query condition
     */
    public HomeDirIterator(RestRequestHandler restRequestHandler, String deviceID, HomeDirQuery qCondition)
    {
        super(restRequestHandler, deviceID, qCondition, false);
    }

    /**
     * Queries the total number of HomeDir objects via the "homedir/count"
     * REST resource.
     *
     * @return the count reported by the device
     * @throws ApiException if the REST request fails
     */
    @Override
    protected long getConcretCount() throws ApiException
    {
        String relativePath = OpenApiUtils.getOpenApiUtilsInstance().composeRelativeUri(getDeviceId(), "homedir",
            "count");
        Map<String, String> headParamMap = composeRequestHeader(false);
        RestManager<HomeDirMO> restManager = new RestManager<HomeDirMO>(HomeDirMO.class, getRestRequestHandler(),
            relativePath, headParamMap, null);
        HomeDirMO resPonsemo = restManager.getGetRequestMO();
        return resPonsemo.getCount();
    }

    /**
     * Fetches the next batch of HomeDir objects via the "homedir" REST
     * resource.
     *
     * @return the list of HomeDir data objects in this batch
     * @throws ApiException if the REST request fails
     */
    @Override
    protected List<HomeDirMO> getConcretBatchNext(boolean isAssociate) throws ApiException
    {
        String relativePath = OpenApiUtils.getOpenApiUtilsInstance().composeRelativeUri(getDeviceId(), "homedir");
        Map<String, String> headParamMap = composeRequestHeader(true);
        RestManager<HomeDirMO> restManager = new RestManager<HomeDirMO>(HomeDirMO.class, getRestRequestHandler(),
            relativePath, headParamMap, null);
        List<HomeDirMO> resPonsemoList = restManager.getGetRequestMOList();
        return resPonsemoList;
    }

    /**
     * Returns the property map of the given HomeDir data object, used as the
     * batch-query filter condition.
     *
     * @param modelMo HomeDir data object (may be null)
     * @return the object's property map, or null if modelMo is null
     */
    @Override
    protected Map<Object, Object> getConcretMoMap(HomeDirMO modelMo)
    {
        return (null != modelMo) ? (modelMo.getMO().getProperties()) : null;
    }

    /**
     * Returns the filter headers used for batch queries.
     *
     * @return the batch-query filter headers
     */
    @Override
    protected String[] getConcretFilterHeaders()
    {
        return HOMEDIRFILTER;
    }

    /** No fuzzy-filter headers are used for this iterator. */
    @Override
    protected String[] getFuzzyFilterHeaders()
    {
        return new String[0];
    }
}
| apache-2.0 |
jiang111/ZhiHu-TopAnswer | architecture/src/main/java/com/jiang/android/architecture/view/MultiStateView.java | 9493 | package com.jiang.android.architecture.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.jiang.android.architecture.R;
/**
 * A {@link FrameLayout} that shows exactly one of four mutually exclusive
 * child views at a time: content, loading, empty, or error.
 *
 * The content view is captured from XML (the first child added that is not
 * one of the state views); the other three can be supplied through the
 * msv_loadingView / msv_emptyView / msv_errorView styleable attributes or via
 * {@link #setViewForState}.
 */
public class MultiStateView extends FrameLayout {

    // Integer values of the msv_viewState XML attribute.
    private static final int UNKNOWN_VIEW = -1;
    private static final int CONTENT_VIEW = 0;
    private static final int ERROR_VIEW = 1;
    private static final int EMPTY_VIEW = 2;
    private static final int LOADING_VIEW = 3;

    /** The states this container can present. */
    public enum ViewState {
        CONTENT,
        LOADING,
        EMPTY,
        ERROR
    }

    private LayoutInflater mInflater;

    private View mContentView;

    private View mLoadingView;

    private View mErrorView;

    private View mEmptyView;

    /** Currently displayed state; defaults to the content view. */
    private ViewState mViewState = ViewState.CONTENT;

    public MultiStateView(Context context) {
        this(context, null);
    }

    public MultiStateView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(attrs);
    }

    public MultiStateView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(attrs);
    }

    /**
     * Inflates any state views declared via the msv_* attributes and reads
     * the initial view state from XML.
     */
    private void init(AttributeSet attrs) {
        mInflater = LayoutInflater.from(getContext());

        TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.MultiStateView);

        int loadingViewResId = a.getResourceId(R.styleable.MultiStateView_msv_loadingView, -1);
        if (loadingViewResId > -1) {
            mLoadingView = mInflater.inflate(loadingViewResId, this, false);
            addView(mLoadingView, mLoadingView.getLayoutParams());
        }

        int emptyViewResId = a.getResourceId(R.styleable.MultiStateView_msv_emptyView, -1);
        if (emptyViewResId > -1) {
            mEmptyView = mInflater.inflate(emptyViewResId, this, false);
            addView(mEmptyView, mEmptyView.getLayoutParams());
        }

        int errorViewResId = a.getResourceId(R.styleable.MultiStateView_msv_errorView, -1);
        if (errorViewResId > -1) {
            mErrorView = mInflater.inflate(errorViewResId, this, false);
            addView(mErrorView, mErrorView.getLayoutParams());
        }

        int viewState = a.getInt(R.styleable.MultiStateView_msv_viewState, UNKNOWN_VIEW);
        if (viewState != UNKNOWN_VIEW) {
            switch (viewState) {
                case CONTENT_VIEW:
                    mViewState = ViewState.CONTENT;
                    break;
                case ERROR_VIEW:
                    // Bug fix: this case previously set ViewState.EMPTY, so a
                    // layout declaring the error state showed the empty view.
                    mViewState = ViewState.ERROR;
                    break;
                case EMPTY_VIEW:
                    mViewState = ViewState.EMPTY;
                    break;
                case LOADING_VIEW:
                    mViewState = ViewState.LOADING;
                    break;
            }
        }

        a.recycle();
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        if (mContentView == null) throw new IllegalArgumentException("Content view is not defined");
        setView();
    }

    /* All of the addView methods have been overridden so that it can obtain the content view via XML
       It is NOT recommended to add views into MultiStateView via the addView methods, but rather use
       any of the setViewForState methods to set views for their given ViewState accordingly */
    @Override
    public void addView(View child) {
        if (isValidContentView(child)) mContentView = child;
        super.addView(child);
    }

    @Override
    public void addView(View child, int index) {
        if (isValidContentView(child)) mContentView = child;
        super.addView(child, index);
    }

    @Override
    public void addView(View child, int index, ViewGroup.LayoutParams params) {
        if (isValidContentView(child)) mContentView = child;
        super.addView(child, index, params);
    }

    @Override
    public void addView(View child, ViewGroup.LayoutParams params) {
        if (isValidContentView(child)) mContentView = child;
        super.addView(child, params);
    }

    @Override
    public void addView(View child, int width, int height) {
        if (isValidContentView(child)) mContentView = child;
        super.addView(child, width, height);
    }

    @Override
    protected boolean addViewInLayout(View child, int index, ViewGroup.LayoutParams params) {
        if (isValidContentView(child)) mContentView = child;
        return super.addViewInLayout(child, index, params);
    }

    @Override
    protected boolean addViewInLayout(View child, int index, ViewGroup.LayoutParams params, boolean preventRequestLayout) {
        if (isValidContentView(child)) mContentView = child;
        return super.addViewInLayout(child, index, params, preventRequestLayout);
    }

    /**
     * Returns the view registered for the given state, or null if none is set.
     */
    public View getView(ViewState state) {
        switch (state) {
            case LOADING:
                return mLoadingView;

            case CONTENT:
                return mContentView;

            case EMPTY:
                return mEmptyView;

            case ERROR:
                return mErrorView;

            default:
                return null;
        }
    }

    /** Returns the state currently being displayed. */
    public ViewState getViewState() {
        return mViewState;
    }

    /** Switches to the given state, updating child visibility if it changed. */
    public void setViewState(ViewState state) {
        if (state != mViewState) {
            mViewState = state;
            setView();
        }
    }

    /**
     * Shows the view for the current state and hides the others. Throws
     * {@link NullPointerException} if no view was supplied for that state.
     */
    private void setView() {
        switch (mViewState) {
            case LOADING:
                if (mLoadingView == null) {
                    throw new NullPointerException("Loading View");
                }

                mLoadingView.setVisibility(View.VISIBLE);
                if (mContentView != null) mContentView.setVisibility(View.GONE);
                if (mErrorView != null) mErrorView.setVisibility(View.GONE);
                if (mEmptyView != null) mEmptyView.setVisibility(View.GONE);
                break;

            case EMPTY:
                if (mEmptyView == null) {
                    throw new NullPointerException("Empty View");
                }

                mEmptyView.setVisibility(View.VISIBLE);
                if (mLoadingView != null) mLoadingView.setVisibility(View.GONE);
                if (mErrorView != null) mErrorView.setVisibility(View.GONE);
                if (mContentView != null) mContentView.setVisibility(View.GONE);
                break;

            case ERROR:
                if (mErrorView == null) {
                    throw new NullPointerException("Error View");
                }

                mErrorView.setVisibility(View.VISIBLE);
                if (mLoadingView != null) mLoadingView.setVisibility(View.GONE);
                if (mContentView != null) mContentView.setVisibility(View.GONE);
                if (mEmptyView != null) mEmptyView.setVisibility(View.GONE);
                break;

            case CONTENT:
            default:
                if (mContentView == null) {
                    // Should never happen, the view should throw an exception if no content view is present upon creation
                    throw new NullPointerException("Content View");
                }

                mContentView.setVisibility(View.VISIBLE);
                if (mLoadingView != null) mLoadingView.setVisibility(View.GONE);
                if (mErrorView != null) mErrorView.setVisibility(View.GONE);
                if (mEmptyView != null) mEmptyView.setVisibility(View.GONE);
                break;
        }
    }

    /**
     * Checks if the given {@link View} is valid for the Content View
     *
     * @param view The {@link View} to check
     * @return true if the view can serve as the content view
     */
    private boolean isValidContentView(View view) {
        if (mContentView != null && mContentView != view) {
            return false;
        }

        return view != mLoadingView && view != mErrorView && view != mEmptyView;
    }

    /**
     * Registers the view to use for the given state, replacing any previous
     * one, and optionally switches to that state immediately.
     */
    public void setViewForState(View view, ViewState state, boolean switchToState) {
        switch (state) {
            case LOADING:
                if (mLoadingView != null) removeView(mLoadingView);
                mLoadingView = view;
                addView(mLoadingView);
                break;

            case EMPTY:
                if (mEmptyView != null) removeView(mEmptyView);
                mEmptyView = view;
                addView(mEmptyView);
                break;

            case ERROR:
                if (mErrorView != null) removeView(mErrorView);
                mErrorView = view;
                addView(mErrorView);
                break;

            case CONTENT:
                if (mContentView != null) removeView(mContentView);
                mContentView = view;
                addView(mContentView);
                break;
        }

        if (switchToState) setViewState(state);
    }

    public void setViewForState(View view, ViewState state) {
        setViewForState(view, state, false);
    }

    public void setViewForState(int layoutRes, ViewState state, boolean switchToState) {
        if (mInflater == null) mInflater = LayoutInflater.from(getContext());
        View view = mInflater.inflate(layoutRes, this, false);
        setViewForState(view, state, switchToState);
    }

    public void setViewForState(int layoutRes, ViewState state) {
        setViewForState(layoutRes, state, false);
    }
}
| apache-2.0 |
googleapis/java-datacatalog | proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/BigQueryConnectionSpecOrBuilder.java | 2909 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/bigquery.proto
package com.google.cloud.datacatalog.v1;
public interface BigQueryConnectionSpecOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.datacatalog.v1.BigQueryConnectionSpec)
    com.google.protobuf.MessageOrBuilder {

  /**
   *
   *
   * <pre>
   * The type of the BigQuery connection.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.BigQueryConnectionSpec.ConnectionType connection_type = 1;
   * </code>
   *
   * @return The enum numeric value on the wire for connectionType.
   */
  int getConnectionTypeValue();
  /**
   *
   *
   * <pre>
   * The type of the BigQuery connection.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.BigQueryConnectionSpec.ConnectionType connection_type = 1;
   * </code>
   *
   * @return The connectionType.
   */
  com.google.cloud.datacatalog.v1.BigQueryConnectionSpec.ConnectionType getConnectionType();

  /**
   *
   *
   * <pre>
   * Specification for the BigQuery connection to a Cloud SQL instance.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec cloud_sql = 2;</code>
   *
   * @return Whether the cloudSql field is set.
   */
  boolean hasCloudSql();
  /**
   *
   *
   * <pre>
   * Specification for the BigQuery connection to a Cloud SQL instance.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec cloud_sql = 2;</code>
   *
   * @return The cloudSql.
   */
  com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec getCloudSql();
  /**
   *
   *
   * <pre>
   * Specification for the BigQuery connection to a Cloud SQL instance.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec cloud_sql = 2;</code>
   */
  com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpecOrBuilder getCloudSqlOrBuilder();

  /**
   *
   *
   * <pre>
   * True if there are credentials attached to the BigQuery connection; false
   * otherwise.
   * </pre>
   *
   * <code>bool has_credential = 3;</code>
   *
   * @return The hasCredential.
   */
  boolean getHasCredential();

  /**
   * Identifies which variant of the connection spec is populated.
   *
   * @return The case enum for the populated connection spec field.
   */
  public com.google.cloud.datacatalog.v1.BigQueryConnectionSpec.ConnectionSpecCase
      getConnectionSpecCase();
}
| apache-2.0 |
infogen7/infogen_soa | src/main/java/com/infogen/self_description/InfoGen_Parser_HTTP.java | 2583 | package com.infogen.self_description;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.infogen.self_description.component.Function;
/**
 * Parser that builds self-description {@link Function} records for the
 * HTTP endpoints of a Spring MVC controller class.
 *
 * @author larry/larrylv@outlook.com/created 2015-08-18
 * @since 1.0
 * @version 1.0
 */
public class InfoGen_Parser_HTTP extends InfoGen_Parser {

    /*
     * (non-Javadoc)
     *
     * @see com.infogen.self_description.Self_Description#self_description(java.util.Set)
     */
    @Override
    public List<Function> self_description(Class<?> clazz) {
        List<Function> functions = new ArrayList<>();
        // URL prefix taken from the class-level @RequestMapping, if present.
        String pre_url = "";
        RequestMapping class_url_annotation = clazz.getAnnotation(RequestMapping.class);
        if (class_url_annotation != null) {
            pre_url = class_url_annotation.value()[0].trim();
        }
        // Walk every declared method and describe those that map a request.
        for (Method method : clazz.getDeclaredMethods()) {
            RequestMapping request_mapping_annotation = method.getAnnotation(RequestMapping.class);// mapping path and HTTP verb
            if (request_mapping_annotation == null) {
                continue;
            }
            // Method-level URL suffix, e.g. "get/message".
            String suf_url = "";
            String[] values = request_mapping_annotation.value();
            if (values.length != 0) {
                suf_url = values[0].trim();
            }
            // Normalize "/<prefix>/<suffix>": collapse duplicate slashes, then
            // strip a trailing slash (keeping a bare "/" intact).
            // Bug fix: the original computed url.substring(0, url.length())
            // and discarded the result, so trailing slashes were never removed.
            String url = new StringBuilder("/").append(pre_url).append("/").append(suf_url).toString();
            url = Pattern.compile("['/']+").matcher(url).replaceAll("/").trim();
            if (url.length() > 1 && url.endsWith("/")) {
                url = url.substring(0, url.length() - 1);
            }
            // function
            Function function = new Function();
            function.setRequest_method(url);
            // Submit mode: defaults to GET when the annotation declares no verb.
            RequestMethod[] get_post_methods = request_mapping_annotation.method();
            if (get_post_methods.length == 0) {
                function.setSubmit_mode("GET");
            } else {
                function.setSubmit_mode(request_mapping_annotation.method()[0].name());// GET POST
            }
            // Method description annotations.
            getDescribe(function, method);
            // Input parameters (reflected formal parameters mapped to annotations);
            // servlet request/response parameters are excluded.
            getInParam(function, method, clazz, HttpServletRequest.class, HttpServletResponse.class);
            // outParams
            getOutParam(function, method);
            //
            functions.add(function);
        }
        return functions;
    }
}
| apache-2.0 |
52North/youngs | src/main/java/org/n52/youngs/load/impl/BuilderRecord.java | 1708 | /*
* Copyright 2015-2019 52°North Initiative for Geospatial Open Source
* Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.n52.youngs.load.impl;
import com.google.common.base.MoreObjects;
import java.util.Optional;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.n52.youngs.load.SinkRecord;
/**
*
* @author <a href="mailto:d.nuest@52north.org">Daniel Nüst</a>
*/
public class BuilderRecord implements SinkRecord {

    private final XContentBuilder builder;

    /** Record id; empty when the record was created without one. */
    private final Optional<String> id;

    /**
     * Creates a record with an explicit id.
     *
     * @param id the record id, may be {@code null} for "no id"
     * @param builder the content builder holding the record payload
     */
    public BuilderRecord(String id, XContentBuilder builder) {
        this.id = Optional.ofNullable(id);
        this.builder = builder;
    }

    /** Creates an id-less record. */
    public BuilderRecord(XContentBuilder builder) {
        this(null, builder);
    }

    public XContentBuilder getBuilder() {
        return builder;
    }

    @Override
    public String getId() {
        // Unchanged contract: throws NoSuchElementException when no id was
        // provided; callers are expected to check hasId() first.
        return id.get();
    }

    @Override
    public boolean hasId() {
        return id.isPresent();
    }

    @Override
    public String toString() {
        // BUGFIX: previously delegated to getId(), which throws
        // NoSuchElementException for id-less records, making them unprintable
        // (e.g. in log statements). Render a null id instead.
        return MoreObjects.toStringHelper(this)
                .add("id", id.orElse(null))
                .toString();
    }
}
| apache-2.0 |
cloudant/sync-android | cloudant-sync-datastore-core/src/main/java/com/cloudant/sync/internal/documentstore/callables/GetLastSequenceCallable.java | 2461 | /*
* Copyright © 2016 IBM Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.cloudant.sync.internal.documentstore.callables;
import com.cloudant.sync.documentstore.Database;
import com.cloudant.sync.internal.documentstore.DatabaseImpl;
import com.cloudant.sync.documentstore.DocumentStoreException;
import com.cloudant.sync.internal.sqlite.Cursor;
import com.cloudant.sync.internal.sqlite.SQLCallable;
import com.cloudant.sync.internal.sqlite.SQLDatabase;
import com.cloudant.sync.internal.util.DatabaseUtils;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Get the most recent (highest) sequence number for the database
*/
public class GetLastSequenceCallable implements SQLCallable<Long> {

    private static final Logger logger = Logger.getLogger(DatabaseImpl.class.getCanonicalName());

    /**
     * Queries the revs table for its highest sequence number.
     *
     * @param db database to query
     * @return the maximum sequence, {@code Database.SEQUENCE_NUMBER_START} when the
     *         table is empty (MAX returns SQL NULL), or 0 when no row comes back
     * @throws DocumentStoreException when the underlying query fails
     * @throws IllegalStateException when SQLite returns a non-integer, non-null value
     */
    @Override
    public Long call(SQLDatabase db) throws Exception {
        Cursor cursor = null;
        try {
            cursor = db.rawQuery("SELECT MAX(sequence) FROM revs", null);
            if (!cursor.moveToFirst()) {
                return 0L;
            }
            // TODO this will always be an integer or null and we can't be expected to handle other cases
            final int columnType = cursor.columnType(0);
            if (columnType == Cursor.FIELD_TYPE_INTEGER) {
                return cursor.getLong(0);
            }
            if (columnType == Cursor.FIELD_TYPE_NULL) {
                return Database.SEQUENCE_NUMBER_START;
            }
            throw new IllegalStateException("SQLite return an unexpected value.");
        } catch (SQLException e) {
            logger.log(Level.SEVERE, "Error getting last sequence", e);
            throw new DocumentStoreException(e);
        } finally {
            // Always release the cursor, regardless of outcome.
            DatabaseUtils.closeCursorQuietly(cursor);
        }
    }
}
| apache-2.0 |
Taller/sqlworkbench-plus | src/workbench/db/oracle/OracleMergeGenerator.java | 4998 | /*
* OracleMergeGenerator.java
*
* This file is part of SQL Workbench/J, http://www.sql-workbench.net
*
* Copyright 2002-2015, Thomas Kellerer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* To contact the author please send an email to: support@sql-workbench.net
*
*/
package workbench.db.oracle;
import workbench.db.ColumnIdentifier;
import workbench.db.TableIdentifier;
import workbench.storage.ColumnData;
import workbench.storage.MergeGenerator;
import workbench.storage.ResultInfo;
import workbench.storage.RowData;
import workbench.storage.RowDataContainer;
import workbench.storage.SqlLiteralFormatter;
/**
*
* @author Thomas Kellerer
*/
public class OracleMergeGenerator
	implements MergeGenerator
{
	// Formats column values as SQL literals, using ANSI date literal syntax.
	private SqlLiteralFormatter formatter;

	public OracleMergeGenerator()
	{
		this.formatter = new SqlLiteralFormatter(SqlLiteralFormatter.ANSI_DATE_LITERAL_TYPE);
	}

	/**
	 * Generates the opening "MERGE INTO ... USING (" part of the statement,
	 * without any data rows (rows are added separately via addRow()).
	 */
	@Override
	public String generateMergeStart(RowDataContainer data)
	{
		StringBuilder result = new StringBuilder(100);
		generateStart(result, data, false);
		return result.toString();
	}

	/**
	 * Renders one data row as a "SELECT ... FROM dual" select for the USING
	 * clause. Rows after the first are prefixed with UNION ALL; only the very
	 * first row carries column aliases (rowIndex == 0).
	 */
	@Override
	public String addRow(ResultInfo info, RowData row, long rowIndex)
	{
		StringBuilder sql = new StringBuilder(100);
		if (rowIndex > 0) sql.append("\n UNION ALL\n");
		appendValues(sql, info, row, rowIndex == 0);
		return sql.toString();
	}

	/**
	 * Generates the closing part of the MERGE statement: the ON join, the
	 * WHEN MATCHED update branch and the WHEN NOT MATCHED insert branch.
	 */
	@Override
	public String generateMergeEnd(RowDataContainer data)
	{
		StringBuilder sql = new StringBuilder(data.getRowCount());
		appendJoin(sql, data);
		appendUpdate(sql, data);
		appendInsert(sql, data);
		return sql.toString();
	}

	/**
	 * Generates a complete MERGE statement for all rows in the container:
	 * header + inlined data rows + join + update + insert branches.
	 */
	@Override
	public String generateMerge(RowDataContainer data)
	{
		StringBuilder sql = new StringBuilder(data.getRowCount());
		generateStart(sql, data, true);
		appendJoin(sql, data);
		appendUpdate(sql, data);
		appendInsert(sql, data);
		return sql.toString();
	}

	/**
	 * Appends "MERGE INTO <table> ut USING (" and, when withData is true, all
	 * rows as a UNION ALL chain of selects (aliases only on the first row).
	 */
	private void generateStart(StringBuilder sql, RowDataContainer data, boolean withData)
	{
		TableIdentifier tbl = data.getUpdateTable();
		sql.append("MERGE INTO ");
		sql.append(tbl.getTableExpression(data.getOriginalConnection()));
		sql.append(" ut\nUSING (\n");
		if (withData)
		{
			ResultInfo info = data.getResultInfo();
			for (int row=0; row < data.getRowCount(); row++)
			{
				if (row > 0) sql.append("\n UNION ALL\n");
				appendValues(sql, info, data.getRow(row), row == 0);
			}
		}
	}

	/**
	 * Appends the ") md ON (...)" join condition, equating every primary-key
	 * column of the target alias "ut" with the data alias "md".
	 */
	private void appendJoin(StringBuilder sql, RowDataContainer data)
	{
		ResultInfo info = data.getResultInfo();
		sql.append("\n) md ON (");
		int pkCount = 0;
		for (int col=0; col < info.getColumnCount(); col ++)
		{
			ColumnIdentifier colid = info.getColumn(col);
			if (!colid.isPkColumn()) continue; // only PK columns participate in the join
			if (pkCount > 0) sql.append(" AND ");
			sql.append("ut.");
			sql.append(info.getColumnName(col));
			sql.append(" = md.");
			sql.append(info.getColumnName(col));
			pkCount ++;
		}
		sql.append(")");
	}

	/**
	 * Appends one row as "  SELECT <literal> [AS <col>], ... FROM dual".
	 * Column aliases are emitted only when useAlias is true (first row).
	 */
	private void appendValues(StringBuilder sql, ResultInfo info, RowData rd, boolean useAlias)
	{
		sql.append(" SELECT ");
		for (int col=0; col < info.getColumnCount(); col++)
		{
			if (col > 0) sql.append(", ");
			ColumnData cd = new ColumnData(rd.getValue(col), info.getColumn(col));
			sql.append(formatter.getDefaultLiteral(cd)); // value rendered as a SQL literal
			if (useAlias)
			{
				sql.append(" AS ");
				sql.append(info.getColumnName(col));
			}
		}
		sql.append(" FROM dual");
	}

	/**
	 * Appends the "WHEN MATCHED THEN UPDATE SET ..." branch, assigning every
	 * non-PK column from the data alias. PK columns are never updated.
	 */
	private void appendUpdate(StringBuilder sql, RowDataContainer data)
	{
		sql.append("\nWHEN MATCHED THEN UPDATE");
		ResultInfo info = data.getResultInfo();
		int colCount = 0;
		for (int col=0; col < info.getColumnCount(); col ++)
		{
			ColumnIdentifier id = info.getColumn(col);
			if (id.isPkColumn()) continue; // PK columns are part of the join, not the SET list
			if (colCount == 0) sql.append("\n SET ");
			if (colCount > 0) sql.append(",\n ");
			sql.append("ut.");
			sql.append(info.getColumnName(col));
			sql.append(" = md.");
			sql.append(info.getColumnName(col));
			colCount ++;
		}
	}

	/**
	 * Appends the "WHEN NOT MATCHED THEN INSERT (...) VALUES (...)" branch,
	 * listing all columns; values come from the data alias "md".
	 */
	private void appendInsert(StringBuilder sql, RowDataContainer data)
	{
		sql.append("\nWHEN NOT MATCHED THEN\n INSERT (");
		ResultInfo info = data.getResultInfo();
		// Column list and VALUES list are built in lockstep in a single pass.
		StringBuilder columns = new StringBuilder(info.getColumnCount() * 10);
		for (int col=0; col < info.getColumnCount(); col ++)
		{
			if (col > 0)
			{
				sql.append(", ");
				columns.append(", ");
			}
			sql.append(info.getColumnName(col));
			columns.append("md.");
			columns.append(info.getColumnName(col));
		}
		sql.append(")\n");
		sql.append(" VALUES (");
		sql.append(columns);
		sql.append(");");
	}
}
| apache-2.0 |
mnovak1/activemq-artemis | artemis-server/src/main/java/org/apache/activemq/artemis/core/postoffice/impl/SimpleAddressManager.java | 12315 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.postoffice.impl;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.config.WildcardConfiguration;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.postoffice.Address;
import org.apache.activemq.artemis.core.postoffice.AddressManager;
import org.apache.activemq.artemis.core.postoffice.Binding;
import org.apache.activemq.artemis.core.postoffice.Bindings;
import org.apache.activemq.artemis.core.postoffice.BindingsFactory;
import org.apache.activemq.artemis.core.postoffice.QueueBinding;
import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle;
import org.apache.activemq.artemis.api.core.RoutingType;
import org.apache.activemq.artemis.core.server.cluster.impl.MessageLoadBalancingType;
import org.apache.activemq.artemis.core.server.impl.AddressInfo;
import org.apache.activemq.artemis.core.transaction.Transaction;
import org.apache.activemq.artemis.utils.CompositeAddress;
import org.jboss.logging.Logger;
/**
* A simple address manager that maintains the addresses and bindings.
*/
public class SimpleAddressManager implements AddressManager {

   private static final Logger logger = Logger.getLogger(SimpleAddressManager.class);

   // Address name -> metadata (routing types, id). Backing store for getAddresses().
   private final ConcurrentMap<SimpleString, AddressInfo> addressInfoMap = new ConcurrentHashMap<>();

   // May be null; when present, address add/update operations are persisted through it.
   private final StorageManager storageManager;

   /**
    * HashMap<Address, Binding>
    */
   protected final ConcurrentMap<SimpleString, Bindings> mappings = new ConcurrentHashMap<>();

   /**
    * HashMap<QueueName, Binding>
    */
   private final ConcurrentMap<SimpleString, Binding> nameMap = new ConcurrentHashMap<>();

   private final BindingsFactory bindingsFactory;

   protected final WildcardConfiguration wildcardConfiguration;

   public SimpleAddressManager(final BindingsFactory bindingsFactory, final StorageManager storageManager) {
      this(bindingsFactory, new WildcardConfiguration(), storageManager);
   }

   public SimpleAddressManager(final BindingsFactory bindingsFactory,
                               final WildcardConfiguration wildcardConfiguration,
                               final StorageManager storageManager) {
      this.wildcardConfiguration = wildcardConfiguration;
      this.bindingsFactory = bindingsFactory;
      this.storageManager = storageManager;
   }

   /**
    * Registers a binding under its unique name and adds it to its address's
    * bindings collection.
    *
    * @throws org.apache.activemq.artemis.api.core.ActiveMQException (via the
    *         message bundle) when a binding with the same unique name exists
    * @return the value of addMappingInternal - see the review note on that method
    */
   @Override
   public boolean addBinding(final Binding binding) throws Exception {
      // putIfAbsent makes the uniqueness check and the insert atomic.
      if (nameMap.putIfAbsent(binding.getUniqueName(), binding) != null) {
         throw ActiveMQMessageBundle.BUNDLE.bindingAlreadyExists(binding);
      }
      if (logger.isTraceEnabled()) {
         logger.trace("Adding binding " + binding + " with address = " + binding.getUniqueName(), new Exception("trace"));
      }
      return addMappingInternal(binding.getAddress(), binding);
   }

   /**
    * Removes the binding with the given unique name from both maps.
    *
    * @return the removed binding, or null when no such binding exists
    */
   @Override
   public Binding removeBinding(final SimpleString uniqueName, Transaction tx) throws Exception {
      final Binding binding = nameMap.remove(uniqueName);
      if (binding == null) {
         return null;
      }
      removeBindingInternal(binding.getAddress(), uniqueName);
      return binding;
   }

   /** Exact-match lookup of the bindings collection for a routing address (may be null). */
   @Override
   public Bindings getBindingsForRoutingAddress(final SimpleString address) throws Exception {
      return mappings.get(address);
   }

   /** Looks up a single binding by its (queue) name; FQQN input is reduced to the queue part. */
   @Override
   public Binding getBinding(final SimpleString bindableName) {
      return nameMap.get(CompositeAddress.extractQueueName(bindableName));
   }

   // NOTE(review): exposes the internal concurrent map directly; callers could mutate it.
   @Override
   public Map<SimpleString, Binding> getBindings() {
      return nameMap;
   }

   /**
    * Collects every binding whose address matches the given (possibly wildcard)
    * address, using the configured wildcard syntax.
    */
   @Override
   public Bindings getMatchingBindings(final SimpleString address) throws Exception {
      Address add = new AddressImpl(address, wildcardConfiguration);
      Bindings bindings = bindingsFactory.createBindings(address);
      for (Binding binding : nameMap.values()) {
         Address addCheck = new AddressImpl(binding.getAddress(), wildcardConfiguration);
         if (addCheck.matches(add)) {
            bindings.addBinding(binding);
         }
      }
      return bindings;
   }

   /** Collects every binding whose address equals the given address exactly (no wildcards). */
   @Override
   public Bindings getDirectBindings(final SimpleString address) throws Exception {
      Bindings bindings = bindingsFactory.createBindings(address);
      for (Binding binding : nameMap.values()) {
         if (binding.getAddress().equals(address)) {
            bindings.addBinding(binding);
         }
      }
      return bindings;
   }

   /**
    * Finds a local queue for the address: first tries the binding registered
    * under the address name itself, then falls back to scanning the address's
    * bindings for any LocalQueueBinding.
    *
    * @return the unique name of the matching queue binding, or null
    */
   @Override
   public SimpleString getMatchingQueue(final SimpleString address, RoutingType routingType) throws Exception {
      Binding binding = getBinding(address);
      if (binding == null || !(binding instanceof LocalQueueBinding) || !binding.getAddress().equals(address)) {
         Bindings bindings = mappings.get(address);
         if (bindings != null) {
            for (Binding theBinding : bindings.getBindings()) {
               if (theBinding instanceof LocalQueueBinding) {
                  binding = theBinding;
                  break;
               }
            }
         }
      }
      return binding != null ? binding.getUniqueName() : null;
   }

   /**
    * Looks up a queue binding by queue name and verifies it belongs to the
    * given address (an empty address skips the check).
    *
    * @throws IllegalStateException when the queue exists under a different address
    */
   @Override
   public SimpleString getMatchingQueue(final SimpleString address,
                                        final SimpleString queueName,
                                        RoutingType routingType) throws Exception {
      Binding binding = getBinding(queueName);
      if (binding != null && !binding.getAddress().equals(address) && !address.toString().isEmpty()) {
         throw new IllegalStateException("queue belongs to address" + binding.getAddress());
      }
      return binding != null ? binding.getUniqueName() : null;
   }

   // NOTE(review): clears bindings but not addressInfoMap; confirm that is intended.
   @Override
   public void clear() {
      nameMap.clear();
      mappings.clear();
   }

   /** Returns a snapshot copy of all known address names. */
   @Override
   public Set<SimpleString> getAddresses() {
      Set<SimpleString> addresses = new HashSet<>();
      addresses.addAll(addressInfoMap.keySet());
      return addresses;
   }

   /**
    * Removes one binding from an address's collection, and drops the address
    * entry entirely when its collection becomes empty.
    */
   protected void removeBindingInternal(final SimpleString address, final SimpleString bindableName) {
      Bindings bindings = mappings.get(address);
      if (bindings != null) {
         removeMapping(bindableName, bindings);
         if (bindings.getBindings().isEmpty()) {
            mappings.remove(address);
         }
      }
   }

   /**
    * Finds and removes the binding with the given name from a bindings collection.
    *
    * @throws IllegalStateException when the binding is not present
    * @return the removed binding
    */
   protected Binding removeMapping(final SimpleString bindableName, final Bindings bindings) {
      Binding theBinding = null;
      for (Binding binding : bindings.getBindings()) {
         if (binding.getUniqueName().equals(CompositeAddress.extractQueueName(bindableName))) {
            theBinding = binding;
            break;
         }
      }
      if (theBinding == null) {
         throw new IllegalStateException("Cannot find binding " + bindableName);
      }
      bindings.removeBinding(theBinding);
      return theBinding;
   }

   /**
    * Adds a binding to the address's bindings collection, lazily creating the
    * collection with a putIfAbsent race check.
    *
    * NOTE(review): the return value is true only when a concurrent creation race
    * was lost (putIfAbsent returned an existing collection); when the collection
    * already existed before this call it returns false. Confirm callers expect
    * this rather than "address already had bindings".
    */
   protected boolean addMappingInternal(final SimpleString address, final Binding binding) throws Exception {
      Bindings bindings = mappings.get(address);
      Bindings prevBindings = null;
      if (bindings == null) {
         bindings = bindingsFactory.createBindings(address);
         prevBindings = mappings.putIfAbsent(address, bindings);
         if (prevBindings != null) {
            bindings = prevBindings; // another thread created it first; use theirs
         }
      }
      bindings.addBinding(binding);
      return prevBindings != null;
   }

   /** Registers address metadata in memory only (no persistence); true when newly added. */
   @Override
   public boolean reloadAddressInfo(AddressInfo addressInfo) throws Exception {
      return addressInfoMap.putIfAbsent(addressInfo.getName(), addressInfo) == null;
   }

   /**
    * Registers address metadata and, when it is new and a storage manager is
    * configured, persists the address binding in its own transaction
    * (rolled back on failure).
    */
   @Override
   public boolean addAddressInfo(AddressInfo addressInfo) throws Exception {
      boolean added = reloadAddressInfo(addressInfo);
      if (added && storageManager != null) {
         long txID = storageManager.generateID();
         try {
            storageManager.addAddressBinding(txID, addressInfo);
            storageManager.commitBindings(txID);
         } catch (Exception e) {
            try {
               storageManager.rollbackBindings(txID);
            } catch (Exception ignored) {
               // best-effort rollback; the original failure is rethrown below
            }
            throw e;
         }
      }
      return added;
   }

   /**
    * Replaces the routing types of an existing address. No-op when the new set
    * equals the current one. Validates that no existing queue binding uses a
    * routing type being removed, then persists the change as delete+add in one
    * transaction.
    *
    * @throws org.apache.activemq.artemis.api.core.ActiveMQException (via the
    *         message bundle) when the address does not exist
    */
   @Override
   public AddressInfo updateAddressInfo(SimpleString addressName,
                                        EnumSet<RoutingType> routingTypes) throws Exception {
      AddressInfo info = addressInfoMap.get(addressName);
      if (info == null) {
         throw ActiveMQMessageBundle.BUNDLE.addressDoesNotExist(addressName);
      }

      if (routingTypes == null || isEquals(routingTypes, info.getRoutingTypes())) {
         // there are no changes.. we just give up now
         return info;
      }

      validateRoutingTypes(addressName, routingTypes);
      final EnumSet<RoutingType> updatedRoutingTypes = EnumSet.copyOf(routingTypes);
      info.setRoutingTypes(updatedRoutingTypes);

      if (storageManager != null) {
         //it change the address info without any lock!
         final long txID = storageManager.generateID();
         try {
            storageManager.deleteAddressBinding(txID, info.getId());
            storageManager.addAddressBinding(txID, info);
            storageManager.commitBindings(txID);
         } catch (Exception e) {
            try {
               storageManager.rollbackBindings(txID);
            } catch (Throwable ignored) {
               // best-effort rollback; the original failure is rethrown below
            }
            throw e;
         }
      }
      return info;
   }

   /** Order-insensitive equality of two routing-type collections (null/empty treated alike). */
   private boolean isEquals(Collection<RoutingType> set1, EnumSet<RoutingType> set2) {
      Set<RoutingType> eset1 = set1 == null || set1.isEmpty() ? Collections.emptySet() : EnumSet.copyOf(set1);
      Set<RoutingType> eset2 = set2 == null || set2.isEmpty() ? Collections.emptySet() : EnumSet.copyOf(set2);
      if (eset1.size() == 0 && eset2.size() == 0) {
         return true;
      }
      if (eset1.size() != eset2.size()) {
         return false;
      }
      return eset2.containsAll(eset1);
   }

   /**
    * Rejects a routing-type update that would orphan an existing queue binding:
    * every queue bound directly to the address must keep a supported routing type.
    */
   private void validateRoutingTypes(SimpleString addressName, EnumSet<RoutingType> routingTypes) {
      final Bindings bindings = this.mappings.get(addressName);
      if (bindings != null) {
         for (Binding binding : bindings.getBindings()) {
            if (binding instanceof QueueBinding) {
               final QueueBinding queueBinding = (QueueBinding) binding;
               final RoutingType routingType = queueBinding.getQueue().getRoutingType();
               if (!routingTypes.contains(routingType) && binding.getAddress().equals(addressName)) {
                  throw ActiveMQMessageBundle.BUNDLE.invalidRoutingTypeDelete(routingType, addressName.toString());
               }
            }
         }
      }
   }

   /** In-memory removal only; returns the removed info or null. */
   @Override
   public AddressInfo removeAddressInfo(SimpleString address) throws Exception {
      return addressInfoMap.remove(address);
   }

   @Override
   public AddressInfo getAddressInfo(SimpleString addressName) {
      return addressInfoMap.get(addressName);
   }

   // NOTE(review): NPEs when the address has no bindings collection; confirm callers guarantee it exists.
   @Override
   public void updateMessageLoadBalancingTypeForAddress(SimpleString address, MessageLoadBalancingType messageLoadBalancingType) throws Exception {
      getBindingsForRoutingAddress(address).setMessageLoadBalancingType(messageLoadBalancingType);
   }
}
| apache-2.0 |
badpopcorn/linkdroid | src/org/linkdroid/WebhooksProvider.java | 8379 | /*
* Copyright 2009 BadPopcorn, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.linkdroid;
import java.util.HashMap;
import org.linkdroid.Constants.WebhookColumns;
import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.content.res.Resources;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
public class WebhooksProvider extends ContentProvider implements WebhookColumns {
  private static final String TAG = "WebhookProvider";

  private static final String DATABASE_NAME = "webhooks.db";
  private static final int DATABASE_VERSION = 1;
  private static final String TABLE_NAME = "webhooks";
  // Projection map used by SQLiteQueryBuilder; here an identity mapping of all columns.
  private static final HashMap<String, String> COLUMNS_PROJECTION;

  // UriMatcher codes: whole collection vs. a single row addressed by id.
  private static final int WEBHOOKS = 1;
  private static final int WEBHOOK_ID = 2;

  /**
   * This class helps open, create, and upgrade the database file.
   */
  private static class DatabaseHelper extends SQLiteOpenHelper {

    DatabaseHelper(Context context) {
      super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
      Log.w(TAG, "Creating database from version " + DATABASE_VERSION);
      db.execSQL("CREATE TABLE " + TABLE_NAME + " (" + _ID
          + " INTEGER PRIMARY KEY," + NAME + " TEXT," + URI + " TEXT," + SECRET
          + " TEXT," + NONCE_RANDOM + " INTEGER," + NONCE_TIMESTAMP
          + " INTEGER," + CREATED_AT + " INTEGER," + UPDATED_AT + " INTEGER);");
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
      // Destructive upgrade: drops the table and recreates it, losing all rows.
      Log.w(TAG, "Upgrading database from version " + oldVersion + " to "
          + newVersion + ", which will destroy all old data");
      db.execSQL("DROP TABLE IF EXISTS " + TABLE_NAME);
      onCreate(db);
    }
  }

  static {
    COLUMNS_PROJECTION = new HashMap<String, String>();
    COLUMNS_PROJECTION.put(_ID, _ID);
    COLUMNS_PROJECTION.put(NAME, NAME);
    COLUMNS_PROJECTION.put(URI, URI);
    COLUMNS_PROJECTION.put(SECRET, SECRET);
    COLUMNS_PROJECTION.put(NONCE_RANDOM, NONCE_RANDOM);
    COLUMNS_PROJECTION.put(NONCE_TIMESTAMP, NONCE_TIMESTAMP);
    COLUMNS_PROJECTION.put(CREATED_AT, CREATED_AT);
    COLUMNS_PROJECTION.put(UPDATED_AT, UPDATED_AT);
  }

  private UriMatcher uriMatcher;

  private DatabaseHelper dbHelper;

  /** Sets up the database helper and the URI routes for this provider. */
  @Override
  public boolean onCreate() {
    dbHelper = new DatabaseHelper(getContext());
    uriMatcher = new UriMatcher(UriMatcher.NO_MATCH);
    uriMatcher.addURI(AUTHORITY, "webhooks", WEBHOOKS);
    uriMatcher.addURI(AUTHORITY, "webhooks/#", WEBHOOK_ID);
    return true;
  }

  /**
   * Deletes rows matching the URI (whole collection with optional selection, or
   * one row by id combined with the selection). Notifies observers and returns
   * the number of rows removed.
   */
  @Override
  public int delete(Uri uri, String where, String[] whereArgs) {
    SQLiteDatabase db = dbHelper.getWritableDatabase();
    int count;
    switch (uriMatcher.match(uri)) {
      case WEBHOOKS:
        count = db.delete(TABLE_NAME, where, whereArgs);
        break;
      case WEBHOOK_ID:
        // Constrain the caller's selection to the row id from the URI path.
        String id = uri.getPathSegments().get(1);
        count = db.delete(TABLE_NAME, _ID + "=" + id
            + (!TextUtils.isEmpty(where) ? " AND (" + where + ')' : ""),
            whereArgs);
        break;
      default:
        throw new IllegalArgumentException("Unknown URI " + uri);
    }

    getContext().getContentResolver().notifyChange(uri, null);
    return count;
  }

  /** Returns the MIME type: collection type for the list URI, item type for an id URI. */
  @Override
  public String getType(Uri uri) {
    switch (uriMatcher.match(uri)) {
      case WEBHOOKS:
        return CONTENT_TYPE;
      case WEBHOOK_ID:
        return CONTENT_ITEM_TYPE;
      default:
        throw new IllegalArgumentException("Unknown URI " + uri);
    }
  }

  /**
   * Inserts a new webhook row. Only the collection URI is accepted. Missing
   * fields are defaulted (timestamps to "now", name to the system "untitled"
   * string, text fields to "", nonce flags to 0). Returns the new row's URI.
   */
  @Override
  public Uri insert(Uri uri, ContentValues initialValues) {
    // Validate the requested uri
    if (uriMatcher.match(uri) != WEBHOOKS) {
      throw new IllegalArgumentException("Unknown URI " + uri);
    }

    ContentValues values;
    if (initialValues != null) {
      values = new ContentValues(initialValues);
    } else {
      values = new ContentValues();
    }

    Long now = Long.valueOf(System.currentTimeMillis());

    // Make sure that the fields are all set
    if (values.containsKey(CREATED_AT) == false) {
      values.put(CREATED_AT, now);
    }
    if (values.containsKey(UPDATED_AT) == false) {
      values.put(UPDATED_AT, now);
    }
    if (values.containsKey(NAME) == false) {
      Resources r = Resources.getSystem();
      values.put(NAME, r.getString(android.R.string.untitled));
    }
    if (values.containsKey(URI) == false) {
      values.put(URI, "");
    }
    if (values.containsKey(SECRET) == false) {
      values.put(SECRET, "");
    }
    if (values.containsKey(NONCE_RANDOM) == false) {
      values.put(NONCE_RANDOM, 0);
    }
    if (values.containsKey(NONCE_TIMESTAMP) == false) {
      values.put(NONCE_TIMESTAMP, 0);
    }

    SQLiteDatabase db = dbHelper.getWritableDatabase();
    long rowId = db.insert(TABLE_NAME, URI, values);
    if (rowId > 0) {
      Uri retUri = ContentUris.withAppendedId(CONTENT_URI, rowId);
      getContext().getContentResolver().notifyChange(retUri, null);
      return retUri;
    }

    throw new SQLException("Failed to insert row into " + uri);
  }

  /**
   * Queries the webhooks table for the collection or for one row by id.
   * Falls back to DEFAULT_SORT_ORDER when no sort order is given; the returned
   * cursor is registered for change notifications on the request URI.
   */
  @Override
  public Cursor query(Uri uri, String[] projection, String selection,
      String[] selectionArgs, String sortOrder) {
    SQLiteQueryBuilder qb = new SQLiteQueryBuilder();

    switch (uriMatcher.match(uri)) {
      case WEBHOOKS:
        qb.setTables(TABLE_NAME);
        qb.setProjectionMap(COLUMNS_PROJECTION);
        break;
      case WEBHOOK_ID:
        qb.setTables(TABLE_NAME);
        qb.setProjectionMap(COLUMNS_PROJECTION);
        qb.appendWhere(_ID + "=" + uri.getPathSegments().get(1));
        break;
      default:
        throw new IllegalArgumentException("Unknown URI " + uri);
    }

    // If no sort order is specified use the default
    String orderBy;
    if (TextUtils.isEmpty(sortOrder)) {
      orderBy = DEFAULT_SORT_ORDER;
    } else {
      orderBy = sortOrder;
    }

    // Get the database and run the query
    SQLiteDatabase db = dbHelper.getReadableDatabase();
    Cursor c = qb.query(db, projection, selection, selectionArgs, null, null,
        orderBy);

    // Tell the cursor what uri to watch, so it knows when its source data
    // changes
    c.setNotificationUri(getContext().getContentResolver(), uri);
    return c;
  }

  /**
   * Updates rows matching the URI (collection + selection, or one row by id).
   * UPDATED_AT is always overwritten with the current time. Notifies observers
   * and returns the number of rows changed.
   */
  @Override
  public int update(Uri uri, ContentValues values, String where,
      String[] whereArgs) {
    SQLiteDatabase db = dbHelper.getWritableDatabase();

    // Set updated
    Long now = Long.valueOf(System.currentTimeMillis());
    values.put(UPDATED_AT, now);

    int count;
    switch (uriMatcher.match(uri)) {
      case WEBHOOKS:
        count = db.update(TABLE_NAME, values, where, whereArgs);
        break;
      case WEBHOOK_ID:
        String id = uri.getPathSegments().get(1);
        count = db.update(TABLE_NAME, values, _ID + "=" + id
            + (!TextUtils.isEmpty(where) ? " AND (" + where + ')' : ""),
            whereArgs);
        break;
      default:
        throw new IllegalArgumentException("Unknown URI " + uri);
    }

    getContext().getContentResolver().notifyChange(uri, null);
    return count;
  }

  /**
   * Copies the webhook fields out of an extras bundle into ContentValues
   * suitable for insert/update.
   */
  public static ContentValues populateContentValues(Bundle extras) {
    ContentValues values = new ContentValues();
    values.put(NAME, extras.getString(NAME));
    values.put(URI, extras.getString(URI));
    values.put(SECRET, extras.getString(SECRET));
    values.put(NONCE_RANDOM, extras.getInt(NONCE_RANDOM));
    values.put(NONCE_TIMESTAMP, extras.getInt(NONCE_TIMESTAMP));
    return values;
  }
}
| apache-2.0 |
brightgenerous/brigen-base | src/main/java/com/brightgenerous/jee/filter/SimpleFilter.java | 710 | package com.brightgenerous.jee.filter;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
/**
 * A no-op servlet filter: every request is passed straight down the filter
 * chain unmodified. Useful as a placeholder or a base for subclasses that
 * only need to override one lifecycle method.
 */
public class SimpleFilter implements Filter {

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // No initialization required.
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        // Pass-through: delegate directly to the next filter/servlet.
        chain.doFilter(request, response);
    }

    @Override
    public void destroy() {
        // No resources to release.
    }
}
| apache-2.0 |
ueryu/Android_BlownLibrary | src/com/ueryu/android/blownlibrary/cache/package-info.java | 148 | /**
* Copyright (c) 2013 ueryu All Rights Reserved.
*/
/**
 * Cache package.
* @author ueryu
*/
package com.ueryu.android.blownlibrary.cache; | apache-2.0 |
ilivoo/ilivoo | jdk/src/main/java/com/ilivoo/jdk/java/nio/channels/SelectionKeyTest.java | 491 | package com.ilivoo.jdk.java.nio.channels;
/**
*
*/
/**
 * Notes/scratch class about java.nio.channels.SelectionKey semantics;
 * the methods are intentionally empty and only carry documentation.
 */
public class SelectionKeyTest {

    public static void main(String[] args) {
        SelectionKeyTest test = new SelectionKeyTest();
        test.isValid();
        test.cancel();
    }

    /**
     * Notes on SelectionKey.cancel():
     * 1. (placeholder - the original note was left empty)
     */
    public void cancel(){

    }

    /**
     * Notes on SelectionKey.isValid():
     * 1. Tells whether this key is valid; once a key has been cancelled it is
     *    invalid forever.
     * 2. A key stays valid until its cancel method is called, its selector is
     *    closed, or its channel is closed.
     */
    public void isValid() {

    }
}
| apache-2.0 |
bingoogolapple/J2EENote | jpa/hymallmodel/src/test/TestUUID.java | 180 | package test;
import java.util.UUID;
/**
 * Tiny demo: prints the length of a random UUID's canonical string form
 * (always 36 characters: 32 hex digits plus 4 hyphens).
 */
public class TestUUID {

    public static void main(String[] args) {
        final String canonical = UUID.randomUUID().toString();
        System.out.println(canonical.length());
    }
}
| apache-2.0 |
talsma-ict/umldoclet | src/plantuml-asl/src/net/sourceforge/plantuml/tim/stdlib/Lighten.java | 2575 | /* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.tim.stdlib;
import java.util.List;
import java.util.Map;
import java.util.Set;
import net.sourceforge.plantuml.LineLocation;
import net.sourceforge.plantuml.tim.EaterException;
import net.sourceforge.plantuml.tim.EaterExceptionLocated;
import net.sourceforge.plantuml.tim.TContext;
import net.sourceforge.plantuml.tim.TFunctionSignature;
import net.sourceforge.plantuml.tim.TMemory;
import net.sourceforge.plantuml.tim.expression.TValue;
import net.sourceforge.plantuml.ugraphic.color.HColor;
import net.sourceforge.plantuml.ugraphic.color.HColorSet;
import net.sourceforge.plantuml.ugraphic.color.NoSuchColorException;
public class Lighten extends SimpleReturnFunction {
	// Color math inspired by https://github.com/Qix-/color

	public TFunctionSignature getSignature() {
		return new TFunctionSignature("%lighten", 2);
	}

	public boolean canCover(int nbArg, Set<String> namedArgument) {
		return nbArg == 2;
	}

	/**
	 * Evaluates %lighten(color, ratio): resolves the color by name, lightens it
	 * by the given ratio and returns the result as a string value.
	 */
	public TValue executeReturnFunction(TContext context, TMemory memory, LineLocation location, List<TValue> values,
			Map<String, TValue> named) throws EaterException, EaterExceptionLocated {
		final String colorName = values.get(0).toString();
		final int amount = values.get(1).toInt();
		try {
			final HColor lightened = HColorSet.instance().getColorLEGACY(colorName).lighten(amount);
			return TValue.fromString(lightened.asString());
		} catch (NoSuchColorException e) {
			throw EaterException.located("No such color");
		}
	}
}
| apache-2.0 |
peterszatmary/jmh-benchmark-demo | src/main/java/com/szatmary/peter/App.java | 789 | package com.szatmary.peter;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
public class App {

    public static void main(String[] args) {
        // is not important here now
    }

    /**
     * Sums a fresh random sample with an explicit index loop.
     *
     * @return the sum of all 100000 sample values (fits comfortably in a long)
     */
    public long oldWay() {
        List<Integer> d = data();
        long res = 0L; // primitive accumulator: the original boxed Long re-allocated on every add
        for (int i = 0; i < d.size(); i++) {
            res += d.get(i);
        }
        return res;
    }

    /**
     * Sums a fresh random sample with a stream.
     *
     * @return the sum of all 100000 sample values
     */
    public long newWay() {
        // BUGFIX: reduce(0, (x, y) -> x + y) accumulated in (boxed) int and overflowed:
        // 100000 values below 100000 sum to roughly 5e9, well past Integer.MAX_VALUE.
        // mapToLong sums in long, matching oldWay's result range.
        return data().stream().mapToLong(Integer::longValue).sum();
    }

    /**
     * Builds a fresh list of 100000 random ints uniformly drawn from [0, 100000).
     */
    private List<Integer> data() {
        List<Integer> result = new ArrayList<>(100000); // presize to avoid regrowth
        Random r = new Random();
        for (int i = 0; i < 100000; i++) {
            result.add(r.nextInt(100000));
        }
        return result;
    }
}
| apache-2.0 |
scify/JedAIToolkit | src/main/java/org/scify/jedai/progressivejoin/SetSimJoin.java | 3453 | /*
* Copyright [2016-2020] [George Papadakis (gpapadis@yahoo.gr)]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scify.jedai.progressivejoin;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import org.scify.jedai.datamodel.Comparison;
/**
*
* @author mthanos
*/
/**
 * Top-k set-similarity join over tokenised records.
 *
 * Records are keyed by their (string) entity id; before the join runs they are
 * re-keyed to dense internal integer ids ({@code records_internal}) with
 * {@code mapper} retaining the reverse mapping. Results are exposed through
 * {@link #getResults()}.
 *
 * @author mthanos
 */
public final class SetSimJoin {

    private LinkedHashMap<String, ArrayList<Integer>> records;
    private ArrayList<Comparison> results;
    private LinkedHashMap<Integer, ArrayList<Integer>> records_internal;
    private HashMap<Integer, String> mapper;
    private boolean isCleanCleanER;
    private int noOfEntities;
    private int datasetDelimiter;
    // For clean-clean ER: isindataset1[internalId] tells which source dataset
    // the entity came from.
    private boolean[] isindataset1;

    public SetSimJoin() {
        results = new ArrayList<>();
        records_internal = new LinkedHashMap<>();
        mapper = new HashMap<>();
    }

    public SetSimJoin(LinkedHashMap<String, ArrayList<Integer>> records) {
        // Bug fix: chain to the no-arg constructor so results, records_internal
        // and mapper are initialised. Previously they were left null, and
        // topkGlobal() threw a NullPointerException for instances created
        // through this constructor.
        this();
        setRecords(records);
    }

    /**
     * Runs the global top-k join, filling {@link #getResults()}.
     *
     * @param k number of top comparisons to retain
     * @throws IllegalArgumentException if records or k are missing
     */
    public void topkGlobal(Integer k) {
        if (records == null) {
            throw new IllegalArgumentException("Records missing");
        }
        if (k == null) {
            throw new IllegalArgumentException("k missing");
        }
        if (isCleanCleanER()) {
            mapRecordsCleanClean();
        } else {
            mapRecords();
        }
        TopkGlobal topk = new TopkGlobal(records_internal, new JaccardTopK(k), results);
        topk.setIscleancleanEr(isCleanCleanER);
        topk.setIsindataset1(isindataset1);
        topk.run();
    }

    public void setRecords(LinkedHashMap<String, ArrayList<Integer>> records) {
        this.records = records;
    }

    public ArrayList<Comparison> getResults() {
        return results;
    }

    /** Re-keys {@link #records} to dense internal integer ids. */
    private void mapRecords() {
        final int[] internalid = {0};
        records.forEach((key, value) -> {
            records_internal.put(internalid[0], value);
            mapper.put(internalid[0]++, key);
        });
    }

    /**
     * Same as {@link #mapRecords()} but also records, per internal id, whether
     * the entity belongs to the first dataset (keys below the delimiter).
     */
    private void mapRecordsCleanClean() {
        isindataset1 = new boolean[noOfEntities];
        final int[] internalid = {0};
        records.forEach((key, value) -> {
            isindataset1[internalid[0]] = Integer.parseInt(key) < datasetDelimiter;
            records_internal.put(internalid[0], value);
            mapper.put(internalid[0]++, key);
        });
    }

    public boolean isCleanCleanER() {
        return isCleanCleanER;
    }

    public void setCleanCleanER(boolean cleanCleanER) {
        this.isCleanCleanER = cleanCleanER;
    }

    public void setDatasetDelimiter(int datasetDelimiter) {
        this.datasetDelimiter = datasetDelimiter;
    }

    public int getNoOfEntities() {
        return noOfEntities;
    }

    public void setNoOfEntities(int noOfEntities) {
        this.noOfEntities = noOfEntities;
    }
}
| apache-2.0 |
kubatatami/JudoNetworking | observers/src/main/java/com/github/kubatatami/judonetworking/activities/ObserverActivity.java | 913 | package com.github.kubatatami.judonetworking.activities;
import com.github.kubatatami.judonetworking.observers.ObservableController;
import com.github.kubatatami.judonetworking.observers.ObservableWrapper;
import com.github.kubatatami.judonetworking.observers.ObserverHelper;
import com.github.kubatatami.judonetworking.observers.WrapperObserver;
/**
* Created with IntelliJ IDEA.
* User: jbogacki
* Date: 08.04.2013
* Time: 22:27
*/
/**
 * Base activity that collects observers registered against observable wrappers
 * and detaches them automatically when the activity is destroyed.
 *
 * Created with IntelliJ IDEA.
 * User: jbogacki
 * Date: 08.04.2013
 * Time: 22:27
 */
public class ObserverActivity extends JudoActivity implements ObservableController {

    /** Tracks wrapper/observer pairs so they can be released on destroy. */
    private final ObserverHelper helper = new ObserverHelper();

    @Override
    public void addObserverToDelete(ObservableWrapper<?> observableWrapper, WrapperObserver<?> observer) {
        helper.addObserverToDelete(observableWrapper, observer);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        helper.onDestroy();
    }
}
| apache-2.0 |
reichfrancois/android-watchface | Wearable/src/main/java/com/cheetahtechnologies/android/wearable/watchface/CalendarWatchFaceService.java | 8699 | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cheetahtechnologies.android.wearable.watchface;
import android.content.BroadcastReceiver;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Rect;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.Message;
import android.os.PowerManager;
import android.support.wearable.provider.WearableCalendarContract;
import android.support.wearable.watchface.CanvasWatchFaceService;
import android.support.wearable.watchface.WatchFaceStyle;
import android.text.DynamicLayout;
import android.text.Editable;
import android.text.Html;
import android.text.Layout;
import android.text.SpannableStringBuilder;
import android.text.TextPaint;
import android.text.format.DateUtils;
import android.util.Log;
import android.view.SurfaceHolder;
/**
* Proof of concept sample watch face that demonstrates how a watch face can load calendar data.
*/
public class CalendarWatchFaceService extends CanvasWatchFaceService {
private static final String TAG = "CalendarWatchFace";
@Override
public Engine onCreateEngine() {
return new Engine();
}
private class Engine extends CanvasWatchFaceService.Engine {
static final int BACKGROUND_COLOR = Color.BLACK;
static final int FOREGROUND_COLOR = Color.WHITE;
static final int TEXT_SIZE = 25;
static final int MSG_LOAD_MEETINGS = 0;
/** Editable string containing the text to draw with the number of meetings in bold. */
final Editable mEditable = new SpannableStringBuilder();
/** Width specified when {@link #mLayout} was created. */
int mLayoutWidth;
/** Layout to wrap {@link #mEditable} onto multiple lines. */
DynamicLayout mLayout;
/** Paint used to draw text. */
final TextPaint mTextPaint = new TextPaint();
int mNumMeetings;
private AsyncTask<Void, Void, Integer> mLoadMeetingsTask;
/** Handler to load the meetings once a minute in interactive mode. */
final Handler mLoadMeetingsHandler = new Handler() {
@Override
public void handleMessage(Message message) {
switch (message.what) {
case MSG_LOAD_MEETINGS:
cancelLoadMeetingTask();
mLoadMeetingsTask = new LoadMeetingsTask();
mLoadMeetingsTask.execute();
break;
}
}
};
private boolean mIsReceiverRegistered;
private BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (Intent.ACTION_PROVIDER_CHANGED.equals(intent.getAction())
&& WearableCalendarContract.CONTENT_URI.equals(intent.getData())) {
cancelLoadMeetingTask();
mLoadMeetingsHandler.sendEmptyMessage(MSG_LOAD_MEETINGS);
}
}
};
@Override
public void onCreate(SurfaceHolder holder) {
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "onCreate");
}
super.onCreate(holder);
setWatchFaceStyle(new WatchFaceStyle.Builder(CalendarWatchFaceService.this)
.setCardPeekMode(WatchFaceStyle.PEEK_MODE_VARIABLE)
.setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
.setShowSystemUiTime(false)
.build());
mTextPaint.setColor(FOREGROUND_COLOR);
mTextPaint.setTextSize(TEXT_SIZE);
mLoadMeetingsHandler.sendEmptyMessage(MSG_LOAD_MEETINGS);
}
@Override
public void onDestroy() {
mLoadMeetingsHandler.removeMessages(MSG_LOAD_MEETINGS);
cancelLoadMeetingTask();
super.onDestroy();
}
@Override
public void onDraw(Canvas canvas, Rect bounds) {
// Create or update mLayout if necessary.
if (mLayout == null || mLayoutWidth != bounds.width()) {
mLayoutWidth = bounds.width();
mLayout = new DynamicLayout(mEditable, mTextPaint, mLayoutWidth,
Layout.Alignment.ALIGN_NORMAL, 1 /* spacingMult */, 0 /* spacingAdd */,
false /* includePad */);
}
// Update the contents of mEditable.
mEditable.clear();
mEditable.append(Html.fromHtml(getResources().getQuantityString(
R.plurals.calendar_meetings, mNumMeetings, mNumMeetings)));
// Draw the text on a solid background.
canvas.drawColor(BACKGROUND_COLOR);
mLayout.draw(canvas);
}
@Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
if (visible) {
IntentFilter filter = new IntentFilter(Intent.ACTION_PROVIDER_CHANGED);
filter.addDataScheme("content");
filter.addDataAuthority(WearableCalendarContract.AUTHORITY, null);
registerReceiver(mBroadcastReceiver, filter);
mIsReceiverRegistered = true;
mLoadMeetingsHandler.sendEmptyMessage(MSG_LOAD_MEETINGS);
} else {
if (mIsReceiverRegistered) {
unregisterReceiver(mBroadcastReceiver);
mIsReceiverRegistered = false;
}
mLoadMeetingsHandler.removeMessages(MSG_LOAD_MEETINGS);
}
}
private void onMeetingsLoaded(Integer result) {
if (result != null) {
mNumMeetings = result;
invalidate();
}
}
private void cancelLoadMeetingTask() {
if (mLoadMeetingsTask != null) {
mLoadMeetingsTask.cancel(true);
}
}
/**
* Asynchronous task to load the meetings from the content provider and report the number of
* meetings back via {@link #onMeetingsLoaded}.
*/
private class LoadMeetingsTask extends AsyncTask<Void, Void, Integer> {
private PowerManager.WakeLock mWakeLock;
@Override
protected Integer doInBackground(Void... voids) {
PowerManager powerManager = (PowerManager) getSystemService(POWER_SERVICE);
mWakeLock = powerManager.newWakeLock(
PowerManager.PARTIAL_WAKE_LOCK, "CalendarWatchFaceWakeLock");
mWakeLock.acquire();
long begin = System.currentTimeMillis();
Uri.Builder builder =
WearableCalendarContract.Instances.CONTENT_URI.buildUpon();
ContentUris.appendId(builder, begin);
ContentUris.appendId(builder, begin + DateUtils.DAY_IN_MILLIS);
final Cursor cursor = getContentResolver().query(builder.build(),
null, null, null, null);
int numMeetings = cursor.getCount();
if (Log.isLoggable(TAG, Log.VERBOSE)) {
Log.v(TAG, "Num meetings: " + numMeetings);
}
return numMeetings;
}
@Override
protected void onPostExecute(Integer result) {
releaseWakeLock();
onMeetingsLoaded(result);
}
@Override
protected void onCancelled() {
releaseWakeLock();
}
private void releaseWakeLock() {
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
}
}
} | apache-2.0 |
nodenica/node-documentation-android-pro | app/src/main/java/com/mc/nad/pro/api/Api.java | 3855 | package com.mc.nad.pro.api;
import android.os.AsyncTask;
import com.google.gson.Gson;
import com.mc.nad.pro.Config;
import com.mc.nad.pro.models.DocsModel;
import com.mc.nad.pro.models.ModuleLocalModel;
import com.mc.nad.pro.models.ModuleModel;
import com.tumblr.remember.Remember;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import io.realm.Realm;
import io.realm.RealmResults;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
/**
 * Background task that fetches the docs manifest JSON from the server, syncs the
 * module list into Realm, then triggers the docs-zip download. Callers register
 * an {@link AdapterListener} to be notified on completion or failure.
 */
public class Api extends AsyncTask<String, Void, JSONObject> {
    public static final String URL_ZIP = "https://nodenicausercontent.herokuapp.com/dist/docs.zip";
    public static final String URL_JSON = "https://nodenicausercontent.herokuapp.com/";

    // NOTE(review): static listener field — outlives any single task instance;
    // confirm callers clear it to avoid holding references (e.g. to an Activity).
    private static AdapterListener adapterListener;

    /** Callback contract: exactly one of onReady/onError fires per execution. */
    public interface AdapterListener {
        void onReady(JSONObject result);

        void onError(int statusCode);
    }

    public static void setAdapterListener(AdapterListener adapterListener) {
        Api.adapterListener = adapterListener;
    }

    protected void onPreExecute() {
    }

    /**
     * Performs the blocking HTTP GET of URL_JSON off the main thread.
     * Returns the parsed JSON body, or null if the request or parsing failed
     * (failures are only logged via printStackTrace).
     */
    protected JSONObject doInBackground(String... strings) {
        JSONObject jsonObject = null;
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder()
                .url(URL_JSON)
                .build();
        Response response;
        try {
            response = client.newCall(request).execute();
            // Non-2xx responses are treated as I/O failures and end up as a null result.
            if (!response.isSuccessful()) throw new IOException(String.valueOf(response));
            // body().string() consumes (and closes) the response body.
            jsonObject = new JSONObject(response.body().string());
        } catch (IOException | JSONException e) {
            e.printStackTrace();
        }
        return jsonObject;
    }

    /** Deletes every cached ModuleLocalModel row before re-syncing the module list. */
    private void truncateModuleLocal() {
        // instance realm
        // NOTE(review): this Realm instance appears never to be closed — Realm
        // typically requires close() per getDefaultInstance(); confirm.
        Realm realm = Realm.getDefaultInstance();
        // delete old data if exists
        final RealmResults<ModuleLocalModel> moduleLocalModels = realm.where(ModuleLocalModel.class).findAll();
        realm.executeTransaction(new Realm.Transaction() {
            @Override
            public void execute(Realm realm) {
                // Delete all matches
                moduleLocalModels.deleteAllFromRealm();
            }
        });
    }

    /**
     * Runs on the main thread with the fetched manifest. On success: persists the
     * module list to Realm, starts the docs download, and fires onReady once the
     * download finishes. On failure: fires onError(500).
     */
    protected void onPostExecute(final JSONObject result) {
        if (result != null && result.has("version")) {
            // convert json to class
            final DocsModel docsModel = new Gson().fromJson(result.toString(), DocsModel.class);
            // truncate module local
            truncateModuleLocal();
            for (ModuleModel moduleModel : docsModel.getModules()) {
                /// instance realm
                // NOTE(review): one Realm instance per loop iteration, none closed — verify.
                Realm realm = Realm.getDefaultInstance();
                realm.beginTransaction();
                // instance moduleLocalModel
                ModuleLocalModel moduleLocalModel = realm.createObject(ModuleLocalModel.class);
                // set title
                moduleLocalModel.setTitle(moduleModel.getTitle());
                // set name
                moduleLocalModel.setName(moduleModel.getName());
                realm.commitTransaction();
            }
            DownloadDocs.setAdapterListener(new DownloadDocs.AdapterListener() {
                @Override
                public void onReady(boolean status) {
                    // call onReady
                    if (adapterListener != null) {
                        // Remember the synced docs version, then notify the caller.
                        Remember.putString(Config.NODE_JS_VERSION, docsModel.getVersion());
                        adapterListener.onReady(result);
                    }
                }
            });
            new DownloadDocs().execute();
        } else {
            // call onError
            if (adapterListener != null) {
                adapterListener.onError(500);
            }
        }
    }
}
| apache-2.0 |
gstevey/gradle | subprojects/testing-base/src/main/java/org/gradle/testing/base/plugins/TestingModelBasePlugin.java | 5371 | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.testing.base.plugins;
import org.gradle.api.Action;
import org.gradle.api.DefaultTask;
import org.gradle.api.Incubating;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.internal.TaskInternal;
import org.gradle.api.internal.project.taskfactory.ITaskFactory;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.language.base.plugins.ComponentModelBasePlugin;
import org.gradle.language.base.plugins.LifecycleBasePlugin;
import org.gradle.model.Each;
import org.gradle.model.Finalize;
import org.gradle.model.Model;
import org.gradle.model.ModelMap;
import org.gradle.model.Mutate;
import org.gradle.model.Path;
import org.gradle.model.RuleSource;
import org.gradle.platform.base.BinaryContainer;
import org.gradle.platform.base.BinarySpec;
import org.gradle.platform.base.ComponentType;
import org.gradle.platform.base.TypeBuilder;
import org.gradle.platform.base.internal.BinarySpecInternal;
import org.gradle.testing.base.TestSuiteBinarySpec;
import org.gradle.testing.base.TestSuiteContainer;
import org.gradle.testing.base.TestSuiteSpec;
import org.gradle.testing.base.TestSuiteTaskCollection;
import org.gradle.testing.base.internal.BaseTestSuiteSpec;
/**
* Base plugin for testing.
*
* - Adds a {@link org.gradle.testing.base.TestSuiteContainer} named {@code testSuites} to the model.
* - Copies test binaries from {@code testSuites} into {@code binaries}.
*/
@Incubating
public class TestingModelBasePlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        // All model wiring lives in the nested Rules class; applying the
        // component-model base plugin makes the software-model infrastructure
        // (components, binaries, model registry) available.
        project.getPluginManager().apply(ComponentModelBasePlugin.class);
    }

    static class Rules extends RuleSource {
        // Registers TestSuiteSpec as a component type, backed by BaseTestSuiteSpec.
        @ComponentType
        void registerTestSuiteSpec(TypeBuilder<TestSuiteSpec> builder) {
            builder.defaultImplementation(BaseTestSuiteSpec.class);
        }

        // Declares the top-level `testSuites` model element (empty initializer:
        // the container is populated by other plugins/rules).
        @Model
        void testSuites(TestSuiteContainer testSuites) {
        }

        // Mirrors every test-suite binary into the global `binaries` container
        // under its project-scoped name, so generic binary rules see it too.
        @Mutate
        void copyTestBinariesToGlobalContainer(BinaryContainer binaries, TestSuiteContainer testSuites) {
            for (TestSuiteSpec testSuite : testSuites.values()) {
                for (BinarySpecInternal binary : testSuite.getBinaries().withType(BinarySpecInternal.class).values()) {
                    binaries.put(binary.getProjectScopedName(), binary);
                }
            }
        }

        // Creates a per-binary lifecycle "check" task (e.g. `checkFooBinary`)
        // for every model-backed (non-legacy) binary.
        @Finalize
        public void defineBinariesCheckTasks(@Each BinarySpecInternal binary, ITaskFactory taskFactory) {
            if (binary.isLegacyBinary()) {
                return;
            }
            TaskInternal binaryLifecycleTask = taskFactory.create(binary.getNamingScheme().getTaskName("check"), DefaultTask.class);
            binaryLifecycleTask.setGroup(LifecycleBasePlugin.VERIFICATION_GROUP);
            binaryLifecycleTask.setDescription("Check " + binary);
            binary.setCheckTask(binaryLifecycleTask);
        }

        // Makes the binaries' check tasks visible in the project task container.
        @Finalize
        void copyBinariesCheckTasksToTaskContainer(TaskContainer tasks, BinaryContainer binaries) {
            for (BinarySpec binary : binaries) {
                Task checkTask = binary.getCheckTask();
                if (checkTask != null) {
                    tasks.add(checkTask);
                }
            }
        }

        // Wires each buildable test-suite binary's "run" task into its own check
        // task, and hooks the suite's check task into the tested binary's checks.
        @Finalize
        void linkTestSuiteBinariesRunTaskToBinariesCheckTasks(@Path("binaries") ModelMap<TestSuiteBinarySpec> binaries) {
            binaries.afterEach(new Action<TestSuiteBinarySpec>() {
                @Override
                public void execute(TestSuiteBinarySpec testSuiteBinary) {
                    if (testSuiteBinary.isBuildable()) {
                        if (testSuiteBinary.getTasks() instanceof TestSuiteTaskCollection) {
                            testSuiteBinary.checkedBy(((TestSuiteTaskCollection) testSuiteBinary.getTasks()).getRun());
                        }
                        BinarySpec testedBinary = testSuiteBinary.getTestedBinary();
                        if (testedBinary != null && testedBinary.isBuildable()) {
                            testedBinary.checkedBy(testSuiteBinary.getCheckTask());
                        }
                    }
                }
            });
        }

        // Attaches every buildable binary's check task to the project-wide
        // `check` lifecycle task.
        @Finalize
        void attachBinariesCheckTasksToCheckLifecycle(@Path("tasks.check") Task checkTask, @Path("binaries") ModelMap<BinarySpec> binaries) {
            for (BinarySpec binary : binaries) {
                if (binary.isBuildable()) {
                    Task binaryCheckTask = binary.getCheckTask();
                    if (binaryCheckTask != null) {
                        checkTask.dependsOn(binaryCheckTask);
                    }
                }
            }
        }
    }
}
| apache-2.0 |
awchoudhary/Codelot | src/main/java/com/codelot/services/CodelotUserService.java | 2155 | package com.codelot.services;
import com.codelot.Beans.CodelotUser;
import com.google.appengine.api.users.User;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.googlecode.objectify.ObjectifyService;
import java.util.List;
/**
* Created by awaeschoudhary on 4/26/17.
*/
/**
 * Static helpers for creating, loading and updating the {@link CodelotUser}
 * profile bound to the currently logged-in App Engine account.
 *
 * Created by awaeschoudhary on 4/26/17.
 */
public final class CodelotUserService {

    /** Utility class — static methods only; never instantiated. */
    private CodelotUserService() {
    }

    /**
     * Creates and persists a new codelot user bound to the logged-in account.
     *
     * NOTE(review): assumes a user is logged in — UserService.getCurrentUser()
     * returns null otherwise, which would NPE here; confirm callers only reach
     * this behind a login check.
     *
     * @return the datastore id of the newly created user
     */
    public static long createUser(String fullname, int age, String username, String avatar){
        //get the logged in user
        UserService userService = UserServiceFactory.getUserService();
        User user = userService.getCurrentUser();
        //create new codelot user
        CodelotUser newUser = new CodelotUser(fullname, age, username, avatar);
        newUser.setUser(user.getEmail(), user.getUserId());
        //save user synchronously so the generated id is available below
        ObjectifyService.ofy().save().entity(newUser).now();
        //return the id for the created user
        return newUser.getId();
    }

    /**
     * Returns the CodelotUser profile for the logged-in account, or null when
     * no profile has been created yet.
     */
    public static CodelotUser getCurrentUserProfile(){
        //get logged in user account
        User user = UserServiceFactory.getUserService().getCurrentUser();
        //return corresponding codelot user account (first match, if any)
        List<CodelotUser> profiles = ObjectifyService.ofy()
                .load()
                .type(CodelotUser.class)
                .filter("user_id", user.getUserId())
                .list();
        return profiles.isEmpty() ? null : profiles.get(0);
    }

    /**
     * Overwrites every editable field of the logged-in user's profile and
     * persists it.
     *
     * NOTE(review): assumes a profile exists — getCurrentUserProfile() may
     * return null, which would NPE here; verify call sites.
     */
    public static void updateUser(String fullname, int age, String username, String avatar){
        //get user profile for logged in user
        CodelotUser profile = getCurrentUserProfile();
        //update and save profile
        profile.setAge(age);
        profile.setAvatarImage(avatar);
        profile.setFullname(fullname);
        profile.setUsername(username);
        ObjectifyService.ofy().save().entity(profile).now();
    }
}
| apache-2.0 |
leoz/zthumb | gen/com/leoz/bz/zthumb/R.java | 495 | /* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package com.leoz.bz.zthumb;
public final class R {
public static final class attr {
}
public static final class drawable {
public static final int mishura=0x7f020000;
}
public static final class string {
public static final int app_name=0x7f030000;
}
}
| apache-2.0 |
DNAProject/DNASDKJava | src/main/java/com/github/DNAProject/network/rest/http.java | 8612 | /*
* Copyright (C) 2018 The DNA Authors
* This file is part of The DNA library.
*
* The DNA is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The DNA is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with The DNA. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.github.DNAProject.network.rest;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Map;
import java.util.Map.Entry;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import com.alibaba.fastjson.JSON;
/**
 * Minimal HttpURLConnection-based HTTP helper: GET/POST/DELETE with optional
 * HTTPS, JSON bodies and query parameters. All methods are synchronous and
 * return the response body as a UTF-8 string.
 *
 * NOTE(review): the X509 trust manager installed for HTTPS appears to accept
 * any certificate (it is used with no trust store) — if so, certificate
 * validation is effectively disabled; confirm this is intended.
 */
public class http {
    private static final String DEFAULT_CHARSET = "UTF-8";

    /**
     * POSTs {@code body} (JSON) to {@code url} with optional extra headers.
     * Returns the response body as a string; 10s connect/read timeouts.
     */
    public static String post(String url,Map<String,String> header, String body, boolean https) throws IOException, NoSuchAlgorithmException, NoSuchProviderException, KeyManagementException {
        URL u = new URL(url);
        HttpURLConnection http = (HttpURLConnection) u.openConnection();
        http.setConnectTimeout(10000);
        http.setReadTimeout(10000);
        http.setRequestMethod("POST");
        http.setRequestProperty("Content-Type","application/json");
        if(header != null) {
            for (Entry<String, String> e : header.entrySet()) {
                http.setRequestProperty(e.getKey(), (String) e.getValue());
            }
        }
        if(https) {
            // Install the custom (apparently trust-all) SSL context for this connection.
            SSLContext sslContext = SSLContext.getInstance("SSL", "SunJSSE");
            sslContext.init(null, new TrustManager[]{new X509()}, new SecureRandom());
            SSLSocketFactory ssf = sslContext.getSocketFactory();
            ((HttpsURLConnection)http).setSSLSocketFactory(ssf);
        }
        http.setDoOutput(true);
        http.setDoInput(true);
        http.connect();
        // Write the request body, then read the full response into a string.
        try (OutputStream out = http.getOutputStream()) {
            out.write(body.getBytes(DEFAULT_CHARSET));
            out.flush();
        }
        StringBuilder sb = new StringBuilder();
        try (InputStream is = http.getInputStream()) {
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, DEFAULT_CHARSET))) {
                String str = null;
                while((str = reader.readLine()) != null) {
                    sb.append(str);
                    str = null;
                }
            }
        }
        if (http != null) {
            http.disconnect();
        }
        return sb.toString();
    }

    /** POST without extra headers. */
    public static String post(String url, String body, boolean https) throws IOException, NoSuchAlgorithmException, NoSuchProviderException, KeyManagementException {
        return post(url,null,body,https);
    }

    /**
     * Sends a DELETE with a JSON request body.
     *
     * NOTE(review): some HttpURLConnection implementations reject an output
     * stream on DELETE (ProtocolException) — verify on the target JDK/Android
     * versions this runs on.
     */
    public static String delete(String url, String body, boolean https) throws IOException, NoSuchAlgorithmException, NoSuchProviderException, KeyManagementException {
        URL u = new URL(url);
        HttpURLConnection http = (HttpURLConnection) u.openConnection();
        http.setConnectTimeout(10000);
        http.setReadTimeout(10000);
        http.setRequestMethod("DELETE");
        http.setRequestProperty("Content-Type","application/json");
        if(https) {
            SSLContext sslContext = SSLContext.getInstance("SSL", "SunJSSE");
            sslContext.init(null, new TrustManager[]{new X509()}, new SecureRandom());
            SSLSocketFactory ssf = sslContext.getSocketFactory();
            ((HttpsURLConnection)http).setSSLSocketFactory(ssf);
        }
        http.setDoOutput(true);
        http.setDoInput(true);
        http.connect();
        try (OutputStream out = http.getOutputStream()) {
            out.write(body.getBytes(DEFAULT_CHARSET));
            out.flush();
        }
        StringBuilder sb = new StringBuilder();
        try (InputStream is = http.getInputStream()) {
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, DEFAULT_CHARSET))) {
                String str = null;
                while((str = reader.readLine()) != null) {
                    sb.append(str);
                    str = null;
                }
            }
        }
        if (http != null) {
            http.disconnect();
        }
        return sb.toString();
    }

    /** DELETE with query params and a JSON body; HTTPS inferred from the URL scheme. */
    public static String delete(String url, Map<String, String> params, Map<String, Object> body) throws Exception {
        if(url.startsWith("https")){
            return delete(url+cvtParams(params), JSON.toJSONString(body),true);
        }else{
            return delete(url+cvtParams(params), JSON.toJSONString(body), false);
        }
    }

    /** POST with query params and a JSON body; HTTPS inferred from the URL scheme. */
    public static String post(String url, Map<String, String> params, Map<String, Object> body) throws Exception {
        System.out.println(String.format("POST url=%s,%s,%s", url,JSON.toJSONString(params),JSON.toJSONString(body)));
        if(url.startsWith("https")){
            return post(url+cvtParams(params), JSON.toJSONString(body), true);
        }else{
            return post(url+cvtParams(params), JSON.toJSONString(body), false);
        }
    }

    /** GET with 20s timeouts; returns the response body as a string. */
    private static String get(String url ,boolean https) throws Exception {
        URL u = new URL(url);
        HttpURLConnection http = (HttpURLConnection) u.openConnection();
        http.setConnectTimeout(20000);
        http.setReadTimeout(20000);
        http.setRequestMethod("GET");
        http.setRequestProperty("Content-Type","application/json");
        if(https) {
            SSLContext sslContext = SSLContext.getInstance("SSL", "SunJSSE");
            sslContext.init(null, new TrustManager[]{new X509()}, new SecureRandom());
            SSLSocketFactory ssf = sslContext.getSocketFactory();
            ((HttpsURLConnection)http).setSSLSocketFactory(ssf);
        }
        // NOTE(review): setDoOutput(true) on a GET is unusual — GET sends no
        // body here; confirm it has no side effect on the target platform.
        http.setDoOutput(true);
        http.setDoInput(true);
        http.connect();
        StringBuilder sb = new StringBuilder();
        try (InputStream is = http.getInputStream()) {
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, DEFAULT_CHARSET))) {
                String str = null;
                while((str = reader.readLine()) != null) {
                    sb.append(str);
                    str = null;
                }
            }
        }
        if (http != null) {
            http.disconnect();
        }
        return sb.toString();
    }

    private static String get(String url) throws Exception {
        System.out.println(String.format(" GET url=%s, params=%s", url, null));
        if(url.startsWith("https")){
            return get(url, true);
        }else{
            return get(url, false);
        }
    }

    /** GET with query params; HTTPS inferred from the URL scheme. */
    public static String get(String url, Map<String, String> params) throws Exception {
        if(url.startsWith("https")){
            return get(url+cvtParams(params), true);
        }else{
            return get(url+cvtParams(params), false);
        }
    }

    /**
     * Builds a "?k=v&k=v" query string from the map, or "" when empty.
     * Values are URL-encoded; keys are not.
     */
    private static String cvtParams( Map<String, String> params){
        if (params == null || params.isEmpty()) {
            return "";
        }
        StringBuilder sb = new StringBuilder();
        for (Entry<String, String> entry : params.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            try {
                value = value == null ? value:URLEncoder.encode(value, DEFAULT_CHARSET);
            } catch (UnsupportedEncodingException e) {
                e.printStackTrace();
            }
            sb.append("&").append(key).append("=").append(value);
        }
        return "?"+sb.toString().substring(1);
    }

    /**
     * Best-effort close of the given resources, swallowing failures.
     * Currently unused by this class (all I/O above uses try-with-resources).
     *
     * @param objs
     * @throws IOException
     */
    private static void close(Closeable... objs) throws IOException {
        if(objs != null && objs.length > 0) {
            Arrays.stream(objs).forEach(p -> {try {p.close(); } catch(Exception e){}});
        }
    }
}
googleapis/google-api-java-client-services | clients/google-api-services-healthcare/v1beta1/1.30.1/com/google/api/services/healthcare/v1beta1/model/CheckDataAccessRequest.java | 5103 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.healthcare.v1beta1.model;
/**
* Checks if a particular data_id of a User data mapping in the given Consent store is consented for
* a given use.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Healthcare API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class CheckDataAccessRequest extends com.google.api.client.json.GenericJson {
/**
* The Consents to evaluate the access request against. They must have the same `user_id` as the
* data to check access for, exist in the current `consent_store`, and can have a `state` of
* either `ACTIVE` or `DRAFT`. A maximum of 100 consents can be provided here.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ConsentList consentList;
/**
* The unique identifier of the data to check access for. It must exist in the given
* `consent_store`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String dataId;
/**
* The values of request attributes associated with this access request.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> requestAttributes;
/**
* The view for CheckDataAccessResponse.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String responseView;
/**
* The Consents to evaluate the access request against. They must have the same `user_id` as the
* data to check access for, exist in the current `consent_store`, and can have a `state` of
* either `ACTIVE` or `DRAFT`. A maximum of 100 consents can be provided here.
* @return value or {@code null} for none
*/
public ConsentList getConsentList() {
return consentList;
}
/**
* The Consents to evaluate the access request against. They must have the same `user_id` as the
* data to check access for, exist in the current `consent_store`, and can have a `state` of
* either `ACTIVE` or `DRAFT`. A maximum of 100 consents can be provided here.
* @param consentList consentList or {@code null} for none
*/
public CheckDataAccessRequest setConsentList(ConsentList consentList) {
this.consentList = consentList;
return this;
}
/**
* The unique identifier of the data to check access for. It must exist in the given
* `consent_store`.
* @return value or {@code null} for none
*/
public java.lang.String getDataId() {
return dataId;
}
/**
 * The unique identifier of the data to check access for. It must exist in the given
 * `consent_store`.
 * @param dataId dataId or {@code null} for none
 */
public CheckDataAccessRequest setDataId(java.lang.String dataId) {
  this.dataId = dataId;
  // Returns this request so setter calls can be chained (builder style).
  return this;
}
/**
 * The values of request attributes associated with this access request.
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getRequestAttributes() {
  // Generated accessor; exposes the underlying map directly (no defensive copy).
  return requestAttributes;
}
/**
 * The values of request attributes associated with this access request.
 * @param requestAttributes requestAttributes or {@code null} for none
 */
public CheckDataAccessRequest setRequestAttributes(java.util.Map<String, java.lang.String> requestAttributes) {
  // Stores the caller's map by reference (generated code makes no defensive copy).
  this.requestAttributes = requestAttributes;
  return this;
}
/**
 * The view for CheckDataAccessResponse.
 * @return value or {@code null} for none
 */
public java.lang.String getResponseView() {
  // Generated accessor; returns the raw field.
  return responseView;
}
/**
 * The view for CheckDataAccessResponse.
 * @param responseView responseView or {@code null} for none
 */
public CheckDataAccessRequest setResponseView(java.lang.String responseView) {
  this.responseView = responseView;
  // Returns this request so setter calls can be chained (builder style).
  return this;
}
// Covariant override: narrows the superclass generic set(field, value) return type
// to this request class so chained calls keep the concrete type.
@Override
public CheckDataAccessRequest set(String fieldName, Object value) {
  return (CheckDataAccessRequest) super.set(fieldName, value);
}
// Covariant override: narrows the superclass clone() return type for callers.
@Override
public CheckDataAccessRequest clone() {
  return (CheckDataAccessRequest) super.clone();
}
}
| apache-2.0 |
googleapis/java-container | proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/ListClustersRequestOrBuilder.java | 3628 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
package com.google.container.v1beta1;
/**
 * Read-access contract for {@code google.container.v1beta1.ListClustersRequest}, implemented by
 * both the generated message and its builder. Generated by the protocol buffer compiler from
 * {@code google/container/v1beta1/cluster_service.proto} — do not edit by hand.
 */
public interface ListClustersRequestOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.container.v1beta1.ListClustersRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   *
   *
   * <pre>
   * Required. Deprecated. The Google Developers Console [project ID or project
   * number](https://support.google.com/cloud/answer/6158840).
   * This field has been deprecated and replaced by the parent field.
   * </pre>
   *
   * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The projectId.
   */
  @java.lang.Deprecated
  java.lang.String getProjectId();

  /**
   *
   *
   * <pre>
   * Required. Deprecated. The Google Developers Console [project ID or project
   * number](https://support.google.com/cloud/answer/6158840).
   * This field has been deprecated and replaced by the parent field.
   * </pre>
   *
   * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The bytes for projectId.
   */
  @java.lang.Deprecated
  com.google.protobuf.ByteString getProjectIdBytes();

  /**
   *
   *
   * <pre>
   * Required. Deprecated. The name of the Google Compute Engine
   * [zone](https://cloud.google.com/compute/docs/zones#available) in which the
   * cluster resides, or "-" for all zones. This field has been deprecated and
   * replaced by the parent field.
   * </pre>
   *
   * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The zone.
   */
  @java.lang.Deprecated
  java.lang.String getZone();

  /**
   *
   *
   * <pre>
   * Required. Deprecated. The name of the Google Compute Engine
   * [zone](https://cloud.google.com/compute/docs/zones#available) in which the
   * cluster resides, or "-" for all zones. This field has been deprecated and
   * replaced by the parent field.
   * </pre>
   *
   * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for zone.
   */
  @java.lang.Deprecated
  com.google.protobuf.ByteString getZoneBytes();

  /**
   *
   *
   * <pre>
   * The parent (project and location) where the clusters will be listed.
   * Specified in the format `projects/*/locations/*`.
   * Location "-" matches all zones and all regions.
   * </pre>
   *
   * <code>string parent = 4;</code>
   *
   * @return The parent.
   */
  java.lang.String getParent();

  /**
   *
   *
   * <pre>
   * The parent (project and location) where the clusters will be listed.
   * Specified in the format `projects/*/locations/*`.
   * Location "-" matches all zones and all regions.
   * </pre>
   *
   * <code>string parent = 4;</code>
   *
   * @return The bytes for parent.
   */
  com.google.protobuf.ByteString getParentBytes();
}
| apache-2.0 |
heiko-braun/wildfly-swarm | arquillian/daemon/api/src/main/java/org/wildfly/swarm/arquillian/daemon/container/DaemonDeployableContainerBase.java | 8370 | /**
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.arquillian.daemon.container;
import org.jboss.arquillian.container.spi.client.container.DeployableContainer;
import org.jboss.arquillian.container.spi.client.container.DeploymentException;
import org.jboss.arquillian.container.spi.client.container.LifecycleException;
import org.jboss.arquillian.container.spi.client.protocol.ProtocolDescription;
import org.jboss.shrinkwrap.descriptor.api.Descriptor;
import org.wildfly.swarm.arquillian.daemon.protocol.WireProtocol;
import org.wildfly.swarm.arquillian.daemon.protocol.DaemonProtocol;
import org.wildfly.swarm.arquillian.daemon.protocol.DeploymentContext;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Base support for containers of the Arquillian Server Daemon.
 *
 * <p>Lifecycle: {@link #setup} records the remote daemon's address from configuration,
 * {@link #start} polls until a TCP connection to the daemon can be established (or the
 * configured timeout elapses) and wires up the line-oriented reader/writer used by the
 * wire protocol, and {@link #stop} releases all remote resources. Descriptor deployment
 * is not supported by this container type.
 *
 * @author <a href="mailto:alr@jboss.org">Andrew Lee Rubinger</a>
 */
public abstract class DaemonDeployableContainerBase<CONFIGTYPE extends DaemonContainerConfigurationBase> implements
        DeployableContainer<CONFIGTYPE> {

    private static final Logger log = Logger.getLogger(DaemonDeployableContainerBase.class.getName());

    private static final String ERROR_MESSAGE_DESCRIPTORS_UNSUPPORTED = "Descriptor deployment not supported";

    /** Milliseconds to sleep between connection attempts while the remote daemon starts up. */
    private static final long CONNECT_RETRY_SLEEP_MS = 200L;

    /** Address of the remote daemon; populated in {@link #setup}. */
    private InetSocketAddress remoteAddress;

    private Socket socket;

    private OutputStream socketOutstream;

    private InputStream socketInstream;

    private BufferedReader reader;

    private PrintWriter writer;

    /** Seconds to keep retrying the initial connection before giving up. */
    private int timeout = 10;

    /**
     * Records the remote host/port from the container configuration.
     *
     * @param configuration container configuration; its port must be a parseable integer
     */
    @Override
    public void setup(final CONFIGTYPE configuration) {
        final String remoteHost = configuration.getHost();
        final String remotePort = configuration.getPort();
        final InetSocketAddress address = new InetSocketAddress(remoteHost, Integer.parseInt(remotePort));
        this.remoteAddress = address;
    }

    /**
     * Sets the connection timeout in seconds used by {@link #start}.
     *
     * @param timeout seconds to wait for the daemon to accept a connection
     */
    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    /**
     * Opens the connection to the remote daemon, retrying until the configured timeout
     * elapses, then wires up the protocol streams.
     *
     * @throws LifecycleException if the daemon cannot be reached in time or the streams
     *         cannot be opened
     */
    @Override
    public void start() throws LifecycleException {
        // Open up remote resources
        try {
            final long startTime = System.currentTimeMillis();
            final int secondsToWait = this.timeout;
            // Deadline: the configured number of seconds from now (default 10s).
            final long acceptableTime = startTime + 1000 * secondsToWait;
            Socket socket = null;
            while (true) {
                try {
                    // TODO Security Action
                    socket = new Socket(remoteAddress.getHostString(), remoteAddress.getPort());
                    if (log.isLoggable(Level.FINEST)) {
                        log.finest("Got connection to " + remoteAddress.toString());
                    }
                    break;
                } catch (final ConnectException ce) {
                    // Daemon not listening yet; retry until the deadline passes.
                    if (log.isLoggable(Level.FINEST)) {
                        log.finest("No connection yet available to remote process");
                    }
                    final long currentTime = System.currentTimeMillis();
                    // Time expired?
                    if (currentTime > acceptableTime) {
                        throw new LifecycleException("Could not connect to the server at "
                                + remoteAddress.getHostString() + ":" + remoteAddress.getPort() + " in the allotted "
                                + secondsToWait + "s", ce);
                    }
                    // Sleep and try again
                    try {
                        Thread.sleep(CONNECT_RETRY_SLEEP_MS);
                    } catch (final InterruptedException e) {
                        // FIX: Thread.interrupted() previously CLEARED the interrupt flag here;
                        // restore it so callers up the stack can still observe the interruption.
                        Thread.currentThread().interrupt();
                        throw new RuntimeException("No one should be interrupting us while we're waiting to connect", e);
                    }
                }
            }
            assert socket != null : "Socket should have been connected";
            this.socket = socket;
            final OutputStream socketOutstream = socket.getOutputStream();
            this.socketOutstream = socketOutstream;
            final PrintWriter writer = new PrintWriter(new OutputStreamWriter(socketOutstream, WireProtocol.CHARSET),
                    true);
            this.writer = writer;
            final InputStream socketInstream = socket.getInputStream();
            this.socketInstream = socketInstream;
            // FIX: decode with the wire protocol charset; previously the platform default
            // charset was used for reads while writes used WireProtocol.CHARSET.
            final BufferedReader reader =
                    new BufferedReader(new InputStreamReader(socketInstream, WireProtocol.CHARSET));
            this.reader = reader;
        } catch (final IOException ioe) {
            this.closeRemoteResources();
            throw new LifecycleException("Could not open connection to remote process", ioe);
        }
    }

    /**
     * {@inheritDoc}
     *
     * @see org.jboss.arquillian.container.spi.client.container.DeployableContainer#stop()
     */
    @Override
    public void stop() throws LifecycleException {
        this.closeRemoteResources();
    }

    /**
     * {@inheritDoc}
     *
     * @see org.jboss.arquillian.container.spi.client.container.DeployableContainer#getDefaultProtocol()
     */
    @Override
    public ProtocolDescription getDefaultProtocol() {
        return DaemonProtocol.DESCRIPTION;
    }

    /**
     * Builds a deployment context bound to this container's open protocol streams.
     *
     * @param deploymentId identifier of the deployment the context refers to
     */
    protected DeploymentContext createDeploymentContext(final String deploymentId) {
        return DeploymentContext.create(deploymentId, socketInstream,
                socketOutstream, reader, writer);
    }

    /**
     * @throws UnsupportedOperationException always; descriptor deployment is unsupported
     * @see org.jboss.arquillian.container.spi.client.container.DeployableContainer#deploy(org.jboss.shrinkwrap.descriptor.api.Descriptor)
     */
    @Override
    public void deploy(final Descriptor descriptor) throws DeploymentException {
        throw new UnsupportedOperationException(ERROR_MESSAGE_DESCRIPTORS_UNSUPPORTED);
    }

    /**
     * @throws UnsupportedOperationException always; descriptor deployment is unsupported
     * @see org.jboss.arquillian.container.spi.client.container.DeployableContainer#undeploy(org.jboss.shrinkwrap.descriptor.api.Descriptor)
     */
    @Override
    public void undeploy(final Descriptor descriptor) throws DeploymentException {
        throw new UnsupportedOperationException(ERROR_MESSAGE_DESCRIPTORS_UNSUPPORTED);
    }

    /**
     * @return the remoteAddress
     */
    protected final InetSocketAddress getRemoteAddress() {
        return remoteAddress;
    }

    /**
     * @return the writer
     */
    protected final PrintWriter getWriter() {
        return writer;
    }

    /**
     * @return the reader
     */
    protected final BufferedReader getReader() {
        return reader;
    }

    /**
     * Safely close remote resources. Each resource is closed independently and nulled
     * out so a failure to close one does not leak the others; close errors are ignored
     * deliberately (best-effort teardown).
     */
    private void closeRemoteResources() {
        if (reader != null) {
            try {
                reader.close();
            } catch (final IOException ignored) {
                // best-effort close
            }
            reader = null;
        }
        if (writer != null) {
            writer.close();
            writer = null;
        }
        if (socketOutstream != null) {
            try {
                socketOutstream.close();
            } catch (final IOException ignored) {
                // best-effort close
            }
            socketOutstream = null;
        }
        if (socketInstream != null) {
            try {
                socketInstream.close();
            } catch (final IOException ignored) {
                // best-effort close
            }
            socketInstream = null;
        }
        if (socket != null) {
            try {
                socket.close();
            } catch (final IOException ignored) {
                // best-effort close
            }
            socket = null;
        }
    }
}
| apache-2.0 |
foundation-runtime/service-directory | 2.0/sd-api/src/main/java/com/cisco/oss/foundation/directory/lb/ServiceInstanceLoadBalancer.java | 967 | /**
* Copyright 2014 Cisco Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cisco.oss.foundation.directory.lb;
import com.cisco.oss.foundation.directory.entity.ServiceInstance;
/**
 * Load Balancer interface for ServiceInstance.
 *
 * <p>Implementations encapsulate a selection strategy over a set of service
 * instances; presumably round-robin or similar — confirm against concrete
 * implementations, as the strategy is not visible from this interface.
 */
public interface ServiceInstanceLoadBalancer {
    /**
     * Vote a ServiceInstance, i.e. select the instance the balancer chooses next.
     *
     * @return
     *         the ServiceInstance.
     */
    public ServiceInstance vote();
}
| apache-2.0 |
SiteMorph/protoqueue | src/test/java/net/sitemorph/queue/ConcurrentTest.java | 15117 | package net.sitemorph.queue;
import net.sitemorph.protostore.CrudException;
import net.sitemorph.protostore.CrudIterator;
import net.sitemorph.protostore.CrudStore;
import net.sitemorph.protostore.InMemoryStore.Builder;
import net.sitemorph.protostore.SortOrder;
import net.sitemorph.queue.Message.Task;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
/**
 * Concurrent task handler test which runs multiple schedulers which all claim
 * and work on tasks.
 *
 * <p>Each test seeds TASK_COUNT tasks into a shared in-memory store, runs one or
 * more TaskDispatchers over it with deliberately unreliable workers, and asserts
 * every task was completed exactly once despite retries after simulated failures.
 */
public class ConcurrentTest {

  private static Logger log = LoggerFactory.getLogger("ConcurrentTest");

  // Per-task completion counter keyed by task path (a random UUID). Every counter
  // must end at exactly 1: executed once, never duplicated, never dropped.
  private static volatile Map<String, AtomicInteger> counters =
      Maps.newConcurrentMap();
  // Paths whose worker run failed at least once (failed tasks are retried).
  private static volatile Set<String> error = Sets.newConcurrentHashSet();
  private static volatile String last;

  private static final int TASK_COUNT = 1000;
  // Simulated work duration per task, in milliseconds.
  private static final int TASK_SLEEP = 10;
  // Probability that a worker run fails and must be retried.
  private static final double ERROR_RATE = 0.1;

  final CrudStore<Task> store = new Builder<Task>()
      .setPrototype(Task.newBuilder())
      .setVectorField("vector")
      .setUrnField("urn")
      .setSortOrder("runTime", SortOrder.ASCENDING)
      .addIndexField("path")
      .build();

  /** Clears counters and the store, then seeds TASK_COUNT fresh tasks. */
  @BeforeMethod(alwaysRun = true)
  public void resetTest() throws CrudException {
    log.info("RESETTING TASKS");
    counters.clear();
    for (int i = 0; i < TASK_COUNT; i++) {
      counters.put(UUID.randomUUID().toString(), new AtomicInteger(0));
    }
    error.clear();
    last = null;
    // Drain any tasks left over from a previous test method.
    CrudIterator<Task> tasks = store.read(Task.newBuilder());
    while (tasks.hasNext()) {
      store.delete(tasks.next());
    }
    // FIX: close the iterator; every other read site in this class closes, this one leaked.
    tasks.close();
    long now = System.currentTimeMillis();
    for (Map.Entry<String, AtomicInteger> entry : counters.entrySet()) {
      store.create(Task.newBuilder()
          .setPath(entry.getKey())
          .setRunTime(now++));
      last = entry.getKey();
    }
    log.info("Done counters");
  }

  /** A single dispatcher with a single unreliable worker must complete every task once. */
  @Test(groups = "longTest")
  public void testSingleWorker() throws InterruptedException, CrudException {
    log.info("SINGLE WORKER START");
    TaskDispatcher dispatcher = TaskDispatcher.newBuilder()
        .setIdentity(UUID.randomUUID())
        .setSleepInterval(100)
        .setTaskTimeout(1000)
        .setWorkerPoolSize(1)
        .setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
          @Override
          public void uncaughtException(Thread thread, Throwable throwable) {
            log.error("Unhandled exception", throwable);
            throw new RuntimeException("Uncaught error", throwable);
          }
        })
        .setTaskQueueFactory(new TaskQueueFactory() {
          @Override
          public TaskQueue getTaskQueue() throws QueueException {
            return CrudTaskQueue.fromCrudStore(store);
          }

          @Override
          public void returnTaskQueue(TaskQueue queue) throws QueueException {
            // don't need to release resources.
          }
        })
        .registerTaskWorker(new UnreliableWorker(ERROR_RATE))
        .build();
    log.info("Starting dispatcher");
    Thread thread = new Thread(dispatcher);
    thread.setDaemon(true);
    thread.setName("Dispatcher");
    thread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
      @Override
      public void uncaughtException(Thread thread, Throwable throwable) {
        log.error("uncaught error", throwable);
      }
    });
    thread.start();
    long start = System.currentTimeMillis();
    // Poll until the queue drains, logging progress once a second.
    while (true) {
      int sum = 0;
      for (Map.Entry<String, AtomicInteger> entry : counters.entrySet()) {
        sum += entry.getValue().intValue();
      }
      CrudIterator<Task> tasks = store.read(Task.newBuilder());
      if (!tasks.hasNext()) {
        break;
      }
      int claims = 0;
      int remaining = 0;
      Task first = null;
      while (tasks.hasNext()) {
        Task task = tasks.next();
        if (null == first) {
          first = task;
        }
        remaining++;
        if (task.hasClaim()) {
          claims++;
        }
      }
      tasks.close();
      log.info("Next task {} due in {} and timeout in {}",
          (null != first? first.getUrn() : null),
          (null != first? first.getRunTime() - System.currentTimeMillis() : null),
          (null != first && first.hasClaimTimeout()?
              first.getClaimTimeout() - System.currentTimeMillis() : null));
      log.info("Sleeping while waiting for counter updates. Currently at {} " +
          "with {} claims against {} tasks.",
          sum, claims, remaining);
      Thread.sleep(1000);
    }
    long end = System.currentTimeMillis();
    log.info("Stopping dispatcher");
    dispatcher.shutdown();
    long time = end - start;
    // Expected serial work: every task runs once plus the expected retry fraction.
    long executionTime = (long)((double)TASK_COUNT * (1.0 + ERROR_RATE)) * TASK_SLEEP;
    long schedulingOverhead = time - executionTime;
    log.info("Total Time {} execution time {} scheduling overhead {}",
        time, executionTime, schedulingOverhead);
    // check that all counters are 1 (each task executed exactly once)
    for (Map.Entry<String, AtomicInteger> entry : counters.entrySet()) {
      assertNotNull(entry.getKey(), "expected key");
      assertEquals(entry.getValue().intValue(), 1, "Expected 1 on " +
          entry.getKey());
    }
    // FIX: capture the iterator so it can be closed instead of leaking it.
    CrudIterator<Task> leftover = store.read(Task.newBuilder());
    assertFalse(leftover.hasNext(), "Expected all tasks done");
    leftover.close();
    log.info("SINGLE WORKER STOP");
  }

  /** Two competing dispatchers must still complete every task exactly once. */
  @Test(groups = "longTest")
  public void testTwoOrchestrator() throws InterruptedException, CrudException {
    log.info("TEST TWO ORCHESTRATOR START");
    UUID dispatcherIdentity1 = UUID.randomUUID();
    UUID dispatcherIdentity2 = UUID.randomUUID();
    log.info("DISPATCHER 1 : " + dispatcherIdentity1.toString());
    log.info("DISPATCHER 2 : " + dispatcherIdentity2.toString());
    TaskDispatcher dispatcher1 = TaskDispatcher.newBuilder()
        .setIdentity(dispatcherIdentity1)
        .setSleepInterval(10)
        .setTaskTimeout(100)
        .setWorkerPoolSize(1)
        .setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
          @Override
          public void uncaughtException(Thread thread, Throwable throwable) {
            log.error("Unhandled exception", throwable);
            throw new RuntimeException("Uncaught error", throwable);
          }
        })
        .setTaskQueueFactory(new TaskQueueFactory() {
          @Override
          public TaskQueue getTaskQueue() throws QueueException {
            return CrudTaskQueue.fromCrudStore(store);
          }

          @Override
          public void returnTaskQueue(TaskQueue queue) throws QueueException {
            // don't need to release resources.
          }
        })
        // Consistency fix: use the shared ERROR_RATE constant (same value as the
        // previous literal 0.1).
        .registerTaskWorker(new UnreliableWorker(ERROR_RATE))
        .build();
    Thread thread1 = new Thread(dispatcher1);
    thread1.setDaemon(true);
    thread1.setName("Dispatcher1");
    thread1.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
      @Override
      public void uncaughtException(Thread thread, Throwable throwable) {
        log.error("uncaught error", throwable);
      }
    });
    TaskDispatcher dispatcher2 = TaskDispatcher.newBuilder()
        .setIdentity(dispatcherIdentity2)
        .setSleepInterval(10)
        .setTaskTimeout(100)
        .setWorkerPoolSize(1)
        .setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
          @Override
          public void uncaughtException(Thread thread, Throwable throwable) {
            log.error("Unhandled exception", throwable);
            throw new RuntimeException("Uncaught error", throwable);
          }
        })
        .setTaskQueueFactory(new TaskQueueFactory() {
          @Override
          public TaskQueue getTaskQueue() throws QueueException {
            return CrudTaskQueue.fromCrudStore(store);
          }

          @Override
          public void returnTaskQueue(TaskQueue queue) throws QueueException {
            // don't need to release resources.
          }
        })
        .registerTaskWorker(new UnreliableWorker(ERROR_RATE))
        .build();
    Thread thread2 = new Thread(dispatcher2);
    thread2.setDaemon(true);
    thread2.setName("Dispatcher2");
    thread2.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
      @Override
      public void uncaughtException(Thread thread, Throwable throwable) {
        log.error("uncaught error", throwable);
      }
    });
    log.info("Starting dispatchers.");
    thread1.start();
    thread2.start();
    long start = System.currentTimeMillis();
    log.info("Started dispatchers.");
    // Poll until the queue drains.
    while (true) {
      CrudIterator<Task> tasks = store.read(Task.newBuilder());
      boolean done = !tasks.hasNext();
      tasks.close();
      if (done) {
        break;
      }
      Thread.sleep(1000);
    }
    long end = System.currentTimeMillis();
    log.info("Stopping dispatcher");
    dispatcher2.shutdown();
    dispatcher1.shutdown();
    long time = end - start;
    // Two dispatchers split the work, so expected time halves (plus retry fraction).
    long executionTime = (long)((double)TASK_COUNT / 2.0 * (1.0 + ERROR_RATE))
        * TASK_SLEEP;
    long schedulingOverhead = time - executionTime;
    log.info("Total Time {} execution time {} scheduling overhead {}",
        time, executionTime, schedulingOverhead);
    // check that all counters are 1 (each task executed exactly once)
    for (Map.Entry<String, AtomicInteger> entry : counters.entrySet()) {
      assertNotNull(entry.getKey(), "expected key");
      assertEquals(entry.getValue().intValue(), 1, "Expected 1 on " +
          entry.getKey());
    }
    log.info("TEST TWO ORCHESTRATOR END");
  }

  /** One hundred competing dispatchers must still complete every task exactly once. */
  @Test(groups = "longTest")
  public void testMultiOrchestrator() throws InterruptedException, CrudException {
    log.info("TEST MULTI ORCHESTRATOR START");
    Set<TaskDispatcher> dispatchers = Sets.newHashSet();
    int count = 100;
    for (int i = 0; i < count; i++) {
      TaskDispatcher dispatcher = TaskDispatcher.newBuilder()
          .setIdentity(UUID.randomUUID())
          .setSleepInterval(1000)
          .setTaskTimeout(1000)
          .setWorkerPoolSize(1)
          .setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
            @Override
            public void uncaughtException(Thread thread, Throwable throwable) {
              log.error("Unhandled exception", throwable);
              throw new RuntimeException("Uncaught error", throwable);
            }
          })
          .setTaskQueueFactory(new TaskQueueFactory() {
            @Override
            public TaskQueue getTaskQueue() throws QueueException {
              return CrudTaskQueue.fromCrudStore(store);
            }

            @Override
            public void returnTaskQueue(TaskQueue queue) throws QueueException {
              // don't need to release resources.
            }
          })
          // Consistency fix: shared ERROR_RATE constant instead of the literal 0.1.
          .registerTaskWorker(new UnreliableWorker(ERROR_RATE))
          .build();
      dispatchers.add(dispatcher);
    }
    Set<Thread> threads = Sets.newHashSet();
    for (TaskDispatcher dispatcher : dispatchers) {
      Thread thread = new Thread(dispatcher);
      thread.setDaemon(true);
      thread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(Thread thread, Throwable throwable) {
          log.error("uncaught error", throwable);
        }
      });
      threads.add(thread);
    }
    log.info("Starting dispatchers.");
    for (Thread thread : threads) {
      thread.start();
    }
    long start = System.currentTimeMillis();
    log.info("Started dispatchers.");
    // Poll until the queue drains, logging progress once a second.
    while (true) {
      int sum = 0;
      for (Map.Entry<String, AtomicInteger> entry : counters.entrySet()) {
        sum += entry.getValue().intValue();
      }
      CrudIterator<Task> tasks = store.read(Task.newBuilder());
      if (!tasks.hasNext()) {
        break;
      }
      int claims = 0;
      while (tasks.hasNext()) {
        if (tasks.next().hasClaim()) {
          claims++;
        }
      }
      tasks.close();
      log.info("Sleeping while waiting for counter updates. Currently at {} " +
          "tasks and {} claims", sum, claims);
      Thread.sleep(1000);
    }
    long end = System.currentTimeMillis();
    log.info("Stopping dispatcher");
    for (TaskDispatcher dispatcher : dispatchers) {
      dispatcher.shutdown();
    }
    long time = end - start;
    long executionTime = (long)((double)TASK_COUNT / (double)count * (1.0 + ERROR_RATE))
        * TASK_SLEEP;
    long schedulingOverhead = time - executionTime;
    log.info("Total Time {} execution time {} scheduling overhead {}",
        time, executionTime, schedulingOverhead);
    // check that all counters are 1 (each task executed exactly once)
    for (Map.Entry<String, AtomicInteger> entry : counters.entrySet()) {
      assertNotNull(entry.getKey(), "expected key");
      assertEquals(entry.getValue().intValue(), 1, "Expected 1 on " +
          entry.getKey());
    }
    log.info("TEST MULTIPLE ORCHESTRATOR END");
  }

  /**
   * Worker that simulates flaky task execution: sleeps TASK_SLEEP ms, then with the
   * configured probability reports ERROR (expecting the dispatcher to call stop() and
   * retry), otherwise increments the task's counter and reports DONE.
   */
  public static class UnreliableWorker implements TaskWorker {

    private double failureProbability = 0.0;
    private volatile TaskStatus status;
    private volatile Task task;
    private volatile TaskDispatcher dispatcher;
    // Set when a run errored; stop() is expected next and logs if that ordering breaks.
    private volatile boolean expectStop = false;

    public UnreliableWorker(double failureProbability) {
      this.failureProbability = failureProbability;
    }

    @Override
    public void reset() {
      status = TaskStatus.RESET;
      expectStop = false;
    }

    @Override
    public boolean isRelevant(Task task) {
      // This worker accepts every task.
      return true;
    }

    @Override
    public void setTask(Task task, TaskDispatcher dispatcher) {
      this.task = task;
      this.dispatcher = dispatcher;
    }

    @Override
    public TaskStatus getStatus() {
      return status;
    }

    @Override
    public void stop() {
      // If stop arrives after a successful run, undo the counter increment so the
      // exactly-once assertions catch dispatchers that stop completed work.
      if (null != task && status == TaskStatus.DONE) {
        log.error("Rolling back {}", task.getUrn());
        counters.get(task.getPath()).decrementAndGet();
      }
      log.debug("Stop called");
      if (!expectStop) {
        log.error("Didn't expect stop. Called after done...");
      }
      expectStop = false;
      this.status = TaskStatus.STOPPED;
    }

    @Override
    public void run() {
      if (expectStop) {
        log.error("Expected to have stop called due to error");
      }
      status = TaskStatus.RUNNING;
      try {
        Thread.sleep(TASK_SLEEP);
      } catch (InterruptedException e) {
        log.error("Interrupted");
      }
      if (failureProbability > Math.random()) {
        // Simulated failure: the dispatcher should stop this worker and retry the task.
        status = TaskStatus.ERROR;
        expectStop = true;
        error.add(task.getPath());
        log.debug("Error On {}", task.getPath());
      } else {
        counters.get(task.getPath()).incrementAndGet();
        status = TaskStatus.DONE;
      }
    }
  }
}
| apache-2.0 |
rowillia/buck | test/com/facebook/buck/event/EventSerializationTest.java | 14395 | /*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.event;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.artifact_cache.CacheResult;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.parser.ParseEvent;
import com.facebook.buck.rules.BuildEvent;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleEvent;
import com.facebook.buck.rules.BuildRuleKeys;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleStatus;
import com.facebook.buck.rules.BuildRuleSuccessType;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.IndividualTestEvent;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TestRunEvent;
import com.facebook.buck.test.FakeTestResults;
import com.facebook.buck.test.TestCaseSummary;
import com.facebook.buck.test.TestResultSummary;
import com.facebook.buck.test.TestResults;
import com.facebook.buck.test.result.type.ResultType;
import com.facebook.buck.test.selectors.TestSelectorList;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.timing.DefaultClock;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;
import com.google.common.hash.HashCode;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.HashSet;
public class EventSerializationTest {
// The hardcoded strings depend on this being a vanilla mapper: any registered
// module or serialization feature would change the expected JSON below.
private static final ObjectMapper MAPPER = new ObjectMapper();

// Fixed event envelope values, captured once in setUp() so every golden JSON
// string can interpolate the same timestamp/nanoTime/threadId/buildId.
private long timestamp;
private long nanoTime;
private long threadId;
private BuildId buildId;
@Before
public void setUp() {
  Clock clock = new DefaultClock();
  timestamp = clock.currentTimeMillis();
  nanoTime = clock.nanoTime();
  threadId = 0;
  buildId = new BuildId("Test");
  // Pin the event-key sequence so the "eventKey":{"value":...} fields in the
  // expected JSON strings are deterministic across runs.
  EventKey.setSequenceValueForTest(4242L);
}
// Golden-string test: asserts the exact JSON produced for a finished project generation event.
@Test
public void testProjectGenerationEventFinished() throws IOException {
  ProjectGenerationEvent.Finished event = ProjectGenerationEvent.finished();
  event.configure(timestamp, nanoTime, threadId, buildId);
  String message = MAPPER.writeValueAsString(event);
  assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
      "\"type\":\"ProjectGenerationFinished\"," +
      "\"eventKey\":{\"value\":4242}}", message);
}
// Golden-string test: asserts the exact JSON produced for a started project generation event.
@Test
public void testProjectGenerationEventStarted() throws IOException {
  ProjectGenerationEvent.Started event = ProjectGenerationEvent.started();
  event.configure(timestamp, nanoTime, threadId, buildId);
  String message = MAPPER.writeValueAsString(event);
  assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
      "\"type\":\"ProjectGenerationStarted\"," +
      "\"eventKey\":{\"value\":4242}}", message);
}
// Golden-string test: parse-started event with an empty target list.
@Test
public void testParseEventStarted() throws IOException {
  ParseEvent.Started event = ParseEvent.started(ImmutableList.<BuildTarget>of());
  event.configure(timestamp, nanoTime, threadId, buildId);
  String message = MAPPER.writeValueAsString(event);
  assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
      "\"buildTargets\":[],\"type\":\"ParseStarted\"," +
      "\"eventKey\":{\"value\":4242}}", message);
}
// Golden-string test: parse-finished event carrying one flavored build target.
@Test
public void testParseEventFinished() throws IOException {
  ParseEvent.Started started = ParseEvent.started(ImmutableList.of(
      BuildTargetFactory.newInstance("//base:short#flv")));
  ParseEvent.Finished event = ParseEvent.finished(started, Optional.<TargetGraph>absent());
  event.configure(timestamp, nanoTime, threadId, buildId);
  String message = MAPPER.writeValueAsString(event);
  assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
      "\"buildTargets\":[{\"cell\":{\"present\":false},\"baseName\":\"//base\"," +
      "\"shortName\":\"short\",\"flavor\":\"flv\"}],\"type\":\"ParseFinished\"," +
      "\"eventKey\":{\"value\":4242}}", message);
}
// Golden-string test: build-started event; note the event key differs from the
// pinned 4242 because this event type derives its own key.
@Test
public void testBuildEventStarted() throws IOException {
  BuildEvent.Started event = BuildEvent.started(ImmutableSet.of("//base:short"));
  event.configure(timestamp, nanoTime, threadId, buildId);
  String message = MAPPER.writeValueAsString(event);
  assertJsonEquals(
      "{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
      "\"eventKey\":{\"value\":4242}," +
      "\"buildArgs\":[\"//base:short\"], \"distributedBuild\":false," +
      "\"type\":\"BuildStarted\"}",
      message);
}
// Golden-string test: build-finished event pairing with a started event, exit code 0.
@Test
public void testBuildEventFinished() throws IOException {
  BuildEvent.Finished event = BuildEvent.finished(
      BuildEvent.started(ImmutableSet.of("//base:short")),
      0);
  event.configure(timestamp, nanoTime, threadId, buildId);
  String message = MAPPER.writeValueAsString(event);
  assertJsonEquals(
      "{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
      "\"eventKey\":{\"value\":4242}," +
      "\"buildArgs\":[\"//base:short\"], \"exitCode\":0,\"type\":\"BuildFinished\"}",
      message);
}
// Golden-string test: rule-started event; its event key is derived from the rule,
// hence the 1024186770 value rather than the pinned 4242 sequence value.
@Test
public void testBuildRuleEventStarted() throws IOException {
  BuildRule rule = generateFakeBuildRule();
  BuildRuleEvent.Started event = BuildRuleEvent.started(rule);
  event.configure(timestamp, nanoTime, threadId, buildId);
  String message = MAPPER.writeValueAsString(event);
  assertJsonEquals(
      "{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
      "\"buildRule\":{\"type\":\"fake_build_rule\",\"name\":\"//fake:rule\"}," +
      "\"type\":\"BuildRuleStarted\"," +
      "\"eventKey\":{\"value\":1024186770}}",
      message);
}
@Test
public void testBuildRuleEventFinished() throws IOException {
BuildRule rule = generateFakeBuildRule();
BuildRuleEvent.Finished event =
BuildRuleEvent.finished(
rule,
BuildRuleKeys.of(new RuleKey("aaaa")),
BuildRuleStatus.SUCCESS,
CacheResult.miss(),
Optional.<BuildRuleSuccessType>absent(),
Optional.<HashCode>absent(),
Optional.<Long>absent());
event.configure(timestamp, nanoTime, threadId, buildId);
String message = MAPPER.writeValueAsString(event);
assertJsonEquals(
"{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
"\"status\":\"SUCCESS\",\"cacheResult\":{\"type\":\"MISS\",\"cacheSource\":{" +
"\"present\":false},\"cacheError\":{\"present\":false}," +
"\"metadata\":{\"present\":false}}, \"buildRule\":{\"type\":" +
"\"fake_build_rule\",\"name\":\"//fake:rule\"}," +
"\"type\":\"BuildRuleFinished\"," +
"\"eventKey\":{\"value\":1024186770}}",
message);
}
@Test
public void testTestRunEventStarted() throws IOException {
TestRunEvent.Started event = TestRunEvent.started(
true, TestSelectorList.empty(), false, ImmutableSet.<String>of());
event.configure(timestamp, nanoTime, threadId, buildId);
String message = MAPPER.writeValueAsString(event);
assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
"\"runAllTests\":true," +
"\"eventKey\":{\"value\":256329280}," +
"\"targetNames\":[],\"type\":\"RunStarted\"}", message);
}
@Test
public void testTestRunEventFinished() throws IOException {
TestRunEvent.Finished event = TestRunEvent.finished(
ImmutableSet.of("target"),
ImmutableList.of(generateFakeTestResults()));
event.configure(timestamp, nanoTime, threadId, buildId);
String message = MAPPER.writeValueAsString(event);
assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\",\"" +
"results\":[{\"testCases\":[{\"testCaseName\":\"Test1\",\"testResults\":[{\"testName\":" +
"null,\"testCaseName\":\"Test1\",\"type\":\"FAILURE\",\"time\":0,\"message\":null," +
"\"stacktrace\":null,\"stdOut\":null," +
"\"stdErr\":null}],\"failureCount\":1,\"skippedCount\":0,\"totalTime\":0," +
"\"success\":false}]," +
"\"failureCount\":1,\"contacts\":[],\"labels\":[]," +
"\"dependenciesPassTheirTests\":true,\"sequenceNumber\":0,\"totalNumberOfTests\":0," +
"\"buildTarget\":{\"shortName\":\"baz\",\"baseName\":\"//foo/bar\"," +
"\"cell\":{\"present\":false},\"flavor\":\"\"}," +
"\"success\":false}],\"type\":\"RunComplete\", \"eventKey\":" +
"{\"value\":-624576559}}",
message);
}
@Test
public void testIndividualTestEventStarted() throws IOException {
IndividualTestEvent.Started event = IndividualTestEvent.started(ImmutableList.of(""));
event.configure(timestamp, nanoTime, threadId, buildId);
String message = MAPPER.writeValueAsString(event);
assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
"\"type\":\"AwaitingResults\",\"eventKey\":{\"value\":-594614447}}",
message);
}
@Test
public void testIndividualTestEventFinished() throws IOException {
IndividualTestEvent.Finished event = IndividualTestEvent.finished(ImmutableList.<String>of(),
generateFakeTestResults());
event.configure(timestamp, nanoTime, threadId, buildId);
String message = MAPPER.writeValueAsString(event);
assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
"\"eventKey\":{\"value\":-594614477}," +
"\"results\":{\"testCases\":[{\"testCaseName\":\"Test1\",\"testResults\":[{\"testName\"" +
":null,\"testCaseName\":\"Test1\",\"type\":\"FAILURE\",\"time\":0,\"message\":null," +
"\"stacktrace\":null,\"stdOut\":null," +
"\"stdErr\":null}],\"failureCount\":1,\"skippedCount\":0,\"totalTime\":0," +
"\"success\":false}]," +
"\"failureCount\":1,\"contacts\":[],\"labels\":[]," +
"\"dependenciesPassTheirTests\":true,\"sequenceNumber\":0,\"totalNumberOfTests\":0," +
"\"buildTarget\":{\"shortName\":\"baz\",\"baseName\":\"//foo/bar\"," +
"\"cell\":{\"present\":false},\"flavor\":\"\"}," +
"\"success\":false},\"type\":\"ResultsAvailable\"}", message);
}
@Test
public void testSimplePerfEvent() throws IOException {
SimplePerfEvent.Started event = SimplePerfEvent.started(
PerfEventId.of("PerfId"),
"value", Optional.of(BuildTargetFactory.newInstance("//:fake")));
event.configure(timestamp, nanoTime, threadId, buildId);
String message = MAPPER.writeValueAsString(event);
assertJsonEquals("{\"timestamp\":%d,\"nanoTime\":%d,\"threadId\":%d,\"buildId\":\"%s\"," +
"\"eventKey\":{\"value\":4242},\"eventId\":\"PerfId\",\"eventType\":\"STARTED\"," +
"\"eventInfo\":{\"value\":{\"present\":true}},\"type\":\"PerfEventPerfIdStarted\"}",
message);
}
private BuildRule generateFakeBuildRule() {
BuildTarget buildTarget = BuildTargetFactory.newInstance("//fake:rule");
return new FakeBuildRule(
buildTarget,
new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer())),
ImmutableSortedSet.<BuildRule>of());
}
private TestResults generateFakeTestResults() {
String testCaseName = "Test1";
TestResultSummary testResultSummary = new TestResultSummary(
testCaseName, null, ResultType.FAILURE, 0, null, null, null, null);
TestCaseSummary testCase = new TestCaseSummary(testCaseName,
ImmutableList.of(testResultSummary));
ImmutableList<TestCaseSummary> testCases = ImmutableList.of(testCase);
return FakeTestResults.of(testCases);
}
  /**
   * Recursively asserts that {@code actual} has exactly the same field structure as
   * {@code expected}, failing with the JSON path of the first mismatch. After the
   * structural walk, values are compared directly via the final assertEquals.
   */
  private void matchJsonObjects(String path, JsonNode expected, JsonNode actual) {
    if (expected != null && actual != null && expected.isObject()) {
      assertTrue(actual.isObject());
      HashSet<String> expectedFields = Sets.newHashSet(expected.fieldNames());
      HashSet<String> actualFields = Sets.newHashSet(actual.fieldNames());
      for (String field : expectedFields) {
        // Every expected field must be present; recurse to report nested mismatches by path.
        assertTrue(
            String.format("Expecting field %s at path %s", field, path),
            actualFields.contains(field));
        matchJsonObjects(path + "/" + field, expected.get(field), actual.get(field));
      }
      // And no extra fields are allowed on the actual side.
      assertEquals("Found unexpected fields",
          Sets.newHashSet(), Sets.difference(actualFields, expectedFields));
    }
    // Non-object nodes (and nulls) are compared by value here.
    assertEquals(
        "At path " + path,
        expected,
        actual);
  }
private void assertJsonEquals(String expected, String actual) throws IOException {
JsonFactory factory = MAPPER.getFactory();
JsonParser jsonParser = factory.createParser(
String.format(expected, timestamp, nanoTime, threadId, buildId));
JsonNode expectedObject = MAPPER.readTree(jsonParser);
jsonParser = factory.createParser(actual);
JsonNode actualObject = MAPPER.readTree(jsonParser);
matchJsonObjects("/", expectedObject, actualObject);
assertEquals(expectedObject, actualObject);
}
}
| apache-2.0 |
hzarrabi/CS201_Assignments_Battleship | hzarrabi_CSCI201_Assignment2/src/hzarrabi_CSCI201_Assignment2/Battleship.java | 6532 | package hzarrabi_CSCI201_Assignment2;
import java.awt.GridLayout;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import java.util.Scanner;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
/**
 * Battleship game model. Loads and validates a saved game file (a "Highscores"
 * header, ten score lines, then a 10x10 ship grid) and writes the state back
 * in the same format. Validation failures pop an ErrorDialog but parsing
 * continues where possible, as in the original implementation.
 */
public class Battleship
{
    private String fileName;
    /** Top-ten scores, sorted ascending (lower is better); unused slots are null. */
    public Score highScores[] = new Score[10];
    /** The 10x10 board, indexed [column][row] to match the file's row-per-line layout. */
    public char theGrid[][] = new char[10][10];

    /**
     * Shifts every entry from {@code index} one slot down (dropping the last
     * entry) to make room for a new score at {@code index}.
     */
    private void moveDown(int index)
    {
        Score displaced = highScores[index];
        // Stop early once a null is reached - nothing further needs shifting.
        while (index < 9 && displaced != null)
        {
            Score next = highScores[index + 1];
            highScores[index + 1] = displaced;
            displaced = next;
            index++;
        }
    }

    /**
     * Inserts a score into the top-ten list, keeping it sorted ascending.
     * Equal scores are placed after the existing entry; scores that do not
     * make the top ten are dropped.
     *
     * @param newGame the score to insert
     */
    public void add(Score newGame)
    {
        for (int i = 0; i < 10; i++)
        {
            if (highScores[i] != null)
            {
                if (newGame.getScore() < highScores[i].getScore())
                {
                    moveDown(i);
                    highScores[i] = newGame;
                    break;
                }
                else if (newGame.getScore() == highScores[i].getScore() && i != 9)
                {
                    // Tie: the new score goes just below the existing one.
                    moveDown(i + 1);
                    highScores[i + 1] = newGame;
                    break;
                }
            }
            else
            {
                // First empty slot - append here.
                highScores[i] = newGame;
                break;
            }
        }
    }

    /**
     * Reads and validates a saved game file, populating {@link #highScores}
     * and {@link #theGrid}.
     *
     * @param fileName path of the save file to read
     */
    public void readFile(String fileName)
    {
        this.fileName = fileName;
        // try-with-resources: the original leaked both readers if a validation
        // path threw before the explicit close() calls.
        try (FileReader fr = new FileReader(this.fileName);
             BufferedReader br = new BufferedReader(fr))
        {
            String line = br.readLine();
            // Guard against an empty file before dereferencing the first line.
            if (line == null || !line.contains("Highscores"))
            {
                new ErrorDialog(new JFrame(), "Your file was formatted incorrectly!");
            }
            // ================== Reading in high scores ======================
            for (int i = 1; i < 11; i++)
            {
                line = br.readLine();
                // Fix: the original called line.indexOf(' ') before this null
                // check, risking a NullPointerException on truncated files.
                if (line == null || line.isEmpty())
                {
                    new ErrorDialog(new JFrame(), "Your file had a blank space!");
                }
                else
                {
                    // Split on runs of non-alphanumerics: "1. Bob - 42" -> ["1", "Bob", "42"].
                    String[] theWords = line.replaceAll("[^0-9a-zA-Z]", " ").split("\\s+");
                    if (theWords.length == 1)
                    {
                        // Just a rank number (e.g. "3.") - an empty high-score slot; skip.
                    }
                    else if (theWords.length > 3)
                    {
                        new ErrorDialog(new JFrame(), "More than 3 elements were parsed!");
                    }
                    else
                    {
                        String name = theWords[1];
                        int score = Integer.parseInt(theWords[2]);
                        this.add(new Score(score, name));
                    }
                }
            }
            // ============================ reading the grid ==============================
            int a = 0, b = 0, c = 0, d = 0; // cell counts per ship type
            for (int q = 0; q < 10; q++)
            {
                line = br.readLine();
                // Treat a missing row the same as a wrong-length row instead of NPE-ing.
                if (line == null || line.length() != 10)
                {
                    new ErrorDialog(new JFrame(), "Incorrect # of coordinates on line " + (q + 1) + " !");
                }
                else
                {
                    char[] charArray = line.toCharArray();
                    for (int j = 0; j < 10; j++)
                    {
                        char cell = charArray[j];
                        if (cell != 'X' && cell != 'A' && cell != 'B' && cell != 'C' && cell != 'D')
                        {
                            new ErrorDialog(new JFrame(), "You have a \"" + cell + "\"");
                        }
                        else
                        {
                            if (cell == 'A') a++;
                            if (cell == 'B') b++;
                            if (cell == 'C') c++;
                            if (cell == 'D') d++;
                            theGrid[j][q] = cell; // grid is [column][row]
                        }
                    }
                }
            }
            if (a != 5 || b != 4 || c != 3 || d != 4)
            {
                new ErrorDialog(new JFrame(), "Incorrect number of ships!");
            }
            // ======================= checking ships are contiguous ===================
            int a1 = 0, b1 = 0, c1 = 0, d1 = 0; // contiguous runs found per ship type
            for (int i = 0; i < 10; i++)
            {
                // columns
                String test = new String(theGrid[i]);
                if (test.contains("AAAAA")) a1++;
                if (test.contains("BBB")) b1++;
                if (test.contains("CCC")) c1++;
                if (test.contains("DD"))
                {
                    d1++;
                    if (test.contains("DDDD")) d1++; // two D ships placed back-to-back
                }
                // rows (transpose)
                char[] tempArray = new char[10];
                for (int j = 0; j < 10; j++)
                {
                    tempArray[j] = theGrid[j][i];
                }
                String test1 = new String(tempArray);
                if (test1.contains("AAAAA")) a1++;
                if (test1.contains("BBB")) b1++;
                if (test1.contains("CCC")) c1++;
                if (test1.contains("DD"))
                {
                    d1++;
                    if (test1.contains("DDDD")) d1++; // two D ships placed back-to-back
                }
            }
            if (a1 != 1 || b1 != 1 || c1 != 1 || d1 != 2)
            {
                new ErrorDialog(new JFrame(), "Incorrect number of ships!");
            }
        }
        catch (FileNotFoundException fnfe)
        {
            ErrorDialog notFound = new ErrorDialog(new JFrame(), "Your file was not found!");
            notFound.setVisible(true);
        }
        catch (IOException ioe)
        {
            // Fix: replaced the placeholder message with something actionable.
            System.out.println("Error reading " + this.fileName + ": " + ioe.getMessage());
        }
    }

    /**
     * Writes the high scores and grid back to the file this object was loaded
     * from, in the same format {@link #readFile(String)} expects.
     */
    public void writeFile()
    {
        try (PrintWriter writer = new PrintWriter(fileName))
        {
            writer.println("Highscores:");
            for (int i = 0; i < 10; i++)
            {
                if (highScores[i] != null)
                {
                    writer.println((i + 1) + ". " + highScores[i].getName() + " - " + highScores[i].getScore());
                }
                else
                {
                    writer.println((i + 1) + ".");
                }
            }
            // Emit rows by transposing the [column][row] grid.
            for (int i = 0; i < 10; i++)
            {
                char[] row = new char[10];
                for (int j = 0; j < 10; j++)
                {
                    row[j] = theGrid[j][i];
                }
                writer.println(new String(row));
            }
            writer.flush();
        }
        catch (FileNotFoundException e)
        {
            // Fix: was silently swallowed; at least report the failure.
            System.out.println("Could not write to " + fileName + ": " + e.getMessage());
        }
    }

    public static void main(String[] args)
    {
        Battleship battleshipObject = new Battleship();
        if (args.length > 0) // a file name was supplied on the command line
        {
            battleshipObject.readFile(args[0]);
        }
        else
        {
            System.out.println("You didn't enter a file name intitially!");
            System.out.print("Enter your file name: ");
            Scanner scan = new Scanner(System.in);
            String file = scan.nextLine();
            while (file.length() < 4)
            {
                System.out.println("You didnt enter a filename!");
                System.out.print("Enter your file name:");
                file = scan.nextLine();
            }
            scan.close(); // fix: the Scanner was never closed
            battleshipObject.readFile(file);
        }
        new GameWindow(battleshipObject);
    }
}
| apache-2.0 |
VHAINNOVATIONS/AVS | ll-javaBroker/src/main/java/gov/va/med/lom/javaBroker/rpc/lists/models/ServicesList.java | 460 | package gov.va.med.lom.javaBroker.rpc.lists.models;
import gov.va.med.lom.javaBroker.rpc.BaseBean;
import java.io.Serializable;
/**
 * Serializable container bean holding an array of {@link Service} entries.
 */
public class ServicesList extends BaseBean implements Serializable {

  private Service[] services;

  /** Creates an empty list; {@link #getServices()} returns null until set. */
  public ServicesList() {
    services = null;
  }

  /** @return the held services array, or null if none has been set */
  public Service[] getServices() {
    return services;
  }

  /** @param services the services array to hold */
  public void setServices(final Service[] services) {
    this.services = services;
  }
}
| apache-2.0 |
caichengan/MyWeiBo2 | app/src/main/java/com/xht/android/myweibo/mode/ChatBean.java | 944 | package com.xht.android.myweibo.mode;
/**
* Created by an on 2017/3/17.
*/
public class ChatBean {

    // HD profile image URL.
    private String imgURLHD;
    // Display name.
    private String mName;
    // User id.
    private String mUid;
    // Message content.
    private String mContent;
    // Message type tag.
    private String type;

    public String getImgURLHD() {
        return imgURLHD;
    }

    public void setImgURLHD(String imgURLHD) {
        this.imgURLHD = imgURLHD;
    }

    public String getmName() {
        return mName;
    }

    public void setmName(String mName) {
        this.mName = mName;
    }

    public String getmUid() {
        return mUid;
    }

    public void setmUid(String mUid) {
        this.mUid = mUid;
    }

    public String getmContent() {
        return mContent;
    }

    public void setmContent(String mContent) {
        this.mContent = mContent;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }
}
| apache-2.0 |
vvinston/functional-helpers | src/main/java/com/github/vvinston/functional/ConditionalBiConsumerBuilderStepOne.java | 887 | package com.github.vvinston.functional;
import javax.annotation.Nonnull;
import java.util.LinkedList;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.BiPredicate;
/**
 * First step of the conditional bi-consumer builder: carries the predicate and
 * waits for {@link #then} to pair it with a consumer.
 */
public final class ConditionalBiConsumerBuilderStepOne<INPUT1, INPUT2> {

    private final BiPredicate<INPUT1, INPUT2> predicate;

    ConditionalBiConsumerBuilderStepOne(@Nonnull final BiPredicate<INPUT1, INPUT2> predicate) {
        this.predicate = predicate;
    }

    /**
     * Pairs the stored predicate with the given consumer, seeding the case
     * list for step two with this first (predicate, consumer) pair.
     */
    @SuppressWarnings("PMD.AccessorClassGeneration")
    public ConditionalBiConsumerBuilderStepTwo<INPUT1, INPUT2> then(@Nonnull final BiConsumer<INPUT1, INPUT2> success) {
        final List<Tuple<BiPredicate<INPUT1, INPUT2>, BiConsumer<INPUT1, INPUT2>>> branches = new LinkedList<>();
        branches.add(SimpleTuple.of(predicate, success));
        return new ConditionalBiConsumerBuilderStepTwo<>(branches);
    }
}
| apache-2.0 |
markphillipwalker/Zcreen | src/com/stalkerwalker/zcreen/Main.java | 1151 | /*
* Copyright (c) 2014. Mark Walker <markphillipwalker@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stalkerwalker.zcreen;
import com.stalkerwalker.zcreen.zcenes.*;
import javafx.application.Application;
import javafx.stage.Stage;
/**
 * JavaFX application entry point: builds a {@code Zcene} from the raw
 * command-line arguments and hands it the primary stage.
 */
public class Main extends Application {
    // The scene implementation chosen by ZceneFactory from the CLI arguments.
    private Zcene zcene;
    // init() runs before the stage exists, so only the scene object is built here.
    @Override
    public void init() throws Exception {
        zcene = ZceneFactory.createZcreen(getParameters().getRaw());
    }
    @Override
    public void start(Stage stage) throws Exception{
        zcene.start(stage);
    }
    public static void main(String[] args) {
        launch(args);
    }
}
| apache-2.0 |
McLeodMoores/starling | projects/analytics/src/main/java/com/opengamma/analytics/financial/instrument/payment/CouponIborAverageIndexDefinition.java | 24170 | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.instrument.payment;
import org.apache.commons.lang.ObjectUtils;
import org.threeten.bp.LocalDate;
import org.threeten.bp.LocalDateTime;
import org.threeten.bp.LocalTime;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.ZonedDateTime;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.instrument.InstrumentDefinitionVisitor;
import com.opengamma.analytics.financial.instrument.index.IborIndex;
import com.opengamma.analytics.financial.interestrate.payments.derivative.Coupon;
import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponFixed;
import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponIborAverage;
import com.opengamma.analytics.financial.schedule.ScheduleCalculator;
import com.opengamma.analytics.util.time.TimeCalculator;
import com.opengamma.financial.convention.calendar.Calendar;
import com.opengamma.timeseries.DoubleTimeSeries;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
/**
* Class describing an average Ibor-like floating coupon (weighted mean of two different indexes).
*/
public class CouponIborAverageIndexDefinition extends CouponFloatingDefinition {
/**
* Ibor-like index1 on which the coupon fixes. The index currency should be the same as the coupon currency.
*/
private final IborIndex _index1;
/**
* Ibor-like index2 on which the coupon fixes. The index currency should be the same as the coupon currency.
*/
private final IborIndex _index2;
/**
* The weight for the first index.
*/
private final double _weight1;
/**
* The weight of the second index.
*/
private final double _weight2;
/**
* The start date of the fixing period of the first index.
*/
private final ZonedDateTime _fixingPeriodStartDate1;
/**
* The end date of the fixing period of the first index.
*/
private final ZonedDateTime _fixingPeriodEndDate1;
/**
* The accrual factor (or year fraction) associated to the fixing period of the first index in the Index day count convention.
*/
private final double _fixingPeriodAccrualFactor1;
/**
* The start date of the fixing period of the second index.
*/
private final ZonedDateTime _fixingPeriodStartDate2;
/**
* The end date of the fixing period of the second index.
*/
private final ZonedDateTime _fixingPeriodEndDate2;
/**
* The accrual factor (or year fraction) associated to the fixing period of the second index in the Index day count convention.
*/
private final double _fixingPeriodAccrualFactor2;
  /**
   * Constructor of an average Ibor-like floating coupon from the coupon details and the two Ibor indices. Both indices
   * must be in the coupon (payment) currency. The fixing period for each index is derived from the single fixing date
   * using that index's spot lag, tenor, business-day convention, end-of-month rule and calendar.
   *
   * @param currency
   *          The coupon currency.
   * @param paymentDate
   *          The coupon payment date.
   * @param accrualStartDate
   *          The start date of the coupon accrual period.
   * @param accrualEndDate
   *          The end date of the coupon accrual period.
   * @param paymentAccrualFactor
   *          The accrual factor (year fraction) of the coupon accrual period.
   * @param notional
   *          The coupon notional.
   * @param fixingDate
   *          The coupon fixing date, shared by both indices.
   * @param index1
   *          The first coupon Ibor index. Must have the same currency as the payment.
   * @param index2
   *          The second coupon Ibor index. Must have the same currency as the payment.
   * @param weight1
   *          The weight of the first index.
   * @param weight2
   *          The weight of the second index.
   * @param iborCalendar1
   *          The holiday calendar for the first Ibor index.
   * @param iborCalendar2
   *          The holiday calendar for the second Ibor index.
   */
  public CouponIborAverageIndexDefinition(final Currency currency, final ZonedDateTime paymentDate, final ZonedDateTime accrualStartDate,
      final ZonedDateTime accrualEndDate,
      final double paymentAccrualFactor, final double notional, final ZonedDateTime fixingDate, final IborIndex index1, final IborIndex index2,
      final double weight1,
      final double weight2, final Calendar iborCalendar1, final Calendar iborCalendar2) {
    super(currency, paymentDate, accrualStartDate, accrualEndDate, paymentAccrualFactor, notional, fixingDate);
    ArgumentChecker.notNull(index1, "index1");
    ArgumentChecker.notNull(index2, "index2");
    ArgumentChecker.isTrue(currency.equals(index1.getCurrency()), "index1 currency different from payment currency");
    ArgumentChecker.isTrue(currency.equals(index2.getCurrency()), "index2 currency different from payment currency");
    _index1 = index1;
    _index2 = index2;
    _weight1 = weight1;
    _weight2 = weight2;
    // Fixing period 1: starts spot-lag business days after the fixing date and ends one index tenor later,
    // adjusted with the index business-day convention and end-of-month rule.
    _fixingPeriodStartDate1 = ScheduleCalculator.getAdjustedDate(fixingDate, _index1.getSpotLag(), iborCalendar1);
    _fixingPeriodEndDate1 = ScheduleCalculator.getAdjustedDate(_fixingPeriodStartDate1, index1.getTenor(), index1.getBusinessDayConvention(), iborCalendar1,
        index1.isEndOfMonth());
    _fixingPeriodAccrualFactor1 = index1.getDayCount().getDayCountFraction(_fixingPeriodStartDate1, _fixingPeriodEndDate1, iborCalendar1);
    // Fixing period 2: same construction using the second index's conventions and calendar.
    _fixingPeriodStartDate2 = ScheduleCalculator.getAdjustedDate(fixingDate, _index2.getSpotLag(), iborCalendar2);
    _fixingPeriodEndDate2 = ScheduleCalculator.getAdjustedDate(_fixingPeriodStartDate2, index2.getTenor(), index2.getBusinessDayConvention(), iborCalendar2,
        index2.isEndOfMonth());
    _fixingPeriodAccrualFactor2 = index2.getDayCount().getDayCountFraction(_fixingPeriodStartDate2, _fixingPeriodEndDate2, iborCalendar2);
  }
  /**
   * Constructor of an average Ibor-like floating coupon from the coupon details, the two Ibor indices and explicitly
   * supplied fixing period dates/accrual factors for each index (no schedule calculation is performed). Both indices
   * must be in the coupon (payment) currency.
   *
   * @param currency
   *          The coupon currency.
   * @param paymentDate
   *          The coupon payment date.
   * @param accrualStartDate
   *          The start date of the coupon accrual period.
   * @param accrualEndDate
   *          The end date of the coupon accrual period.
   * @param paymentAccrualFactor
   *          The accrual factor (year fraction) of the coupon accrual period.
   * @param notional
   *          The coupon notional.
   * @param fixingDate
   *          The coupon fixing date.
   * @param fixingPeriodStartDate1
   *          The start date of the fixing period of the first index.
   * @param fixingPeriodEndDate1
   *          The end date of the fixing period of the first index.
   * @param fixingPeriodAccrualFactor1
   *          The accrual factor (year fraction) of the first fixing period, in the first index day count convention.
   * @param fixingPeriodStartDate2
   *          The start date of the fixing period of the second index.
   * @param fixingPeriodEndDate2
   *          The end date of the fixing period of the second index.
   * @param fixingPeriodAccrualFactor2
   *          The accrual factor (year fraction) of the second fixing period, in the second index day count convention.
   * @param index1
   *          The first coupon Ibor index. Must have the same currency as the payment.
   * @param index2
   *          The second coupon Ibor index. Must have the same currency as the payment.
   * @param weight1
   *          The weight of the first index.
   * @param weight2
   *          The weight of the second index.
   */
  public CouponIborAverageIndexDefinition(final Currency currency, final ZonedDateTime paymentDate, final ZonedDateTime accrualStartDate,
      final ZonedDateTime accrualEndDate,
      final double paymentAccrualFactor, final double notional, final ZonedDateTime fixingDate, final ZonedDateTime fixingPeriodStartDate1,
      final ZonedDateTime fixingPeriodEndDate1, final double fixingPeriodAccrualFactor1, final ZonedDateTime fixingPeriodStartDate2,
      final ZonedDateTime fixingPeriodEndDate2, final double fixingPeriodAccrualFactor2, final IborIndex index1, final IborIndex index2, final double weight1,
      final double weight2) {
    super(currency, paymentDate, accrualStartDate, accrualEndDate, paymentAccrualFactor, notional, fixingDate);
    ArgumentChecker.notNull(index1, "index1");
    ArgumentChecker.notNull(index2, "index2");
    ArgumentChecker.isTrue(currency.equals(index1.getCurrency()), "index1 currency different from payment currency");
    ArgumentChecker.isTrue(currency.equals(index2.getCurrency()), "index2 currency different from payment currency");
    _index1 = index1;
    _index2 = index2;
    _weight1 = weight1;
    _weight2 = weight2;
    // The fixing period dates are stored as given; no business-day adjustment is applied here.
    _fixingPeriodStartDate1 = fixingPeriodStartDate1;
    _fixingPeriodEndDate1 = fixingPeriodEndDate1;
    _fixingPeriodAccrualFactor1 = fixingPeriodAccrualFactor1;
    _fixingPeriodStartDate2 = fixingPeriodStartDate2;
    _fixingPeriodEndDate2 = fixingPeriodEndDate2;
    _fixingPeriodAccrualFactor2 = fixingPeriodAccrualFactor2;
  }
  /**
   * Builder of an average Ibor-like floating coupon from the coupon details and the two Ibor indices. The two indices
   * must share the same currency, which becomes the payment currency. Fixing period dates are derived from the fixing
   * date and each index's conventions.
   *
   * @param paymentDate
   *          Coupon payment date.
   * @param accrualStartDate
   *          Start date of the coupon accrual period.
   * @param accrualEndDate
   *          End date of the coupon accrual period.
   * @param paymentAccrualFactor
   *          Accrual factor of the coupon accrual period.
   * @param notional
   *          Coupon notional.
   * @param fixingDate
   *          The coupon fixing date.
   * @param index1
   *          The first coupon Ibor index.
   * @param index2
   *          The second coupon Ibor index. Must have the same currency as the first.
   * @param weight1
   *          The weight of the first index.
   * @param weight2
   *          The weight of the second index.
   * @param iborCalendar1
   *          The calendar associated to the first index.
   * @param iborCalendar2
   *          The calendar associated to the second index.
   * @return The Ibor coupon.
   */
  public static CouponIborAverageIndexDefinition from(final ZonedDateTime paymentDate, final ZonedDateTime accrualStartDate, final ZonedDateTime accrualEndDate,
      final double paymentAccrualFactor, final double notional, final ZonedDateTime fixingDate, final IborIndex index1, final IborIndex index2,
      final double weight1,
      final double weight2, final Calendar iborCalendar1, final Calendar iborCalendar2) {
    ArgumentChecker.notNull(index1, "index1");
    ArgumentChecker.notNull(index2, "index2");
    ArgumentChecker.isTrue(index1.getCurrency().equals(index2.getCurrency()), "index1 currency different from index2 currency");
    // The payment currency is taken from index1 (both indices are checked equal above).
    return new CouponIborAverageIndexDefinition(index1.getCurrency(), paymentDate, accrualStartDate, accrualEndDate, paymentAccrualFactor, notional, fixingDate,
        index1,
        index2, weight1, weight2, iborCalendar1, iborCalendar2);
  }
/**
* Builder of Ibor-like coupon from an underlying coupon, the fixing date, the weights and the indeces. The fixing period dates are deduced from the index and
* the fixing date.
*
* @param coupon
* Underlying coupon.
* @param fixingDate
* The coupon fixing date.
* @param index1
* The first coupon Ibor index.
* @param index2
* The second coupon Ibor index.
* @param weight1
* The weight of the first index.
* @param weight2
* The weight of the second index.
* @param iborCalendar1
* The calendar associated to the first index.
* @param iborCalendar2
* The calendar associated to the second index.
* @return The Ibor coupon.
*/
public static CouponIborAverageIndexDefinition from(final CouponDefinition coupon, final ZonedDateTime fixingDate, final IborIndex index1,
final IborIndex index2,
final double weight1, final double weight2, final Calendar iborCalendar1, final Calendar iborCalendar2) {
ArgumentChecker.notNull(coupon, "coupon");
ArgumentChecker.notNull(fixingDate, "fixing date");
ArgumentChecker.notNull(index1, "index1");
ArgumentChecker.notNull(index2, "index1");
ArgumentChecker.isTrue(index1.getCurrency().equals(index2.getCurrency()), "index1 currency different from index2 currency");
return new CouponIborAverageIndexDefinition(index1.getCurrency(), coupon.getPaymentDate(), coupon.getAccrualStartDate(), coupon.getAccrualEndDate(),
coupon.getPaymentYearFraction(), coupon.getNotional(), fixingDate, index1, index2, weight1, weight2, iborCalendar1, iborCalendar2);
}
  /**
   * Gets the first Ibor index on which the coupon fixes. Its currency equals the coupon
   * currency (enforced in the constructors).
   *
   * @return The first index.
   */
  public IborIndex getIndex1() {
    return _index1;
  }
  /**
   * Gets the second Ibor index on which the coupon fixes. Its currency equals the coupon
   * currency (enforced in the constructors).
   *
   * @return The second index.
   */
  public IborIndex getIndex2() {
    return _index2;
  }
  /**
   * Gets the weight applied to the first index in the average.
   *
   * @return The first weight.
   */
  public double getWeight1() {
    return _weight1;
  }
  /**
   * Gets the weight applied to the second index in the average.
   *
   * @return The second weight.
   */
  public double getWeight2() {
    return _weight2;
  }
  /**
   * Gets the start date of the fixing period of the first index.
   *
   * @return The start date of the first fixing period.
   */
  public ZonedDateTime getFixingPeriodStartDate1() {
    return _fixingPeriodStartDate1;
  }
  /**
   * Gets the end date of the fixing period of the first index.
   *
   * @return The end date of the first fixing period.
   */
  public ZonedDateTime getFixingPeriodEndDate1() {
    return _fixingPeriodEndDate1;
  }
  /**
   * Gets the start date of the fixing period of the second index.
   *
   * @return The start date of the second fixing period.
   */
  public ZonedDateTime getFixingPeriodStartDate2() {
    return _fixingPeriodStartDate2;
  }
  /**
   * Gets the end date of the fixing period of the second index.
   *
   * @return The end date of the second fixing period.
   */
  public ZonedDateTime getFixingPeriodEndDate2() {
    return _fixingPeriodEndDate2;
  }
  /**
   * Gets the accrual factor (year fraction) of the first fixing period, expressed in the
   * first index day count convention.
   *
   * @return The accrual factor of the first index.
   */
  public double getFixingPeriodAccrualFactor1() {
    return _fixingPeriodAccrualFactor1;
  }
  /**
   * Gets the accrual factor (year fraction) of the second fixing period, expressed in the
   * second index day count convention.
   *
   * @return The accrual factor of the second index.
   */
  public double getFixingPeriodAccrualFactor2() {
    return _fixingPeriodAccrualFactor2;
  }
  /**
   * Creates a new coupon identical to this one except for the notional, which is replaced by the
   * value provided. The already-computed fixing period dates and accrual factors are reused as-is.
   *
   * @param notional
   *          The notional.
   * @return The coupon.
   */
  public CouponIborAverageIndexDefinition withNotional(final double notional) {
    return new CouponIborAverageIndexDefinition(getCurrency(), getPaymentDate(), getAccrualStartDate(), getAccrualEndDate(), getPaymentYearFraction(), notional,
        getFixingDate(), _fixingPeriodStartDate1, _fixingPeriodEndDate1, _fixingPeriodAccrualFactor1, _fixingPeriodStartDate2, _fixingPeriodEndDate2,
        _fixingPeriodAccrualFactor2, _index1, _index2, _weight1, _weight2);
  }
  /**
   * {@inheritDoc} The yield curve names are ignored; this delegates to {@link #toDerivative(ZonedDateTime)}.
   *
   * @deprecated Use the method that does not take yield curve names
   */
  @Deprecated
  @Override
  public Coupon toDerivative(final ZonedDateTime dateTime, final String... yieldCurveNames) {
    return toDerivative(dateTime);
  }
  /**
   * {@inheritDoc} The yield curve names are ignored; this delegates to the equivalent method taking the
   * fixing time series but no curve names, which handles the fixed/floating conversion around the fixing
   * date. All the comparisons are between dates without time.
   *
   * @deprecated Use the method that does not take yield curve names
   */
  @Deprecated
  @Override
  public Coupon toDerivative(final ZonedDateTime dateTime, final DoubleTimeSeries<ZonedDateTime> indexFixingTimeSeries, final String... yieldCurveNames) {
    return toDerivative(dateTime, indexFixingTimeSeries);
  }
  /**
   * {@inheritDoc} Converts this definition to a {@link CouponIborAverage} with all dates expressed as
   * year fractions from {@code dateTime}. Since no fixing data is supplied here, the conversion date must
   * not be after the fixing date (nor after the payment date); comparisons are made on dates without time.
   */
  @Override
  public Coupon toDerivative(final ZonedDateTime dateTime) {
    ArgumentChecker.notNull(dateTime, "date");
    final LocalDate dayConversion = dateTime.toLocalDate();
    ArgumentChecker.isTrue(!dayConversion.isAfter(getFixingDate().toLocalDate()), "Do not have any fixing data but are asking for a derivative at " + dateTime
        + " which is after fixing date " + getFixingDate());
    ArgumentChecker.isTrue(!dayConversion.isAfter(getPaymentDate().toLocalDate()), "date is after payment date");
    // Convert every relevant date to a time (in years) relative to the conversion date.
    final double paymentTime = TimeCalculator.getTimeBetween(dateTime, getPaymentDate());
    final double fixingTime = TimeCalculator.getTimeBetween(dateTime, getFixingDate());
    final double fixingPeriodStartTime1 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodStartDate1());
    final double fixingPeriodEndTime1 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodEndDate1());
    final double fixingPeriodStartTime2 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodStartDate2());
    final double fixingPeriodEndTime2 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodEndDate2());
    return new CouponIborAverage(getCurrency(), paymentTime, getPaymentYearFraction(), getNotional(), fixingTime, getIndex1(), fixingPeriodStartTime1,
        fixingPeriodEndTime1, getFixingPeriodAccrualFactor1(), getIndex2(), fixingPeriodStartTime2, fixingPeriodEndTime2, getFixingPeriodAccrualFactor2(),
        getWeight1(),
        getWeight2());
  }
/**
 * {@inheritDoc} If the fixing date is strictly before the conversion date and the fixing rate is not available, an exception is thrown; if the fixing rate is
 * available a fixed coupon is returned. If the fixing date is equal to the conversion date, if the fixing rate is available a fixed coupon is returned, if
 * not a coupon Ibor with spread is returned. If the fixing date is strictly after the conversion date, a coupon Ibor is returned. All the comparisons are
 * between dates without time.
 */
@Override
public Coupon toDerivative(final ZonedDateTime dateTime, final DoubleTimeSeries<ZonedDateTime> indexFixingTimeSeries) {
  ArgumentChecker.notNull(dateTime, "date");
  final LocalDate dayConversion = dateTime.toLocalDate();
  ArgumentChecker.notNull(indexFixingTimeSeries, "Index fixing time series");
  ArgumentChecker.isTrue(!dayConversion.isAfter(getPaymentDate().toLocalDate()), "date is after payment date");
  final double paymentTime = TimeCalculator.getTimeBetween(dateTime, getPaymentDate());
  final LocalDate dayFixing = getFixingDate().toLocalDate();
  if (dayConversion.equals(dayFixing)) { // The fixing is on the reference date; if known the fixing is used and if not, the floating coupon is created.
    // NOTE(review): this branch looks the fixing up with the raw fixing date, while the branch
    // below first rezones it to midnight UTC — confirm both match the time-series key convention.
    final Double fixedRate = indexFixingTimeSeries.getValue(getFixingDate());
    if (fixedRate != null) {
      return new CouponFixed(getCurrency(), paymentTime, getPaymentYearFraction(), getNotional(), fixedRate);
    }
  }
  if (dayConversion.isAfter(dayFixing)) { // The fixing is required
    final ZonedDateTime rezonedFixingDate = ZonedDateTime.of(LocalDateTime.of(getFixingDate().toLocalDate(), LocalTime.of(0, 0)), ZoneOffset.UTC);
    final Double fixedRate = indexFixingTimeSeries.getValue(rezonedFixingDate); // TODO: remove time from fixing date.
    if (fixedRate == null) {
      throw new OpenGammaRuntimeException("Could not get fixing value for date " + getFixingDate());
    }
    return new CouponFixed(getCurrency(), paymentTime, getPaymentYearFraction(), getNotional(), fixedRate);
  }
  // Fixing lies in the future: build the floating average coupon with times measured from the conversion date.
  final double fixingTime = TimeCalculator.getTimeBetween(dateTime, getFixingDate());
  final double fixingPeriodStartTime1 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodStartDate1());
  final double fixingPeriodEndTime1 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodEndDate1());
  final double fixingPeriodStartTime2 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodStartDate2());
  final double fixingPeriodEndTime2 = TimeCalculator.getTimeBetween(dateTime, getFixingPeriodEndDate2());
  return new CouponIborAverage(getCurrency(), paymentTime, getPaymentYearFraction(), getNotional(), fixingTime, getIndex1(), fixingPeriodStartTime1,
      fixingPeriodEndTime1, getFixingPeriodAccrualFactor1(), getIndex2(), fixingPeriodStartTime2, fixingPeriodEndTime2, getFixingPeriodAccrualFactor2(),
      getWeight1(),
      getWeight2());
}
/**
 * Accepts an instrument definition visitor, passing through an additional data argument.
 * NOTE(review): delegates to visitCouponIborAverageDefinition although this class is the
 * Index variant of the definition — confirm this is the intended visitor hook.
 */
@Override
public <U, V> V accept(final InstrumentDefinitionVisitor<U, V> visitor, final U data) {
  ArgumentChecker.notNull(visitor, "visitor");
  return visitor.visitCouponIborAverageDefinition(this, data);
}

/**
 * Accepts an instrument definition visitor without extra data.
 * NOTE(review): see the note on the two-argument overload about the visitor method used.
 */
@Override
public <V> V accept(final InstrumentDefinitionVisitor<?, V> visitor) {
  ArgumentChecker.notNull(visitor, "visitor");
  return visitor.visitCouponIborAverageDefinition(this);
}
/**
 * Hash code combining the parent hash with every field added by this subclass, using the
 * conventional 31-multiplier scheme. Doubles are folded via their IEEE-754 bit patterns.
 */
@Override
public int hashCode() {
  int result = super.hashCode();
  long bits = Double.doubleToLongBits(_fixingPeriodAccrualFactor1);
  result = 31 * result + (int) (bits ^ (bits >>> 32));
  bits = Double.doubleToLongBits(_fixingPeriodAccrualFactor2);
  result = 31 * result + (int) (bits ^ (bits >>> 32));
  result = 31 * result + (_fixingPeriodEndDate1 == null ? 0 : _fixingPeriodEndDate1.hashCode());
  result = 31 * result + (_fixingPeriodEndDate2 == null ? 0 : _fixingPeriodEndDate2.hashCode());
  result = 31 * result + (_fixingPeriodStartDate1 == null ? 0 : _fixingPeriodStartDate1.hashCode());
  result = 31 * result + (_fixingPeriodStartDate2 == null ? 0 : _fixingPeriodStartDate2.hashCode());
  result = 31 * result + (_index1 == null ? 0 : _index1.hashCode());
  result = 31 * result + (_index2 == null ? 0 : _index2.hashCode());
  bits = Double.doubleToLongBits(_weight1);
  result = 31 * result + (int) (bits ^ (bits >>> 32));
  bits = Double.doubleToLongBits(_weight2);
  result = 31 * result + (int) (bits ^ (bits >>> 32));
  return result;
}
/**
 * Equality: same runtime class, equal parent state, and equal values for every field added
 * by this subclass. Doubles are compared through their IEEE-754 bit patterns, matching
 * {@link #hashCode()}.
 */
@Override
public boolean equals(final Object obj) {
  if (this == obj) {
    return true;
  }
  if (!super.equals(obj) || getClass() != obj.getClass()) {
    return false;
  }
  final CouponIborAverageIndexDefinition other = (CouponIborAverageIndexDefinition) obj;
  return Double.doubleToLongBits(_fixingPeriodAccrualFactor1) == Double.doubleToLongBits(other._fixingPeriodAccrualFactor1)
      && Double.doubleToLongBits(_fixingPeriodAccrualFactor2) == Double.doubleToLongBits(other._fixingPeriodAccrualFactor2)
      && ObjectUtils.equals(_fixingPeriodEndDate1, other._fixingPeriodEndDate1)
      && ObjectUtils.equals(_fixingPeriodEndDate2, other._fixingPeriodEndDate2)
      && ObjectUtils.equals(_fixingPeriodStartDate1, other._fixingPeriodStartDate1)
      && ObjectUtils.equals(_fixingPeriodStartDate2, other._fixingPeriodStartDate2)
      && ObjectUtils.equals(_index1, other._index1)
      && ObjectUtils.equals(_index2, other._index2)
      && Double.doubleToLongBits(_weight1) == Double.doubleToLongBits(other._weight1)
      && Double.doubleToLongBits(_weight2) == Double.doubleToLongBits(other._weight2);
}
}
| apache-2.0 |
rolmovel/taildir-v2 | src/main/java/org/keedio/watchdir/WatchDirObserver.java | 6441 | package org.keedio.watchdir;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import name.pachler.nio.file.FileSystems;
import name.pachler.nio.file.Path;
import name.pachler.nio.file.Paths;
import name.pachler.nio.file.StandardWatchEventKind;
import name.pachler.nio.file.WatchEvent;
import name.pachler.nio.file.WatchKey;
import name.pachler.nio.file.WatchService;
import name.pachler.nio.file.ext.ExtendedWatchEventKind;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
/**
 * Runnable that recursively monitors the directory configured in the supplied
 * {@link WatchDirFileSet}. Newly created sub-directories are registered on the fly so the
 * whole tree stays monitored. When a relevant file-system event is observed, every
 * registered {@link WatchDirListener} is notified through {@link #update(WatchDirEvent)}.
 * <p>
 * Events for deleted or modified files are delivered as-is; filtering is delegated to
 * {@code WatchDirFileSet.haveToProccess}.
 */
public class WatchDirObserver implements Runnable {

    private static final Logger LOGGER = LoggerFactory.getLogger(WatchDirObserver.class);

    /** Watch service receiving file-system events (jpathler/jpathwatch implementation). */
    private WatchService watcherSvc;

    /** Listeners notified on every accepted event. */
    private List<WatchDirListener> listeners;

    /** Maps each registration key back to the directory it watches, to rebuild full paths. */
    private final Map<WatchKey, Path> keys;

    /** Configuration: root path, file patterns and start-up behaviour. */
    private WatchDirFileSet set;

    /**
     * Creates the observer and registers the configured root directory and all of its
     * sub-directories. Registration failures are logged but do not abort construction.
     *
     * @param set the file-set configuration describing what to monitor
     */
    public WatchDirObserver(WatchDirFileSet set) {
        this.set = set;
        keys = new HashMap<WatchKey, Path>();
        listeners = new ArrayList<WatchDirListener>();
        try {
            Path directoryToWatch = Paths.get(set.getPath());
            watcherSvc = FileSystems.getDefault().newWatchService();
            registerAll(java.nio.file.Paths.get(directoryToWatch.toString()));
        } catch (IOException e) {
            LOGGER.info("No se puede monitorizar el directorio: " + set.getPath(), e);
        }
    }

    /** Unchecked narrowing of a raw watch event; the caller guarantees the element type. */
    @SuppressWarnings("unchecked")
    static <T> WatchEvent<T> castEvent(WatchEvent<?> event) {
        return (WatchEvent<T>) event;
    }

    /**
     * Method used to record listeners. There must be at least one before {@link #run()}.
     *
     * @param listener Must implement WatchDirListener. See listeners implementations for more information
     */
    public void addWatchDirListener(WatchDirListener listener) {
        listeners.add(listener);
    }

    /**
     * Delivers an event to every registered listener. A failing listener is logged and does
     * not prevent the remaining listeners from being notified.
     */
    protected void update(WatchDirEvent event) {
        for (WatchDirListener listener : listeners) {
            try {
                listener.process(event);
            } catch (WatchDirException e) {
                LOGGER.info("Error procesando el listener", e);
            }
        }
    }

    /** Unchecked narrowing of a raw watch event; the caller guarantees the element type. */
    @SuppressWarnings("unchecked")
    static <T> WatchEvent<T> cast(WatchEvent<?> event) {
        return (WatchEvent<T>) event;
    }

    /**
     * Register the given directory, and all its sub-directories, with the WatchService.
     *
     * @param start The initial path to monitor.
     */
    private void registerAll(final java.nio.file.Path start) throws IOException {
        // register directory and sub-directories
        java.nio.file.Files.walkFileTree(start, new SimpleFileVisitor<java.nio.file.Path>() {
            @Override
            public FileVisitResult preVisitDirectory(java.nio.file.Path dir, BasicFileAttributes attrs)
                throws IOException {
                register(Paths.get(dir.toString()));
                return FileVisitResult.CONTINUE;
            }
        });
    }

    /**
     * Registers a single directory, preferring the extended RENAME events and falling back
     * to CREATE/MODIFY/DELETE when the platform does not support them.
     */
    private void register(Path dir) throws IOException {
        LOGGER.trace("WatchDir: register");
        WatchKey key = null;
        try {
            key = dir.register(watcherSvc, StandardWatchEventKind.ENTRY_CREATE, StandardWatchEventKind.ENTRY_MODIFY, StandardWatchEventKind.ENTRY_DELETE, ExtendedWatchEventKind.ENTRY_RENAME_FROM, ExtendedWatchEventKind.ENTRY_RENAME_TO);
        } catch (UnsupportedOperationException e) {
            LOGGER.debug("Eventos no soportados. Registramos solo CREATE, DELETED, MODIFY");
            key = dir.register(watcherSvc, StandardWatchEventKind.ENTRY_CREATE, StandardWatchEventKind.ENTRY_MODIFY, StandardWatchEventKind.ENTRY_DELETE);
        }
        Path prev = keys.get(key);
        LOGGER.info("Previous directory: " + prev);
        if (prev == null) {
            LOGGER.info("Registering directory: " + dir);
        } else {
            if (!dir.equals(prev)) {
                LOGGER.info("Updating previous directory: " + "-> " + prev + " to " + dir);
            }
        }
        keys.put(key, dir);
    }

    /**
     * Event loop: optionally replays pre-existing files, then blocks on the watch service
     * and dispatches events until the thread is interrupted.
     */
    @Override
    public void run() {
        if (listeners.isEmpty()) {
            LOGGER.error("No existen listeners. Finalizando");
        } else {
            try {
                boolean fin = false;
                // First, process every file that already existed before start-up, if configured.
                if (set.isReadOnStartup()) {
                    for (String file : set.getExistingFiles()) {
                        WatchDirEvent event = new WatchDirEvent(file, "ENTRY_CREATE", set);
                        update(event);
                        LOGGER.debug("Fichero existente anteriormente:" + file + " .Se procesa");
                    }
                }
                // The loop only terminates through an InterruptedException from take()/sleep().
                while (!fin) {
                    // wait for key to be signaled
                    WatchKey key;
                    key = watcherSvc.take();
                    Path dir = keys.get(key);
                    for (WatchEvent<?> event : key.pollEvents()) {
                        WatchEvent<Path> ev = cast(event);
                        Path name = ev.context();
                        Path path = dir.resolve(name);
                        // A newly created directory must itself be registered (recursively).
                        if (java.nio.file.Files.isDirectory(java.nio.file.Paths.get(path.toString()), NOFOLLOW_LINKS)) {
                            registerAll(java.nio.file.Paths.get(path.toString()));
                        } else {
                            if (set.haveToProccess(path.toString())) {
                                update(new WatchDirEvent(path.toString(), event.kind().name(), set));
                            }
                        }
                    }
                    // Reset the key; if the directory is no longer accessible, drop it so the
                    // key map does not accumulate stale entries.
                    boolean valid = key.reset();
                    if (!valid) {
                        keys.remove(key);
                    }
                    Thread.sleep(1000);
                }
            } catch (InterruptedException e) {
                // Restore the interrupt flag so the owner of this thread can observe it.
                Thread.currentThread().interrupt();
                LOGGER.info(e.getMessage(), e);
            } catch (Exception e) {
                LOGGER.info(e.getMessage(), e);
            }
        }
    }

    /**
     * Returns true when the given string matches at least one of the comma-separated
     * regular expressions; each pattern is anchored at the end of the string with '$'.
     *
     * @param patterns comma-separated list of regular expressions
     * @param string   the candidate string
     * @return whether any pattern matches
     */
    public static boolean match(String patterns, String string) {
        String[] splitPat = patterns.split(",");
        boolean match = false;
        for (String pattern : splitPat) {
            Pattern pat = Pattern.compile(pattern + "$");
            Matcher mat = pat.matcher(string);
            match = match || mat.find();
            if (match) {
                break;
            }
        }
        return match;
    }
}
| apache-2.0 |
michelegonella/zen-project | zen-core/src/main/java/com/nominanuda/zen/reactivepipe/SyncProcessor.java | 564 | package com.nominanuda.zen.reactivepipe;
import org.reactivestreams.Processor;
import org.reactivestreams.Subscriber;
/**
 * Skeletal reactive-streams {@link Processor} that combines a synchronous subscriber side
 * ({@code SynchSubscriber<T>}) with a publisher side producing {@code R}.
 * <p>
 * NOTE(review): every callback below is an auto-generated stub with an empty body, so the
 * class currently discards all signals — confirm whether that is intentional.
 *
 * @param <T> type of the elements consumed
 * @param <R> type of the elements published
 */
public class SyncProcessor<T,R> implements SynchSubscriber<T>, Processor<T,R> {

    /** Called for each element received from the upstream publisher. Currently a no-op. */
    @Override
    public void onNext(T t) {
        // TODO Auto-generated method stub
    }

    /** Called when the upstream publisher signals a terminal failure. Currently a no-op. */
    @Override
    public void onError(Throwable t) {
        // TODO Auto-generated method stub
    }

    /** Called when the upstream publisher completes normally. Currently a no-op. */
    @Override
    public void onComplete() {
        // TODO Auto-generated method stub
    }

    /** Invoked when a downstream subscriber subscribes to the publisher side. Currently a no-op. */
    @Override
    public void subscribe(Subscriber<? super R> s) {
        // TODO Auto-generated method stub
    }
}
| apache-2.0 |
sdgdsffdsfff/tddl | tddl-executor/src/main/java/com/taobao/tddl/executor/cursor/impl/MergeCursor.java | 16173 | package com.taobao.tddl.executor.cursor.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.lang.StringUtils;
import com.taobao.tddl.common.exception.TddlException;
import com.taobao.tddl.common.utils.GeneralUtil;
import com.taobao.tddl.common.utils.logger.Logger;
import com.taobao.tddl.common.utils.logger.LoggerFactory;
import com.taobao.tddl.executor.codec.CodecFactory;
import com.taobao.tddl.executor.codec.RecordCodec;
import com.taobao.tddl.executor.common.DuplicateKVPair;
import com.taobao.tddl.executor.common.ExecutionContext;
import com.taobao.tddl.executor.common.ExecutorContext;
import com.taobao.tddl.executor.cursor.ICursorMeta;
import com.taobao.tddl.executor.cursor.IMergeCursor;
import com.taobao.tddl.executor.cursor.ISchematicCursor;
import com.taobao.tddl.executor.cursor.SchematicCursor;
import com.taobao.tddl.executor.record.CloneableRecord;
import com.taobao.tddl.executor.rowset.IRowSet;
import com.taobao.tddl.executor.utils.ExecUtils;
import com.taobao.tddl.optimizer.OptimizerContext;
import com.taobao.tddl.optimizer.config.table.ColumnMeta;
import com.taobao.tddl.optimizer.core.ASTNodeFactory;
import com.taobao.tddl.optimizer.core.ast.query.KVIndexNode;
import com.taobao.tddl.optimizer.core.expression.IBooleanFilter;
import com.taobao.tddl.optimizer.core.expression.IColumn;
import com.taobao.tddl.optimizer.core.expression.IFilter;
import com.taobao.tddl.optimizer.core.expression.IFilter.OPERATION;
import com.taobao.tddl.optimizer.core.expression.IFunction;
import com.taobao.tddl.optimizer.core.expression.IOrderBy;
import com.taobao.tddl.optimizer.core.expression.ISelectable;
import com.taobao.tddl.optimizer.core.expression.ISelectable.DATA_TYPE;
import com.taobao.tddl.optimizer.core.plan.IDataNodeExecutor;
import com.taobao.tddl.optimizer.core.plan.query.IQuery;
import com.taobao.tddl.optimizer.utils.FilterUtils;
/**
 * Cursor that concatenates the rows of several sub-cursors: each sub-cursor is drained in
 * turn and closed before moving on to the next one. Order-by metadata is taken from the
 * first sub-cursor (or supplied explicitly).
 *
 * @author mengshi.sunmengshi 2013-12-19 下午12:18:29
 * @since 5.1.0
 */
public class MergeCursor extends SchematicCursor implements IMergeCursor {

    private final Logger logger = LoggerFactory.getLogger(MergeCursor.class);

    /** Sub-cursors drained sequentially. */
    protected List<ISchematicCursor> cursors;
    /** Safety cap on the number of rows collected by {@link #mgetWithDuplicate}. */
    protected int sizeLimination = 10000;
    protected final IDataNodeExecutor currentExecotor;
    protected final ExecutionContext executionContext;
    /** Unifies column layout across sub-cursors, whose meta data may be ordered differently. */
    protected ValueMappingIRowSetConvertor valueMappingIRowSetConvertor = new ValueMappingIRowSetConvertor();
    /** Index of the sub-cursor currently being drained. */
    protected int currentIndex = 0;
    /** Exceptions collected while closing exhausted sub-cursors; re-reported by {@link #close}. */
    List<TddlException> exceptionsWhenCloseSubCursor = new ArrayList<TddlException>();
    /** Cached return columns of the last mget execution (or of the first sub-cursor). */
    private List<ColumnMeta> returnColumns = null;

    /**
     * Creates a merge cursor whose order-by metadata is taken from the first sub-cursor.
     */
    public MergeCursor(List<ISchematicCursor> cursors, IDataNodeExecutor currentExecotor,
                       ExecutionContext executionContext){
        super(null, null, null);
        this.currentExecotor = currentExecotor;
        this.executionContext = executionContext;
        this.cursors = cursors;
        List<IOrderBy> orderBys = this.cursors.get(0).getOrderBy();
        setOrderBy(orderBys);
    }

    /**
     * Creates a merge cursor with explicit cursor meta and order-by information.
     */
    public MergeCursor(List<ISchematicCursor> cursors, ICursorMeta iCursorMeta, IDataNodeExecutor currentExecotor,
                       ExecutionContext executionContext, List<IOrderBy> orderBys){
        super(null, iCursorMeta, orderBys);
        this.cursors = cursors;
        this.currentExecotor = currentExecotor;
        this.executionContext = executionContext;
        setOrderBy(orderBys);
    }

    @Override
    protected void init() throws TddlException {
        if (this.inited) {
            return;
        }
        super.init();
    }

    @Override
    public IRowSet next() throws TddlException {
        init();
        /*
         * Sub-cursors and the first cursor may order their meta data differently. For
         * example cursor 1 may expose (pk, name) while cursor 2 exposes (name, pk); the
         * per-cursor meta information has to be unified here before returning the row.
         */
        IRowSet iRowSet = innerNext();
        return iRowSet;
    }

    /** Returns the next row, transparently advancing to the next sub-cursor when one is exhausted. */
    private IRowSet innerNext() throws TddlException {
        init();
        IRowSet ret;
        while (true) {
            if (currentIndex >= cursors.size()) { // all cursors exhausted
                return null;
            }
            ISchematicCursor isc = cursors.get(currentIndex);
            ret = isc.next();
            if (ret != null) {
                ret = valueMappingIRowSetConvertor.wrapValueMappingIRowSetIfNeed(ret);
                return ret;
            }
            switchCursor();
        }
    }

    /** Closes the exhausted sub-cursor and advances to the next one. */
    private void switchCursor() {
        cursors.get(currentIndex).close(exceptionsWhenCloseSubCursor);
        currentIndex++;
        // Each cursor has its own value mapping, so the cached mapping must be reset.
        valueMappingIRowSetConvertor.reset();
    }

    @Override
    public List<TddlException> close(List<TddlException> exs) {
        exs.addAll(exceptionsWhenCloseSubCursor);
        for (ISchematicCursor _cursor : cursors) {
            exs = _cursor.close(exs);
        }
        return exs;
    }

    @Override
    public boolean skipTo(CloneableRecord key) throws TddlException {
        init();
        return super.skipTo(key);
    }

    @Override
    public List<ISchematicCursor> getISchematicCursors() {
        return cursors;
    }

    @Override
    public List<DuplicateKVPair> mgetWithDuplicateList(List<CloneableRecord> keys, boolean prefixMatch,
                                                       boolean keyFilterOrValueFilter) throws TddlException {
        logger.error("do mgetWith Duplicatelist in mergeCursor. should not be here");
        init();
        Map<CloneableRecord, DuplicateKVPair> map = parentCursorMgetWithDuplicate(keys,
            prefixMatch,
            keyFilterOrValueFilter);
        return new ArrayList<DuplicateKVPair>(map.values());
    }

    /**
     * Batch get: rebuilds the underlying query with an IN condition over the requested keys,
     * lets the optimizer re-plan it, executes it, and groups the resulting rows by key,
     * chaining duplicates.
     */
    @Override
    public Map<CloneableRecord, DuplicateKVPair> mgetWithDuplicate(List<CloneableRecord> keys, boolean prefixMatch,
                                                                   boolean keyFilterOrValueFilter) throws TddlException {
        init();
        if (prefixMatch) {
            throw new UnsupportedOperationException("not supported yet");
        } else {
            IQuery iquery = (IQuery) currentExecotor;
            OptimizerContext optimizerContext = OptimizerContext.getContext();
            IBooleanFilter ibf = ASTNodeFactory.getInstance().createBooleanFilter();
            ibf.setOperation(OPERATION.IN);
            ibf.setValues(new ArrayList<Object>());
            String colName = null;
            for (CloneableRecord record : keys) {
                Map<String, Object> recordMap = record.getMap();
                if (recordMap.size() == 1) {
                    // Single-column IN: the column is set (repeatedly, to the same value) and
                    // each record contributes one value.
                    Entry<String, Object> entry = recordMap.entrySet().iterator().next();
                    Object comp = entry.getValue();
                    colName = entry.getKey();
                    IColumn col = ASTNodeFactory.getInstance()
                        .createColumn()
                        .setColumnName(colName)
                        .setDataType(DATA_TYPE.LONG_VAL);
                    col.setTableName(iquery.getAlias());
                    ibf.setColumn(col);
                    ibf.getValues().add(comp);
                } else {
                    // Multi-column IN: the column side is a ROW(...) of the key columns, built
                    // once, and EVERY record contributes a ROW(...) of its values. (The previous
                    // code skipped the values of the first record.)
                    if (ibf.getColumn() == null) {
                        ibf.setColumn(buildRowFunction(recordMap.keySet(), true));
                    }
                    ibf.getValues().add(buildRowFunction(recordMap.values(), false));
                }
            }
            KVIndexNode query = new KVIndexNode(iquery.getIndexName());
            query.select(iquery.getColumns());
            query.setLimitFrom(iquery.getLimitFrom());
            query.setLimitTo(iquery.getLimitTo());
            query.setOrderBys(iquery.getOrderBys());
            query.setGroupBys(iquery.getGroupBys());
            // Build the condition directly as a WHERE clause and let the optimizer re-plan it.
            IFilter whereFilter = FilterUtils.and(iquery.getKeyFilter(), iquery.getValueFilter());
            query.query(FilterUtils.and(removeDupFilter(whereFilter, ibf), ibf));
            query.alias(iquery.getAlias());
            query.build();
            IDataNodeExecutor idne = null;
            // Optimization: pin the work to the thread the current executor runs on.
            Integer currentThread = currentExecotor.getThread();
            executionContext.getExtraCmds().put("initThread", currentThread);
            // TODO: consider caching the optimized plan in the future.
            idne = optimizerContext.getOptimizer().optimizeAndAssignment(query,
                executionContext.getParams(),
                executionContext.getExtraCmds());
            ISchematicCursor cursor = null;
            Map<CloneableRecord, DuplicateKVPair> duplicateKeyMap = null;
            try {
                ExecutionContext tempContext = new ExecutionContext();
                tempContext.setCurrentRepository(executionContext.getCurrentRepository());
                tempContext.setExecutorService(executionContext.getExecutorService());
                cursor = ExecutorContext.getContext().getTopologyExecutor().execByExecPlanNode(idne, tempContext);
                // Cached so getReturnColumns() keeps working after the cursor is closed below.
                this.returnColumns = cursor.getReturnColumns();
                List<IColumn> cols = new ArrayList<IColumn>();
                if (ibf.getColumn() instanceof IColumn) {
                    cols.add((IColumn) ibf.getColumn());
                } else {
                    cols.addAll(((IFunction) ibf.getColumn()).getArgs());
                }
                duplicateKeyMap = buildDuplicateKVPairMap(cols, cursor);
            } finally {
                if (cursor != null) {
                    List<TddlException> exs = new ArrayList<TddlException>();
                    exs = cursor.close(exs);
                    if (!exs.isEmpty()) {
                        throw GeneralUtil.mergeException(exs);
                    }
                }
            }
            return duplicateKeyMap;
        }
    }

    /**
     * Builds a ROW(...) function either over column names (isColumn == true) or over
     * literal values, for use in a multi-column IN condition.
     */
    private IFunction buildRowFunction(Collection values, boolean isColumn) {
        IFunction func = ASTNodeFactory.getInstance().createFunction();
        func.setFunctionName("ROW");
        StringBuilder columnName = new StringBuilder();
        columnName.append('(').append(StringUtils.join(values, ',')).append(')');
        func.setColumnName(columnName.toString());
        if (isColumn) {
            List<IColumn> columns = new ArrayList<IColumn>(values.size());
            for (Object value : values) {
                IColumn col = ASTNodeFactory.getInstance()
                    .createColumn()
                    .setColumnName((String) value)
                    .setDataType(DATA_TYPE.LONG_VAL);
                columns.add(col);
            }
            func.setArgs(columns);
        } else {
            func.setArgs(new ArrayList(values));
        }
        return func;
    }

    /**
     * Removes from {@code srcFilter} every condition on the same column as the IN filter,
     * so that e.g. an existing id condition is replaced by the freshly built id IN (...).
     *
     * @param srcFilter the original filter (may be null)
     * @param inFilter  the IN filter about to be AND-ed in
     */
    private IFilter removeDupFilter(IFilter srcFilter, IBooleanFilter inFilter) {
        List<List<IFilter>> filters = FilterUtils.toDNFNodesArray(srcFilter);
        List<List<IFilter>> newFilters = new ArrayList<List<IFilter>>();
        for (List<IFilter> sf : filters) {
            List<IFilter> newSf = new ArrayList<IFilter>();
            for (IFilter f : sf) {
                if (!((IBooleanFilter) f).getColumn().equals(inFilter.getColumn())) {
                    newSf.add(f);
                }
            }
            newFilters.add(newSf);
        }
        return FilterUtils.DNFToOrLogicTree(newFilters);
    }

    /**
     * Groups the rows of {@code cursor} by the values of {@code cols}, chaining rows that
     * share the same key through {@link DuplicateKVPair#next}.
     *
     * @throws IllegalArgumentException when more than {@link #sizeLimination} rows are read
     */
    private Map<CloneableRecord, DuplicateKVPair> buildDuplicateKVPairMap(List<IColumn> cols, ISchematicCursor cursor)
        throws TddlException {
        Map<CloneableRecord, DuplicateKVPair> duplicateKeyMap = new HashMap<CloneableRecord, DuplicateKVPair>();
        IRowSet kvPair;
        int count = 0;
        List<IColumn> icols = new ArrayList<IColumn>();
        List<ColumnMeta> colMetas = new ArrayList<ColumnMeta>();
        for (IColumn c : cols) {
            ISelectable icol = c.copy();
            icol.setTableName(null);
            colMetas.add(ExecUtils.getColumnMeta(icol));
            icols.add((IColumn) icol);
        }
        RecordCodec codec = CodecFactory.getInstance(CodecFactory.FIXED_LENGTH).getCodec(colMetas);
        while ((kvPair = cursor.next()) != null) {
            kvPair = ExecUtils.fromIRowSetToArrayRowSet(kvPair);
            CloneableRecord key = codec.newEmptyRecord();
            for (IColumn icol : icols) {
                Object v = ExecUtils.getValueByIColumn(kvPair, icol);
                key.put(icol.getColumnName(), v);
            }
            DuplicateKVPair tempKVPair = duplicateKeyMap.get(key);
            if (tempKVPair == null) { // first occurrence of this key
                tempKVPair = new DuplicateKVPair(kvPair);
                duplicateKeyMap.put(key, tempKVPair);
            } else { // duplicate key: append to the end of the chain
                while (tempKVPair.next != null) {
                    tempKVPair = tempKVPair.next;
                }
                tempKVPair.next = new DuplicateKVPair(kvPair);
            }
            count++;
            if (count >= sizeLimination) { // guard against unbounded result sets
                throw new IllegalArgumentException("size is more than limination " + sizeLimination);
            }
        }
        return duplicateKeyMap;
    }

    @Override
    public void beforeFirst() throws TddlException {
        init();
        for (int i = 0; i < cursors.size(); i++) {
            cursors.get(i).beforeFirst();
        }
    }

    @Override
    public String toString() {
        return toStringWithInden(0);
    }

    @Override
    public String toStringWithInden(int inden) {
        String tabTittle = GeneralUtil.getTab(inden);
        String tabContent = GeneralUtil.getTab(inden + 1);
        StringBuilder sb = new StringBuilder();
        GeneralUtil.printlnToStringBuilder(sb, tabTittle + "MergeCursor ");
        GeneralUtil.printAFieldToStringBuilder(sb, "orderBy", this.orderBys, tabContent);
        for (ISchematicCursor cursor : cursors) {
            sb.append(cursor.toStringWithInden(inden + 1));
        }
        return sb.toString();
    }

    @Override
    public List<ColumnMeta> getReturnColumns() throws TddlException {
        if (this.returnColumns != null) {
            return this.returnColumns;
        }
        if (this.cursors != null && !cursors.isEmpty()) {
            this.returnColumns = cursors.get(0).getReturnColumns();
        }
        return this.returnColumns;
    }
}
| apache-2.0 |
WellingtonCosta/convalida | convalida-compiler/src/main/java/convalida/compiler/Preconditions.java | 8417 | package convalida.compiler;
import javax.lang.model.element.*;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static convalida.compiler.Constants.EDIT_TEXT;
import static convalida.compiler.Messager.error;
import static javax.lang.model.element.ElementKind.CLASS;
import static javax.lang.model.element.ElementKind.FIELD;
import static javax.lang.model.element.Modifier.PRIVATE;
import static javax.lang.model.element.Modifier.STATIC;
/**
* @author wellingtoncosta on 02/04/18
*/
class Preconditions {
// Can not be instantiated
private Preconditions() { }
static boolean methodHasParams(
ExecutableElement method,
Class<? extends Annotation> annotationClass
) {
boolean hasParams = false;
if (method.getParameters().size() > 0) {
hasParams = true;
error(method, "Method annotated with @%s can not have parameters.",annotationClass.getSimpleName());
}
return hasParams;
}
static boolean methodHasNoOneParameterOfType(
ExecutableElement method,
Class<? extends Annotation> annotationClass,
String type
) {
boolean hasNoParams = method.getParameters().isEmpty();
if(hasNoParams) {
return false;
}
boolean hasMoreThanOneParam = method.getParameters().size() > 1;
boolean firstParamIsNotTypeOf = !isTypeOf(method.getParameters().get(0), type);
if(hasMoreThanOneParam || firstParamIsNotTypeOf) {
error(
method,
"Method annotated with @%s must have one parameter of type %s.",
annotationClass.getSimpleName(),
type
);
return true;
}
return false;
}
private static boolean isTypeOf(VariableElement variable, String type) {
return variable.asType().toString().equals(type);
}
static boolean hasMoreThanOneMethodsAnnotatedWith(
Element parent,
Class<? extends Annotation> annotationClass) {
boolean hasMoreThanOneElement = false;
List<Element> elements = new ArrayList<>();
for(Element e : parent.getEnclosedElements()) {
if(e.getAnnotation(annotationClass) != null) {
elements.add(e);
}
}
if (elements.size() > 1) {
hasMoreThanOneElement = true;
error(
parent,
"The class %s must have only one element annotated with @%s.",
parent.getSimpleName(),
annotationClass.getSimpleName()
);
}
return hasMoreThanOneElement;
}
static boolean hasNoMethodAnnotatedWith(
Element parent,
Class<? extends Annotation> annotationClass) {
boolean hasNoElements = false;
List<Element> elements = new ArrayList<>();
for(Element e : parent.getEnclosedElements()) {
if(e.getAnnotation(annotationClass) != null) {
elements.add(e);
}
}
if (elements.size() == 0) {
hasNoElements = true;
error(
parent,
"The class %s must have one method annotated with @%s.",
parent.getSimpleName(),
annotationClass.getSimpleName()
);
}
return hasNoElements;
}
static boolean confirmValidationElementsHasError(
Class<? extends Annotation> primaryAnnotation,
Class<? extends Annotation> confirmAnnotation,
Element element) {
boolean hasError = false;
String primaryAnnotationClassName = primaryAnnotation.getSimpleName();
String confirmAnnotationClassName = confirmAnnotation.getSimpleName();
int elementsAnnotatedWithPrimaryValidation = 0;
int elementsAnnotatedWithConfirmValidation = 0;
List<? extends Element> elementsOfParent = element.getEnclosingElement().getEnclosedElements();
for(int i = 0; i < elementsOfParent.size(); i++) {
if(elementsOfParent.get(i).getAnnotation(primaryAnnotation) != null) {
elementsAnnotatedWithPrimaryValidation ++;
}
if(elementsOfParent.get(i).getAnnotation(confirmAnnotation) != null) {
elementsAnnotatedWithConfirmValidation ++;
}
}
if (elementsAnnotatedWithPrimaryValidation == 0 && elementsAnnotatedWithConfirmValidation > 0) {
hasError = true;
TypeElement enclosingElement = (TypeElement) element.getEnclosingElement();
error(
element.getEnclosingElement(),
"%s must have at least one element annotated with @%s.",
enclosingElement.getSimpleName(),
primaryAnnotationClassName
);
}
if (elementsAnnotatedWithConfirmValidation > 1) {
hasError = true;
TypeElement enclosingElement = (TypeElement) element.getEnclosingElement();
error(
element.getEnclosingElement(),
"%s must have only one element annotated with @%s.",
enclosingElement.getSimpleName(),
confirmAnnotationClassName
);
}
return hasError;
}
static boolean isInvalid(Class<? extends Annotation> annotationClass, Element element) {
TypeElement enclosingElement = (TypeElement) element.getEnclosingElement();
String elementType = element.asType().toString();
boolean hasError = false;
// Verify element kind
if (!element.getKind().equals(FIELD)) {
error(
element,
"@%s must only be applied in fields. (%s.%s)",
annotationClass.getSimpleName(),
enclosingElement.getQualifiedName(),
element.getSimpleName()
);
hasError = true;
}
// Verify element type
// TODO improve this check to allow use subtypes of EditText
if (!EDIT_TEXT.toString().equals(elementType)) {
error(
element,
"@%s must only be applied in fields of the type TextInputLaytout or EditText. (%s.%s)",
annotationClass.getSimpleName(),
enclosingElement.getQualifiedName(),
element.getSimpleName()
);
hasError = true;
}
return hasError;
}
static boolean isInaccessible(Class<? extends Annotation> annotationClass, Element element) {
TypeElement enclosingElement = (TypeElement) element.getEnclosingElement();
boolean hasError = false;
// Verify element modifiers
Set<Modifier> modifiers = element.getModifiers();
if (modifiers.contains(PRIVATE) || modifiers.contains(STATIC)) {
error(
element,
"@%s must not be applied in private or static fields. (%s.%s)",
annotationClass.getSimpleName(),
enclosingElement.getQualifiedName(),
element.getSimpleName()
);
hasError = true;
}
// Verify containing type
if (enclosingElement.getKind() != CLASS) {
error(
enclosingElement,
"@%s fields may only be contained in classes. (%s.%s)",
annotationClass.getSimpleName(),
enclosingElement.getQualifiedName(),
element.getSimpleName()
);
hasError = true;
}
// Verify containing class visibility is not private
if (enclosingElement.getModifiers().contains(PRIVATE)) {
error(
enclosingElement,
"@%s fields may not be contained in private classes. (%s.%s)",
annotationClass.getSimpleName(),
enclosingElement.getQualifiedName(),
element.getSimpleName()
);
hasError = true;
}
return hasError;
}
}
| apache-2.0 |
peteriliev/kata | InsertionSort/src/main/java/InsertionSort32.java | 390 | package main.java;
/**
 * In-place insertion sort over arrays of mutually comparable elements.
 */
public class InsertionSort32 {
    /**
     * Sorts the given array into ascending natural order, in place, using a stable
     * insertion sort: each element is inserted into the already-sorted prefix.
     *
     * @param a   the array to sort
     * @param <T> the element type, comparable to itself
     */
    public static <T extends Comparable<T>> void sort(final T[] a) {
        for (int i = 1; i < a.length; i++) {
            final T key = a[i];
            int j = i - 1;
            // Shift every strictly greater element one slot right, then drop the key in.
            while (j >= 0 && a[j].compareTo(key) > 0) {
                a[j + 1] = a[j];
                j--;
            }
            a[j + 1] = key;
        }
    }
}
| apache-2.0 |
mrprona92/SecretBrand | app/src/main/java/com/mrprona/dota2assitant/hero/task/SkillsLoadRequest.java | 1005 | package com.mrprona.dota2assitant.hero.task;
import android.content.Context;
import com.mrprona.dota2assitant.R;
import com.mrprona.dota2assitant.base.service.TaskRequest;
import com.mrprona.dota2assitant.base.util.FileUtils;
import com.mrprona.dota2assitant.hero.api.Skill;
import com.google.gson.Gson;
/**
* Created by ABadretdinov
* 20.08.2015
* 16:38
*/
/**
 * Background task that loads the localized skill list of one hero from the
 * application's bundled assets and deserializes it with Gson.
 */
public class SkillsLoadRequest extends TaskRequest<Skill.List> {

    // Context used to resolve the locale string and to open assets.
    private final Context mContext;
    // Dota identifier of the hero whose skills are being loaded.
    private final String mHeroDotaId;

    public SkillsLoadRequest(Context context, String heroDotaId) {
        super(Skill.List.class);
        mContext = context;
        mHeroDotaId = heroDotaId;
    }

    @Override
    public Skill.List loadData() throws Exception {
        // Asset layout: heroes/<heroDotaId>/skills_<locale>.json, where the
        // locale suffix comes from the R.string.language resource.
        String assetPath = "heroes/" + mHeroDotaId + "/skills_"
                + mContext.getString(R.string.language) + ".json";
        String json = FileUtils.getTextFromAsset(mContext, assetPath);
        return new Gson().fromJson(json, Skill.List.class);
    }
}
| apache-2.0 |
aykuttasil/AzureMobileService | app/src/androidTest/java/com/a/aykut/tryazuremobileservices/ApplicationTest.java | 357 | package com.a.aykut.tryazuremobileservices;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
// Boilerplate test case generated by the Android tooling: runs the stock
// Application class through ApplicationTestCase with no custom assertions.
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        // Tell the harness which Application class to instantiate.
        super(Application.class);
    }
}
mufaddalq/cloudstack-datera-driver | api/src/org/apache/cloudstack/api/response/ImageStoreResponse.java | 3971 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.response;
import java.util.LinkedHashSet;
import java.util.Set;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.BaseResponse;
import org.apache.cloudstack.api.EntityReference;
import com.cloud.serializer.Param;
import com.cloud.storage.ImageStore;
import com.cloud.storage.ScopeType;
import com.google.gson.annotations.SerializedName;
/**
 * API response object describing an image (secondary-storage) store.
 *
 * Field names exposed to clients come from the {@code @SerializedName}
 * annotations (consumed by Gson), while {@code @Param} supplies the API
 * documentation text; the Java field names themselves are internal.
 * NOTE(review): presumably instances are populated reflectively by the API
 * layer, so field annotations must stay in sync with the API contract —
 * verify before renaming anything here.
 */
@EntityReference(value=ImageStore.class)
public class ImageStoreResponse extends BaseResponse {
    @SerializedName("id") @Param(description="the ID of the image store")
    private String id;

    @SerializedName("zoneid") @Param(description="the Zone ID of the image store")
    private String zoneId;

    @SerializedName(ApiConstants.ZONE_NAME) @Param(description="the Zone name of the image store")
    private String zoneName;

    @SerializedName("name") @Param(description="the name of the image store")
    private String name;

    @SerializedName("url") @Param(description="the url of the image store")
    private String url;

    @SerializedName("protocol") @Param(description="the protocol of the image store")
    private String protocol;

    @SerializedName("providername") @Param(description="the provider name of the image store")
    private String providerName;

    @SerializedName("scope") @Param(description="the scope of the image store")
    private ScopeType scope;

    @SerializedName("details") @Param(description="the details of the image store")
    private Set<ImageStoreDetailResponse> details;

    public ImageStoreResponse(){
        // LinkedHashSet so detail entries keep their insertion order in the
        // serialized response.
        this.details = new LinkedHashSet<ImageStoreDetailResponse>();
    }

    // The store's id doubles as the response object's identity.
    @Override
    public String getObjectId() {
        return this.getId();
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getZoneId() {
        return zoneId;
    }

    public void setZoneId(String zoneId) {
        this.zoneId = zoneId;
    }

    public String getZoneName() {
        return zoneName;
    }

    public void setZoneName(String zoneName) {
        this.zoneName = zoneName;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getProviderName() {
        return providerName;
    }

    public void setProviderName(String providerName) {
        this.providerName = providerName;
    }

    public ScopeType getScope() {
        return scope;
    }

    public void setScope(ScopeType type) {
        this.scope = type;
    }

    public String getProtocol() {
        return protocol;
    }

    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }

    // NOTE(review): returns the internal mutable set directly; callers can
    // modify it. Looks intentional for this DTO style — confirm before changing.
    public Set<ImageStoreDetailResponse> getDetails() {
        return details;
    }

    public void setDetails(Set<ImageStoreDetailResponse> details) {
        this.details = details;
    }

    // Convenience appender used while assembling the response.
    public void addDetail(ImageStoreDetailResponse detail){
        this.details.add(detail);
    }
}
| apache-2.0 |
axmadjon/AndroidSlidingMenu | app/src/main/java/uz/greenwhite/slidingmenu/support/v10/volley/lib/request/toolbox/RequestFuture.java | 4235 | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uz.greenwhite.slidingmenu.support.v10.volley.lib.request.toolbox;
import uz.greenwhite.slidingmenu.support.v10.volley.lib.request.Request;
import uz.greenwhite.slidingmenu.support.v10.volley.lib.request.Response;
import uz.greenwhite.slidingmenu.support.v10.volley.lib.request.VolleyError;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* A Future that represents a Volley request.
*
* Used by providing as your response and error listeners. For example:
* <pre>
* RequestFuture<JSONObject> future = RequestFuture.newFuture();
* MyRequest request = new MyRequest(URL, future, future);
*
* // If you want to be able to cancel the request:
* future.setRequest(requestQueue.add(request));
*
* // Otherwise:
* requestQueue.add(request);
*
* try {
* JSONObject response = future.get();
* // do something with response
* } catch (InterruptedException e) {
* // handle the error
* } catch (ExecutionException e) {
* // handle the error
* }
* </pre>
*
* @param <T> The type of parsed response this future expects.
*/
public class RequestFuture<T> implements Future<T>, Response.Listener<T>,
        Response.ErrorListener {
    /** The in-flight request, if {@link #setRequest} was called; used for cancellation. */
    private Request<?> mRequest;
    /** Guarded by {@code this}: true once {@link #onResponse} delivered a value. */
    private boolean mResultReceived = false;
    /** Guarded by {@code this}: the delivered value (may itself be null on success). */
    private T mResult;
    /** Guarded by {@code this}: the delivered error, if the request failed. */
    private VolleyError mException;

    /** Static factory; the constructor is private so the generic type is always inferred. */
    public static <E> RequestFuture<E> newFuture() {
        return new RequestFuture<E>();
    }

    private RequestFuture() {}

    /** Associates the in-flight request so {@link #cancel} can abort it. */
    public void setRequest(Request<?> request) {
        mRequest = request;
    }

    @Override
    public synchronized boolean cancel(boolean mayInterruptIfRunning) {
        if (mRequest == null) {
            return false;
        }
        // Only cancel if nothing has been delivered yet.
        if (!isDone()) {
            mRequest.cancel();
            return true;
        } else {
            return false;
        }
    }

    @Override
    public T get() throws InterruptedException, ExecutionException {
        try {
            return doGet(null);
        } catch (TimeoutException e) {
            // doGet(null) waits without a deadline, so a timeout is impossible
            // unless the request was cancelled out from under us.
            throw new AssertionError(e);
        }
    }

    @Override
    public T get(long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException {
        return doGet(TimeUnit.MILLISECONDS.convert(timeout, unit));
    }

    /**
     * Blocks until a result or error is delivered (or the deadline passes).
     *
     * BUG FIX: the previous implementation issued a single bare {@code wait(...)}
     * call. {@link Object#wait} is allowed to return spuriously, which made the
     * untimed {@code get()} throw {@link AssertionError} and the timed variant
     * time out early (or report a bogus timeout) on a spurious wakeup. Both
     * waits now loop, and the timed wait recomputes the remaining time against
     * a monotonic {@link System#nanoTime} deadline after every wakeup.
     *
     * @param timeoutMs maximum time to wait in milliseconds, or {@code null} to
     *        wait indefinitely
     * @throws ExecutionException if the request failed with a {@link VolleyError}
     * @throws TimeoutException if no result arrived within the deadline
     */
    private synchronized T doGet(Long timeoutMs)
            throws InterruptedException, ExecutionException, TimeoutException {
        if (mException != null) {
            throw new ExecutionException(mException);
        }
        if (mResultReceived) {
            return mResult;
        }
        if (timeoutMs == null) {
            // No deadline: wait until a result, an error or a cancellation
            // arrives. The loop guards against spurious wakeups.
            while (!isDone()) {
                wait(0);
            }
        } else if (timeoutMs > 0) {
            // Deadline-based wait; re-derive the remaining time after every
            // wakeup so spurious wakeups neither extend nor cut the wait short.
            long nanos = TimeUnit.MILLISECONDS.toNanos(timeoutMs);
            final long deadline = System.nanoTime() + nanos;
            while (!isDone() && nanos > 0) {
                TimeUnit.NANOSECONDS.timedWait(this, nanos);
                nanos = deadline - System.nanoTime();
            }
        }
        if (mException != null) {
            throw new ExecutionException(mException);
        }
        if (!mResultReceived) {
            throw new TimeoutException();
        }
        return mResult;
    }

    @Override
    public boolean isCancelled() {
        if (mRequest == null) {
            return false;
        }
        return mRequest.isCanceled();
    }

    @Override
    public synchronized boolean isDone() {
        return mResultReceived || mException != null || isCancelled();
    }

    @Override
    public synchronized void onResponse(T response) {
        mResultReceived = true;
        mResult = response;
        notifyAll();
    }

    @Override
    public synchronized void onErrorResponse(VolleyError error) {
        mException = error;
        notifyAll();
    }
}
| apache-2.0 |
antkar/syn | syn/src/main/java/org/antkar/syn/internal/ebnf/EbnfOptionalElement.java | 2380 | /*
* Copyright 2013 Anton Karmanov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.antkar.syn.internal.ebnf;
import java.util.List;
import org.antkar.syn.SynException;
import org.antkar.syn.TextPos;
import org.antkar.syn.internal.bnf.BnfElement;
import org.antkar.syn.internal.bnf.BnfNonterminal;
import org.antkar.syn.internal.bnf.BnfProduction;
import org.antkar.syn.internal.grammar.EbnfToBnfConverter;
import org.antkar.syn.internal.parser.IParserAction;
import org.antkar.syn.internal.parser.ParserNullAction;
import org.antkar.syn.internal.parser.ParserObjectAction;
/**
* EBNF optional element.
*/
/**
 * EBNF optional element, i.e. {@code ( body )?}. During BNF conversion an
 * empty production is appended to the body's productions, so the generated
 * nonterminal matches either the body or nothing.
 */
public final class EbnfOptionalElement extends EbnfEmbeddedElement {

    public EbnfOptionalElement(String key, TextPos keyPos, EbnfProductions body) {
        super(key, keyPos, body);
    }

    @Override
    public BnfElement convert(EbnfToBnfConverter converter, String currentNt) throws SynException {
        boolean embedded = hasEmbeddedObject();

        // Convert the productions of the optional body.
        List<BnfProduction> productions =
            converter.convertProductions(currentNt, getBody().asList(), embedded);

        // Append the "absent" alternative: an empty production whose semantic
        // value is null (wrapped into an object when an embedded object exists).
        IParserAction emptyAction;
        if (embedded) {
            emptyAction = ParserObjectAction.NULL;
        } else {
            emptyAction = ParserNullAction.INSTANCE;
        }
        productions.add(converter.createProduction(emptyAction));

        // Wrap all alternatives into a fresh anonymous nonterminal.
        return converter.createAnonymousNonterminal(currentNt, productions);
    }

    @Override
    public <T> T invokeProcessor(EbnfElementProcessor<T> processor) throws SynException {
        return processor.processOptionalElement(this);
    }

    @Override
    public String toString() {
        return "(" + getBody() + ")?";
    }
}
| apache-2.0 |
vergilchiu/hive | ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java | 8930 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.tez;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.ObjectCacheFactory;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorUtils;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
import org.apache.hadoop.hive.ql.io.merge.MergeFileWork;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.StringUtils;
import org.apache.tez.mapreduce.input.MRInputLegacy;
import org.apache.tez.mapreduce.processor.MRTaskReporter;
import org.apache.tez.runtime.api.Input;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.api.LogicalOutput;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.library.api.KeyValueReader;
import com.google.common.collect.Lists;
/**
* Record processor for fast merging of files.
*/
public class MergeFileRecordProcessor extends RecordProcessor {

  public static final Logger LOG = LoggerFactory
      .getLogger(MergeFileRecordProcessor.class);

  // Root operator of the merge plan; fed one (key, value) pair per record.
  protected Operator<? extends OperatorDesc> mergeOp;
  private ExecMapperContext execContext = null;
  // Cache key under which the deserialized MapWork plan is stored/released.
  protected static final String MAP_PLAN_KEY = "__MAP_PLAN__";
  private String cacheKey;
  private MergeFileWork mfWork;
  MRInputLegacy mrInput = null;
  // Reusable 2-slot row handed to the operator tree (slot 0 = key, 1 = value).
  private final Object[] row = new Object[2];
  org.apache.hadoop.hive.ql.exec.ObjectCache cache;

  public MergeFileRecordProcessor(final JobConf jconf, final ProcessorContext context) {
    super(jconf, context);
  }

  /**
   * Initializes the processor: wires the single MRInput, starts the outputs,
   * loads (or retrieves from cache) the MergeFileWork plan, and initializes
   * the merge operator tree.
   */
  @Override
  void init(
      MRTaskReporter mrReporter, Map<String, LogicalInput> inputs,
      Map<String, LogicalOutput> outputs) throws Exception {
    // TODO HIVE-14042. Abort handling.
    perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.TEZ_INIT_OPERATORS);
    super.init(mrReporter, inputs, outputs);
    execContext = new ExecMapperContext(jconf);
    //Update JobConf using MRInput, info like filename comes via this
    mrInput = getMRInput(inputs);
    Configuration updatedConf = mrInput.getConfigUpdates();
    if (updatedConf != null) {
      // Fold MRInput-provided config (e.g. input file info) into the job conf.
      for (Map.Entry<String, String> entry : updatedConf) {
        jconf.set(entry.getKey(), entry.getValue());
      }
    }
    createOutputMap();
    // Start all the Outputs.
    for (Map.Entry<String, LogicalOutput> outputEntry : outputs.entrySet()) {
      outputEntry.getValue().start();
      ((TezProcessor.TezKVOutputCollector) outMap.get(outputEntry.getKey()))
          .initialize();
    }
    String queryId = HiveConf.getVar(jconf, HiveConf.ConfVars.HIVEQUERYID);
    cache = ObjectCacheFactory.getCache(jconf, queryId, true);
    try {
      execContext.setJc(jconf);
      cacheKey = MAP_PLAN_KEY;
      // Retrieve the plan from the query-scoped cache, deserializing it from
      // the job conf only on a cache miss.
      MapWork mapWork = (MapWork) cache.retrieve(cacheKey, new Callable<Object>() {
        @Override
        public Object call() {
          return Utilities.getMapWork(jconf);
        }
      });
      Utilities.setMapWork(jconf, mapWork);
      if (mapWork instanceof MergeFileWork) {
        mfWork = (MergeFileWork) mapWork;
      } else {
        throw new RuntimeException("MapWork should be an instance of MergeFileWork.");
      }
      // A merge task has exactly one alias; its operator is the tree root.
      String alias = mfWork.getAliasToWork().keySet().iterator().next();
      mergeOp = mfWork.getAliasToWork().get(alias);
      LOG.info(mergeOp.dump(0));
      MapredContext.init(true, new JobConf(jconf));
      ((TezContext) MapredContext.get()).setInputs(inputs);
      mergeOp.passExecContext(execContext);
      mergeOp.initializeLocalWork(jconf);
      mergeOp.initialize(jconf, null);
      // Route the operator tree's leaves into the Tez KV output collectors.
      OperatorUtils.setChildrenCollector(mergeOp.getChildOperators(), outMap);
      mergeOp.setReporter(reporter);
      MapredContext.get().setReporter(reporter);
    } catch (Throwable e) {
      if (e instanceof OutOfMemoryError) {
        // will this be true here?
        // Don't create a new object if we are already out of memory
        throw (OutOfMemoryError) e;
      } else if (e instanceof InterruptedException) {
        l4j.info("Hit an interrupt while initializing MergeFileRecordProcessor. Message={}",
            e.getMessage());
        throw (InterruptedException) e;
      } else {
        throw new RuntimeException("Map operator initialization failed", e);
      }
    }
    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.TEZ_INIT_OPERATORS);
  }

  /**
   * Main loop: pushes every input record through the operator tree until the
   * input is exhausted, the tree reports it is done, or an abort is requested.
   */
  @Override
  void run() throws Exception {
    KeyValueReader reader = mrInput.getReader();

    //process records until done
    while (reader.next()) {
      boolean needMore = processRow(reader.getCurrentKey(),
          reader.getCurrentValue());
      if (!needMore || isAborted()) {
        break;
      }
    }
  }

  /**
   * Releases the cached plan, propagates I/O errors into the abort flag and
   * closes the operator tree, reporting stats on a clean shutdown.
   */
  @Override
  void close() {
    if (cache != null && cacheKey != null) {
      cache.release(cacheKey);
    }

    // check if there are IOExceptions
    if (!isAborted()) {
      setAborted(execContext.getIoCxt().getIOExceptions());
    }

    // detecting failed executions by exceptions thrown by the operator tree
    try {
      if (mergeOp == null || mfWork == null) {
        // init() never completed; nothing to close.
        return;
      }
      boolean abort = isAborted();
      mergeOp.close(abort);

      ExecMapper.ReportStats rps = new ExecMapper.ReportStats(reporter, jconf);
      mergeOp.preorderMap(rps);
    } catch (Exception e) {
      if (!isAborted()) {
        // signal new failure to map-reduce
        l4j.error("Hit error while closing operators - failing tree");
        throw new RuntimeException("Hive Runtime Error while closing operators",
            e);
      }
    } finally {
      Utilities.clearWorkMap(jconf);
      MapredContext.close();
    }
  }

  /**
   * @param key key to process
   * @param value value to process
   * @return true if it is not done and can take more inputs
   */
  private boolean processRow(Object key, Object value) {
    // reset the execContext for each new row
    execContext.resetRow();
    try {
      if (mergeOp.getDone()) {
        return false; //done
      } else {
        row[0] = key;
        row[1] = value;
        mergeOp.process(row, 0);
      }
    } catch (Throwable e) {
      setAborted(true);
      if (e instanceof OutOfMemoryError) {
        // Don't create a new object if we are already out of memory
        throw (OutOfMemoryError) e;
      } else {
        l4j.error(StringUtils.stringifyException(e));
        throw new RuntimeException(e);
      }
    }
    return true; //give me more
  }

  /**
   * Locates the single expected MRInputLegacy among the logical inputs,
   * starts it and waits until it is ready.
   *
   * @throws IllegalArgumentException if there is not exactly one MRInputLegacy
   */
  private MRInputLegacy getMRInput(Map<String, LogicalInput> inputs) throws Exception {
    LOG.info("The inputs are: " + inputs);

    // start the mr input and wait for ready event. number of MRInput is expected to be 1
    List<Input> li = Lists.newArrayList();
    int numMRInputs = 0;
    for (LogicalInput inp : inputs.values()) {
      if (inp instanceof MRInputLegacy) {
        numMRInputs++;
        if (numMRInputs > 1) {
          throw new IllegalArgumentException("Only one MRInput is expected");
        }
        inp.start();
        li.add(inp);
      } else {
        throw new IllegalArgumentException("Expecting only one input of type MRInputLegacy." +
            " Found type: " + inp.getClass().getCanonicalName());
      }
    }

    // typically alter table .. concatenate is run on only one partition/one table,
    // so it doesn't matter if we wait for all inputs or any input to be ready.
    processorContext.waitForAnyInputReady(li);

    final MRInputLegacy theMRInput;
    if (li.size() == 1) {
      theMRInput = (MRInputLegacy) li.get(0);
      theMRInput.init();
    } else {
      throw new IllegalArgumentException("MRInputs count is expected to be 1");
    }

    return theMRInput;
  }
}
| apache-2.0 |
nla/amberdb | src/main/java/amberdb/graph/AmberPreCommitHook.java | 3058 | package amberdb.graph;
import amberdb.AmberSession;
import amberdb.model.*;
import com.google.common.collect.ImmutableList;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.frames.FramedGraph;
import com.tinkerpop.frames.modules.typedgraph.TypeValue;
import java.util.ArrayList;
import java.util.List;
/**
 * Base class for hooks that run before an Amber graph commit. Subclasses
 * declare which node types (or Edge) they care about; {@link #hook} filters
 * the changed vertices down to those types and, when {@link #shouldHook}
 * agrees, invokes {@link #runHook} with the framed nodes.
 *
 * @param <T> the framed model type the concrete hook operates on
 */
public abstract class AmberPreCommitHook<T> {

    // Type discriminator values (from each model's @TypeValue annotation)
    // that identify "work"-like vertices.
    public static List<String> WORK_TYPES = ImmutableList.<String>builder().add(
            Work.class.getAnnotation(TypeValue.class).value(),
            EADWork.class.getAnnotation(TypeValue.class).value(),
            Section.class.getAnnotation(TypeValue.class).value()
    ).build();

    // Type discriminator for copy vertices.
    public static List<String> COPY_TYPES = ImmutableList.<String>builder().add(
            Copy.class.getAnnotation(TypeValue.class).value()
    ).build();

    // Type discriminator for sound-file vertices.
    public static List<String> SOUND_FILE_TYPES = ImmutableList.<String>builder().add(
            SoundFile.class.getAnnotation(TypeValue.class).value()
    ).build();

    // Type discriminator for moving-image-file vertices.
    public static List<String> MOVINGIMAGE_FILE_TYPES = ImmutableList.<String>builder().add(
            MovingImageFile.class.getAnnotation(TypeValue.class).value()
    ).build();

    /** Decides whether {@link #runHook} should run for these changes. */
    public abstract boolean shouldHook(List<T> added, List<T> modified, List<T> deleted);

    /** Performs the hook's work; only called when {@link #shouldHook} returned true. */
    public abstract void runHook(List<T> added, List<T> modified, List<T> deleted, AmberSession amberSession);

    /** The type-discriminator strings this hook accepts (see constants above). */
    public abstract List<String> getValidNodeTypes();

    /** The framed class to wrap matching vertices in; {@code Edge.class} for edge hooks. */
    public abstract Class getExpectedNodeType();

    /**
     * Entry point invoked by the commit pipeline. Vertex hooks receive the
     * filtered, framed vertices; edge hooks (getExpectedNodeType() == Edge.class)
     * receive the raw edge lists instead.
     * NOTE(review): the edge lists are raw (untyped) — presumably they hold
     * Edge instances; confirm against the caller before tightening generics.
     */
    public void hook(List<Vertex> addedVertices,
                     List<Vertex> modifiedVertices,
                     List<Vertex> deletedVertices,
                     List addedEdges,
                     List modifiedEdges,
                     List deletedEdges,
                     AmberSession amberSession) {
        List<T> addedNodes = retrieveVerticesOfCorrectTypeForThisHook(addedVertices, amberSession.getGraph());
        List<T> modifiedNodes = retrieveVerticesOfCorrectTypeForThisHook(modifiedVertices, amberSession.getGraph());
        List<T> removedNodes = retrieveVerticesOfCorrectTypeForThisHook(deletedVertices, amberSession.getGraph());
        if (getExpectedNodeType() == Edge.class) {
            if (shouldHook(addedEdges, modifiedEdges, deletedEdges)) {
                runHook(addedEdges, modifiedEdges, deletedEdges, amberSession);
            }
        } else {
            if (shouldHook(addedNodes, modifiedNodes, removedNodes)) {
                runHook(addedNodes, modifiedNodes, removedNodes, amberSession);
            }
        }
    }

    /**
     * Filters vertices to this hook's declared types (matched on the "type"
     * property) and frames each match as {@link #getExpectedNodeType()}.
     */
    public List<T> retrieveVerticesOfCorrectTypeForThisHook(List<Vertex> vertices, FramedGraph graph) {
        List nodesOfCorrectType = new ArrayList();
        for (Vertex element: vertices) {
            if (element.getProperty("type") != null && getValidNodeTypes().contains(element.getProperty("type"))) {
                nodesOfCorrectType.add(graph.frame(element, getExpectedNodeType()));
            }
        }
        return nodesOfCorrectType;
    }
}
| apache-2.0 |
oehf/ipf | commons/ihe/xds/src/main/java/org/openehealth/ipf/commons/ihe/xds/core/requests/query/FindPrescriptionsForDispenseQuery.java | 1593 | /*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openehealth.ipf.commons.ihe.xds.core.requests.query;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import javax.xml.bind.annotation.*;
/**
* Represents a stored query for FindPrescriptionsForDispenseQuery (PHARM-1).
* @author Quentin Ligier
* @since 3.7
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "FindPrescriptionsForDispenseQuery", propOrder = {})
@XmlRootElement(name = "findPrescriptionsForDispenseQuery")
@EqualsAndHashCode(callSuper = true, doNotUseGetters = true)
@ToString(callSuper = true, doNotUseGetters = true)
public class FindPrescriptionsForDispenseQuery extends PharmacyStableDocumentsQuery {
    // Serializable identity; keep stable across releases.
    private static final long serialVersionUID = -2056362879334066497L;

    /**
     * Constructs the query, registering it under the PHARM-1
     * FindPrescriptionsForDispense stored-query type.
     */
    public FindPrescriptionsForDispenseQuery() {
        super(QueryType.FIND_PRESCRIPTIONS_FOR_DISPENSE);
    }

    // Visitor-pattern double dispatch to the handler for this concrete query.
    @Override
    public void accept(Visitor visitor) {
        visitor.visit(this);
    }
}
| apache-2.0 |
francisliu/hbase_namespace | hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeLoadBalancer.java | 7069 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.balancer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.RegionPlan;
/**
* An implementation of the {@link LoadBalancer} that assigns favored nodes for
* each region. There is a Primary RegionServer that hosts the region, and then
* there is Secondary and Tertiary RegionServers. Currently, the favored nodes
* information is used in creating HDFS files - the Primary RegionServer passes
* the primary, secondary, tertiary node addresses as hints to the DistributedFileSystem
* API for creating files on the filesystem. These nodes are treated as hints by
* the HDFS to place the blocks of the file. This alleviates the problem to do with
* reading from remote nodes (since we can make the Secondary RegionServer as the new
* Primary RegionServer) after a region is recovered. This should help provide consistent
* read latencies for the regions even when their primary region servers die.
*
*/
@InterfaceAudience.Private
public class FavoredNodeLoadBalancer extends BaseLoadBalancer {
  private static final Log LOG = LogFactory.getLog(FavoredNodeLoadBalancer.class);

  // Accumulated region -> favored-nodes assignments, queried via getFavoredNodes().
  private FavoredNodes globalFavoredNodesAssignmentPlan;
  private Configuration configuration;

  @Override
  public void setConf(Configuration conf) {
    this.configuration = conf;
    globalFavoredNodesAssignmentPlan = new FavoredNodes();
  }

  // Rebalancing with favored-node awareness is not implemented.
  @Override
  public List<RegionPlan> balanceCluster(Map<ServerName, List<HRegionInfo>> clusterState) {
    //TODO. At a high level, this should look at the block locality per region, and
    //then reassign regions based on which nodes have the most blocks of the region
    //file(s). There could be different ways like minimize region movement, or, maximum
    //locality, etc. The other dimension to look at is whether Stochastic loadbalancer
    //can be integrated with this
    throw new UnsupportedOperationException("Not implemented yet");
  }

  /**
   * Round-robin assignment that also picks secondary/tertiary favored nodes.
   * Falls back to the plain BaseLoadBalancer assignment when favored nodes
   * cannot be placed or anything goes wrong.
   */
  @Override
  public Map<ServerName, List<HRegionInfo>> roundRobinAssignment(List<HRegionInfo> regions,
      List<ServerName> servers) {
    Map<ServerName, List<HRegionInfo>> assignmentMap;
    try {
      FavoredNodeAssignmentHelper assignmentHelper =
          new FavoredNodeAssignmentHelper(servers, configuration);
      assignmentHelper.initialize();
      if (!assignmentHelper.canPlaceFavoredNodes()) {
        return super.roundRobinAssignment(regions, servers);
      }
      assignmentMap = new HashMap<ServerName, List<HRegionInfo>>();
      roundRobinAssignmentImpl(assignmentHelper, assignmentMap, regions, servers);
    } catch (Exception ex) {
      // Favored-node placement is best-effort: never fail the assignment.
      LOG.warn("Encountered exception while doing favored-nodes assignment " + ex +
          " Falling back to regular assignment");
      assignmentMap = super.roundRobinAssignment(regions, servers);
    }
    return assignmentMap;
  }

  /**
   * Random single-region assignment; the randomly chosen server becomes the
   * primary favored node and secondary/tertiary nodes are derived from it.
   * Falls back to the plain random assignment on any error.
   */
  @Override
  public ServerName randomAssignment(HRegionInfo regionInfo, List<ServerName> servers) {
    try {
      FavoredNodeAssignmentHelper assignmentHelper =
          new FavoredNodeAssignmentHelper(servers, configuration);
      assignmentHelper.initialize();
      ServerName primary = super.randomAssignment(regionInfo, servers);
      if (!assignmentHelper.canPlaceFavoredNodes()) {
        return primary;
      }
      List<HRegionInfo> regions = new ArrayList<HRegionInfo>(1);
      regions.add(regionInfo);
      Map<HRegionInfo, ServerName> primaryRSMap = new HashMap<HRegionInfo, ServerName>(1);
      primaryRSMap.put(regionInfo, primary);
      assignSecondaryAndTertiaryNodesForRegion(assignmentHelper, regions, primaryRSMap);
      return primary;
    } catch (Exception ex) {
      LOG.warn("Encountered exception while doing favored-nodes (random)assignment " + ex +
          " Falling back to regular assignment");
      return super.randomAssignment(regionInfo, servers);
    }
  }

  /** Returns the recorded favored nodes for a region, if any were assigned. */
  public List<ServerName> getFavoredNodes(HRegionInfo regionInfo) {
    return this.globalFavoredNodesAssignmentPlan.getFavoredNodes(regionInfo);
  }

  // Places primaries round-robin, then derives secondary/tertiary nodes.
  private void roundRobinAssignmentImpl(FavoredNodeAssignmentHelper assignmentHelper,
      Map<ServerName, List<HRegionInfo>> assignmentMap,
      List<HRegionInfo> regions, List<ServerName> servers) throws IOException {
    Map<HRegionInfo, ServerName> primaryRSMap = new HashMap<HRegionInfo, ServerName>();
    // figure the primary RSs
    assignmentHelper.placePrimaryRSAsRoundRobin(assignmentMap, primaryRSMap, regions);
    assignSecondaryAndTertiaryNodesForRegion(assignmentHelper, regions, primaryRSMap);
  }

  // Derives secondary/tertiary favored nodes for each region's primary and
  // records the full (primary, secondary, tertiary) list in the global plan.
  private void assignSecondaryAndTertiaryNodesForRegion(
      FavoredNodeAssignmentHelper assignmentHelper,
      List<HRegionInfo> regions, Map<HRegionInfo, ServerName> primaryRSMap) {
    // figure the secondary and tertiary RSs
    Map<HRegionInfo, ServerName[]> secondaryAndTertiaryRSMap =
        assignmentHelper.placeSecondaryAndTertiaryRS(primaryRSMap);
    // now record all the assignments so that we can serve queries later
    for (HRegionInfo region : regions) {
      List<ServerName> favoredNodesForRegion = new ArrayList<ServerName>(3);
      favoredNodesForRegion.add(primaryRSMap.get(region));
      ServerName[] secondaryAndTertiaryNodes = secondaryAndTertiaryRSMap.get(region);
      if (secondaryAndTertiaryNodes != null) {
        favoredNodesForRegion.add(secondaryAndTertiaryNodes[0]);
        favoredNodesForRegion.add(secondaryAndTertiaryNodes[1]);
      }
      globalFavoredNodesAssignmentPlan.updateFavoredNodesMap(region, favoredNodesForRegion);
    }
  }

  // Bulk-records externally computed favored-node assignments into the plan.
  void noteFavoredNodes(final Map<HRegionInfo, ServerName[]> favoredNodesMap) {
    for (Map.Entry<HRegionInfo, ServerName[]> entry : favoredNodesMap.entrySet()) {
      globalFavoredNodesAssignmentPlan.updateFavoredNodesMap(entry.getKey(),
          Arrays.asList(entry.getValue()));
    }
  }
}
eFaps/eFapsApp-Sales | src/main/efaps/ESJP/org/efaps/esjp/sales/payment/PaymentSupplierOut_Base.java | 2755 | /*
* Copyright 2003 - 2012 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Revision: $Rev$
* Last Changed: $Date$
* Last Changed By: $Author$
*/
package org.efaps.esjp.sales.payment;
import java.io.File;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Return;
import org.efaps.admin.event.Return.ReturnValues;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.db.Insert;
import org.efaps.esjp.ci.CISales;
import org.efaps.util.EFapsException;
/**
* TODO comment!
*
* @author The eFaps Team
* @version $Id: PaymentExchange_Base.java 8156 2012-11-05 15:32:12Z
* jan@moxter.net $
*/
@EFapsUUID("37a7800c-10c5-4f35-8f8f-ac7c8e3edcdb")
@EFapsApplication("eFapsApp-Sales")
public abstract class PaymentSupplierOut_Base
    extends AbstractPaymentOut
{

    /**
     * Creates the outgoing supplier payment: builds the payment document,
     * registers the payment, and renders the printable report.
     *
     * @param _parameter Parameter as passed by the eFaps API
     * @return Return carrying the report file when one was generated
     * @throws EFapsException on error
     */
    public Return create(final Parameter _parameter)
        throws EFapsException
    {
        final CreatedDoc doc = createDoc(_parameter);
        createPayment(_parameter, doc);

        final Return reply = new Return();
        final File report = createReport(_parameter, doc);
        if (report != null) {
            reply.put(ReturnValues.VALUES, report);
            reply.put(ReturnValues.TRUE, true);
        }
        return reply;
    }

    /**
     * Creates the payment document and, when invoked from a bulk payment,
     * links the new document to that bulk payment.
     */
    @Override
    protected CreatedDoc createDoc(final Parameter _parameter)
        throws EFapsException
    {
        final CreatedDoc created = super.createDoc(_parameter);
        final boolean fromBulkPayment = _parameter.getInstance() != null
                        && _parameter.getInstance().getType().isKindOf(CISales.BulkPayment.getType());
        if (fromBulkPayment) {
            final Insert relInsert = new Insert(CISales.BulkPayment2PaymentDocument);
            relInsert.add(CISales.BulkPayment2PaymentDocument.FromLink, _parameter.getInstance().getId());
            relInsert.add(CISales.BulkPayment2PaymentDocument.ToLink, created.getInstance().getId());
            relInsert.execute();
        }
        return created;
    }
}
| apache-2.0 |
vschs007/buck | src-gen/com/facebook/buck/distributed/thrift/BuildSlaveStatus.java | 57652 | /**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.facebook.buck.distributed.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2017-03-29")
public class BuildSlaveStatus implements org.apache.thrift.TBase<BuildSlaveStatus, BuildSlaveStatus._Fields>, java.io.Serializable, Cloneable, Comparable<BuildSlaveStatus> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BuildSlaveStatus");
private static final org.apache.thrift.protocol.TField STAMPEDE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("stampedeId", org.apache.thrift.protocol.TType.STRUCT, (short)1);
private static final org.apache.thrift.protocol.TField RUN_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("runId", org.apache.thrift.protocol.TType.STRUCT, (short)2);
private static final org.apache.thrift.protocol.TField TOTAL_RULES_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("totalRulesCount", org.apache.thrift.protocol.TType.I32, (short)10);
private static final org.apache.thrift.protocol.TField RULES_STARTED_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("rulesStartedCount", org.apache.thrift.protocol.TType.I32, (short)11);
private static final org.apache.thrift.protocol.TField RULES_FINISHED_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("rulesFinishedCount", org.apache.thrift.protocol.TType.I32, (short)12);
private static final org.apache.thrift.protocol.TField RULES_SUCCESS_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("rulesSuccessCount", org.apache.thrift.protocol.TType.I32, (short)13);
private static final org.apache.thrift.protocol.TField RULES_FAILURE_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("rulesFailureCount", org.apache.thrift.protocol.TType.I32, (short)14);
private static final org.apache.thrift.protocol.TField CACHE_HITS_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("cacheHitsCount", org.apache.thrift.protocol.TType.I32, (short)20);
private static final org.apache.thrift.protocol.TField CACHE_MISSES_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("cacheMissesCount", org.apache.thrift.protocol.TType.I32, (short)21);
private static final org.apache.thrift.protocol.TField CACHE_IGNORES_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("cacheIgnoresCount", org.apache.thrift.protocol.TType.I32, (short)22);
private static final org.apache.thrift.protocol.TField CACHE_ERRORS_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("cacheErrorsCount", org.apache.thrift.protocol.TType.I32, (short)23);
private static final org.apache.thrift.protocol.TField CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("cacheLocalKeyUnchangedHitsCount", org.apache.thrift.protocol.TType.I32, (short)24);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new BuildSlaveStatusStandardSchemeFactory());
schemes.put(TupleScheme.class, new BuildSlaveStatusTupleSchemeFactory());
}
public com.facebook.buck.distributed.thrift.StampedeId stampedeId; // optional
public com.facebook.buck.distributed.thrift.RunId runId; // optional
public int totalRulesCount; // optional
public int rulesStartedCount; // optional
public int rulesFinishedCount; // optional
public int rulesSuccessCount; // optional
public int rulesFailureCount; // optional
public int cacheHitsCount; // optional
public int cacheMissesCount; // optional
public int cacheIgnoresCount; // optional
public int cacheErrorsCount; // optional
public int cacheLocalKeyUnchangedHitsCount; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/* Enumerates every struct field with its thrift id and wire name; used by the (de)serialization schemes. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
STAMPEDE_ID((short)1, "stampedeId"),
RUN_ID((short)2, "runId"),
TOTAL_RULES_COUNT((short)10, "totalRulesCount"),
RULES_STARTED_COUNT((short)11, "rulesStartedCount"),
RULES_FINISHED_COUNT((short)12, "rulesFinishedCount"),
RULES_SUCCESS_COUNT((short)13, "rulesSuccessCount"),
RULES_FAILURE_COUNT((short)14, "rulesFailureCount"),
CACHE_HITS_COUNT((short)20, "cacheHitsCount"),
CACHE_MISSES_COUNT((short)21, "cacheMissesCount"),
CACHE_IGNORES_COUNT((short)22, "cacheIgnoresCount"),
CACHE_ERRORS_COUNT((short)23, "cacheErrorsCount"),
CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT((short)24, "cacheLocalKeyUnchangedHitsCount");
// reverse lookup table: wire name -> enum constant
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // STAMPEDE_ID
return STAMPEDE_ID;
case 2: // RUN_ID
return RUN_ID;
case 10: // TOTAL_RULES_COUNT
return TOTAL_RULES_COUNT;
case 11: // RULES_STARTED_COUNT
return RULES_STARTED_COUNT;
case 12: // RULES_FINISHED_COUNT
return RULES_FINISHED_COUNT;
case 13: // RULES_SUCCESS_COUNT
return RULES_SUCCESS_COUNT;
case 14: // RULES_FAILURE_COUNT
return RULES_FAILURE_COUNT;
case 20: // CACHE_HITS_COUNT
return CACHE_HITS_COUNT;
case 21: // CACHE_MISSES_COUNT
return CACHE_MISSES_COUNT;
case 22: // CACHE_IGNORES_COUNT
return CACHE_IGNORES_COUNT;
case 23: // CACHE_ERRORS_COUNT
return CACHE_ERRORS_COUNT;
case 24: // CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT
return CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __TOTALRULESCOUNT_ISSET_ID = 0;
private static final int __RULESSTARTEDCOUNT_ISSET_ID = 1;
private static final int __RULESFINISHEDCOUNT_ISSET_ID = 2;
private static final int __RULESSUCCESSCOUNT_ISSET_ID = 3;
private static final int __RULESFAILURECOUNT_ISSET_ID = 4;
private static final int __CACHEHITSCOUNT_ISSET_ID = 5;
private static final int __CACHEMISSESCOUNT_ISSET_ID = 6;
private static final int __CACHEIGNORESCOUNT_ISSET_ID = 7;
private static final int __CACHEERRORSCOUNT_ISSET_ID = 8;
private static final int __CACHELOCALKEYUNCHANGEDHITSCOUNT_ISSET_ID = 9;
private short __isset_bitfield = 0;
private static final _Fields optionals[] = {_Fields.STAMPEDE_ID,_Fields.RUN_ID,_Fields.TOTAL_RULES_COUNT,_Fields.RULES_STARTED_COUNT,_Fields.RULES_FINISHED_COUNT,_Fields.RULES_SUCCESS_COUNT,_Fields.RULES_FAILURE_COUNT,_Fields.CACHE_HITS_COUNT,_Fields.CACHE_MISSES_COUNT,_Fields.CACHE_IGNORES_COUNT,_Fields.CACHE_ERRORS_COUNT,_Fields.CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT};
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.STAMPEDE_ID, new org.apache.thrift.meta_data.FieldMetaData("stampedeId", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, com.facebook.buck.distributed.thrift.StampedeId.class)));
tmpMap.put(_Fields.RUN_ID, new org.apache.thrift.meta_data.FieldMetaData("runId", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, com.facebook.buck.distributed.thrift.RunId.class)));
tmpMap.put(_Fields.TOTAL_RULES_COUNT, new org.apache.thrift.meta_data.FieldMetaData("totalRulesCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.RULES_STARTED_COUNT, new org.apache.thrift.meta_data.FieldMetaData("rulesStartedCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.RULES_FINISHED_COUNT, new org.apache.thrift.meta_data.FieldMetaData("rulesFinishedCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.RULES_SUCCESS_COUNT, new org.apache.thrift.meta_data.FieldMetaData("rulesSuccessCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.RULES_FAILURE_COUNT, new org.apache.thrift.meta_data.FieldMetaData("rulesFailureCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.CACHE_HITS_COUNT, new org.apache.thrift.meta_data.FieldMetaData("cacheHitsCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.CACHE_MISSES_COUNT, new org.apache.thrift.meta_data.FieldMetaData("cacheMissesCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.CACHE_IGNORES_COUNT, new org.apache.thrift.meta_data.FieldMetaData("cacheIgnoresCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.CACHE_ERRORS_COUNT, new org.apache.thrift.meta_data.FieldMetaData("cacheErrorsCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT, new org.apache.thrift.meta_data.FieldMetaData("cacheLocalKeyUnchangedHitsCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(BuildSlaveStatus.class, metaDataMap);
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
public BuildSlaveStatus() {
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public BuildSlaveStatus(BuildSlaveStatus other) {
// primitive isset flags are copied wholesale via the bitfield
__isset_bitfield = other.__isset_bitfield;
if (other.isSetStampedeId()) {
this.stampedeId = new com.facebook.buck.distributed.thrift.StampedeId(other.stampedeId);
}
if (other.isSetRunId()) {
this.runId = new com.facebook.buck.distributed.thrift.RunId(other.runId);
}
this.totalRulesCount = other.totalRulesCount;
this.rulesStartedCount = other.rulesStartedCount;
this.rulesFinishedCount = other.rulesFinishedCount;
this.rulesSuccessCount = other.rulesSuccessCount;
this.rulesFailureCount = other.rulesFailureCount;
this.cacheHitsCount = other.cacheHitsCount;
this.cacheMissesCount = other.cacheMissesCount;
this.cacheIgnoresCount = other.cacheIgnoresCount;
this.cacheErrorsCount = other.cacheErrorsCount;
this.cacheLocalKeyUnchangedHitsCount = other.cacheLocalKeyUnchangedHitsCount;
}
public BuildSlaveStatus deepCopy() {
return new BuildSlaveStatus(this);
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/* Resets every field to its unset/default state. */
@Override
public void clear() {
this.stampedeId = null;
this.runId = null;
setTotalRulesCountIsSet(false);
this.totalRulesCount = 0;
setRulesStartedCountIsSet(false);
this.rulesStartedCount = 0;
setRulesFinishedCountIsSet(false);
this.rulesFinishedCount = 0;
setRulesSuccessCountIsSet(false);
this.rulesSuccessCount = 0;
setRulesFailureCountIsSet(false);
this.rulesFailureCount = 0;
setCacheHitsCountIsSet(false);
this.cacheHitsCount = 0;
setCacheMissesCountIsSet(false);
this.cacheMissesCount = 0;
setCacheIgnoresCountIsSet(false);
this.cacheIgnoresCount = 0;
setCacheErrorsCountIsSet(false);
this.cacheErrorsCount = 0;
setCacheLocalKeyUnchangedHitsCountIsSet(false);
this.cacheLocalKeyUnchangedHitsCount = 0;
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/* Accessors: struct fields use null to signal "unset"; primitive int fields track
   their set-state via bits in __isset_bitfield (EncodingUtils). Setters are fluent. */
public com.facebook.buck.distributed.thrift.StampedeId getStampedeId() {
return this.stampedeId;
}
public BuildSlaveStatus setStampedeId(com.facebook.buck.distributed.thrift.StampedeId stampedeId) {
this.stampedeId = stampedeId;
return this;
}
public void unsetStampedeId() {
this.stampedeId = null;
}
/** Returns true if field stampedeId is set (has been assigned a value) and false otherwise */
public boolean isSetStampedeId() {
return this.stampedeId != null;
}
public void setStampedeIdIsSet(boolean value) {
if (!value) {
this.stampedeId = null;
}
}
public com.facebook.buck.distributed.thrift.RunId getRunId() {
return this.runId;
}
public BuildSlaveStatus setRunId(com.facebook.buck.distributed.thrift.RunId runId) {
this.runId = runId;
return this;
}
public void unsetRunId() {
this.runId = null;
}
/** Returns true if field runId is set (has been assigned a value) and false otherwise */
public boolean isSetRunId() {
return this.runId != null;
}
public void setRunIdIsSet(boolean value) {
if (!value) {
this.runId = null;
}
}
public int getTotalRulesCount() {
return this.totalRulesCount;
}
public BuildSlaveStatus setTotalRulesCount(int totalRulesCount) {
this.totalRulesCount = totalRulesCount;
setTotalRulesCountIsSet(true);
return this;
}
public void unsetTotalRulesCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __TOTALRULESCOUNT_ISSET_ID);
}
/** Returns true if field totalRulesCount is set (has been assigned a value) and false otherwise */
public boolean isSetTotalRulesCount() {
return EncodingUtils.testBit(__isset_bitfield, __TOTALRULESCOUNT_ISSET_ID);
}
public void setTotalRulesCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __TOTALRULESCOUNT_ISSET_ID, value);
}
public int getRulesStartedCount() {
return this.rulesStartedCount;
}
public BuildSlaveStatus setRulesStartedCount(int rulesStartedCount) {
this.rulesStartedCount = rulesStartedCount;
setRulesStartedCountIsSet(true);
return this;
}
public void unsetRulesStartedCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __RULESSTARTEDCOUNT_ISSET_ID);
}
/** Returns true if field rulesStartedCount is set (has been assigned a value) and false otherwise */
public boolean isSetRulesStartedCount() {
return EncodingUtils.testBit(__isset_bitfield, __RULESSTARTEDCOUNT_ISSET_ID);
}
public void setRulesStartedCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __RULESSTARTEDCOUNT_ISSET_ID, value);
}
public int getRulesFinishedCount() {
return this.rulesFinishedCount;
}
public BuildSlaveStatus setRulesFinishedCount(int rulesFinishedCount) {
this.rulesFinishedCount = rulesFinishedCount;
setRulesFinishedCountIsSet(true);
return this;
}
public void unsetRulesFinishedCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __RULESFINISHEDCOUNT_ISSET_ID);
}
/** Returns true if field rulesFinishedCount is set (has been assigned a value) and false otherwise */
public boolean isSetRulesFinishedCount() {
return EncodingUtils.testBit(__isset_bitfield, __RULESFINISHEDCOUNT_ISSET_ID);
}
public void setRulesFinishedCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __RULESFINISHEDCOUNT_ISSET_ID, value);
}
public int getRulesSuccessCount() {
return this.rulesSuccessCount;
}
public BuildSlaveStatus setRulesSuccessCount(int rulesSuccessCount) {
this.rulesSuccessCount = rulesSuccessCount;
setRulesSuccessCountIsSet(true);
return this;
}
public void unsetRulesSuccessCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __RULESSUCCESSCOUNT_ISSET_ID);
}
/** Returns true if field rulesSuccessCount is set (has been assigned a value) and false otherwise */
public boolean isSetRulesSuccessCount() {
return EncodingUtils.testBit(__isset_bitfield, __RULESSUCCESSCOUNT_ISSET_ID);
}
public void setRulesSuccessCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __RULESSUCCESSCOUNT_ISSET_ID, value);
}
public int getRulesFailureCount() {
return this.rulesFailureCount;
}
public BuildSlaveStatus setRulesFailureCount(int rulesFailureCount) {
this.rulesFailureCount = rulesFailureCount;
setRulesFailureCountIsSet(true);
return this;
}
public void unsetRulesFailureCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __RULESFAILURECOUNT_ISSET_ID);
}
/** Returns true if field rulesFailureCount is set (has been assigned a value) and false otherwise */
public boolean isSetRulesFailureCount() {
return EncodingUtils.testBit(__isset_bitfield, __RULESFAILURECOUNT_ISSET_ID);
}
public void setRulesFailureCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __RULESFAILURECOUNT_ISSET_ID, value);
}
public int getCacheHitsCount() {
return this.cacheHitsCount;
}
public BuildSlaveStatus setCacheHitsCount(int cacheHitsCount) {
this.cacheHitsCount = cacheHitsCount;
setCacheHitsCountIsSet(true);
return this;
}
public void unsetCacheHitsCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __CACHEHITSCOUNT_ISSET_ID);
}
/** Returns true if field cacheHitsCount is set (has been assigned a value) and false otherwise */
public boolean isSetCacheHitsCount() {
return EncodingUtils.testBit(__isset_bitfield, __CACHEHITSCOUNT_ISSET_ID);
}
public void setCacheHitsCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __CACHEHITSCOUNT_ISSET_ID, value);
}
public int getCacheMissesCount() {
return this.cacheMissesCount;
}
public BuildSlaveStatus setCacheMissesCount(int cacheMissesCount) {
this.cacheMissesCount = cacheMissesCount;
setCacheMissesCountIsSet(true);
return this;
}
public void unsetCacheMissesCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __CACHEMISSESCOUNT_ISSET_ID);
}
/** Returns true if field cacheMissesCount is set (has been assigned a value) and false otherwise */
public boolean isSetCacheMissesCount() {
return EncodingUtils.testBit(__isset_bitfield, __CACHEMISSESCOUNT_ISSET_ID);
}
public void setCacheMissesCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __CACHEMISSESCOUNT_ISSET_ID, value);
}
public int getCacheIgnoresCount() {
return this.cacheIgnoresCount;
}
public BuildSlaveStatus setCacheIgnoresCount(int cacheIgnoresCount) {
this.cacheIgnoresCount = cacheIgnoresCount;
setCacheIgnoresCountIsSet(true);
return this;
}
public void unsetCacheIgnoresCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __CACHEIGNORESCOUNT_ISSET_ID);
}
/** Returns true if field cacheIgnoresCount is set (has been assigned a value) and false otherwise */
public boolean isSetCacheIgnoresCount() {
return EncodingUtils.testBit(__isset_bitfield, __CACHEIGNORESCOUNT_ISSET_ID);
}
public void setCacheIgnoresCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __CACHEIGNORESCOUNT_ISSET_ID, value);
}
public int getCacheErrorsCount() {
return this.cacheErrorsCount;
}
public BuildSlaveStatus setCacheErrorsCount(int cacheErrorsCount) {
this.cacheErrorsCount = cacheErrorsCount;
setCacheErrorsCountIsSet(true);
return this;
}
public void unsetCacheErrorsCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __CACHEERRORSCOUNT_ISSET_ID);
}
/** Returns true if field cacheErrorsCount is set (has been assigned a value) and false otherwise */
public boolean isSetCacheErrorsCount() {
return EncodingUtils.testBit(__isset_bitfield, __CACHEERRORSCOUNT_ISSET_ID);
}
public void setCacheErrorsCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __CACHEERRORSCOUNT_ISSET_ID, value);
}
public int getCacheLocalKeyUnchangedHitsCount() {
return this.cacheLocalKeyUnchangedHitsCount;
}
public BuildSlaveStatus setCacheLocalKeyUnchangedHitsCount(int cacheLocalKeyUnchangedHitsCount) {
this.cacheLocalKeyUnchangedHitsCount = cacheLocalKeyUnchangedHitsCount;
setCacheLocalKeyUnchangedHitsCountIsSet(true);
return this;
}
public void unsetCacheLocalKeyUnchangedHitsCount() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __CACHELOCALKEYUNCHANGEDHITSCOUNT_ISSET_ID);
}
/** Returns true if field cacheLocalKeyUnchangedHitsCount is set (has been assigned a value) and false otherwise */
public boolean isSetCacheLocalKeyUnchangedHitsCount() {
return EncodingUtils.testBit(__isset_bitfield, __CACHELOCALKEYUNCHANGEDHITSCOUNT_ISSET_ID);
}
public void setCacheLocalKeyUnchangedHitsCountIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __CACHELOCALKEYUNCHANGEDHITSCOUNT_ISSET_ID, value);
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/* Generic setter used by the schemes: null clears (unsets) the field, non-null delegates to the typed setter. */
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case STAMPEDE_ID:
if (value == null) {
unsetStampedeId();
} else {
setStampedeId((com.facebook.buck.distributed.thrift.StampedeId)value);
}
break;
case RUN_ID:
if (value == null) {
unsetRunId();
} else {
setRunId((com.facebook.buck.distributed.thrift.RunId)value);
}
break;
case TOTAL_RULES_COUNT:
if (value == null) {
unsetTotalRulesCount();
} else {
setTotalRulesCount((Integer)value);
}
break;
case RULES_STARTED_COUNT:
if (value == null) {
unsetRulesStartedCount();
} else {
setRulesStartedCount((Integer)value);
}
break;
case RULES_FINISHED_COUNT:
if (value == null) {
unsetRulesFinishedCount();
} else {
setRulesFinishedCount((Integer)value);
}
break;
case RULES_SUCCESS_COUNT:
if (value == null) {
unsetRulesSuccessCount();
} else {
setRulesSuccessCount((Integer)value);
}
break;
case RULES_FAILURE_COUNT:
if (value == null) {
unsetRulesFailureCount();
} else {
setRulesFailureCount((Integer)value);
}
break;
case CACHE_HITS_COUNT:
if (value == null) {
unsetCacheHitsCount();
} else {
setCacheHitsCount((Integer)value);
}
break;
case CACHE_MISSES_COUNT:
if (value == null) {
unsetCacheMissesCount();
} else {
setCacheMissesCount((Integer)value);
}
break;
case CACHE_IGNORES_COUNT:
if (value == null) {
unsetCacheIgnoresCount();
} else {
setCacheIgnoresCount((Integer)value);
}
break;
case CACHE_ERRORS_COUNT:
if (value == null) {
unsetCacheErrorsCount();
} else {
setCacheErrorsCount((Integer)value);
}
break;
case CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT:
if (value == null) {
unsetCacheLocalKeyUnchangedHitsCount();
} else {
setCacheLocalKeyUnchangedHitsCount((Integer)value);
}
break;
}
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/* Generic getter counterpart of setFieldValue; primitives are returned boxed. */
public Object getFieldValue(_Fields field) {
switch (field) {
case STAMPEDE_ID:
return getStampedeId();
case RUN_ID:
return getRunId();
case TOTAL_RULES_COUNT:
return getTotalRulesCount();
case RULES_STARTED_COUNT:
return getRulesStartedCount();
case RULES_FINISHED_COUNT:
return getRulesFinishedCount();
case RULES_SUCCESS_COUNT:
return getRulesSuccessCount();
case RULES_FAILURE_COUNT:
return getRulesFailureCount();
case CACHE_HITS_COUNT:
return getCacheHitsCount();
case CACHE_MISSES_COUNT:
return getCacheMissesCount();
case CACHE_IGNORES_COUNT:
return getCacheIgnoresCount();
case CACHE_ERRORS_COUNT:
return getCacheErrorsCount();
case CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT:
return getCacheLocalKeyUnchangedHitsCount();
}
throw new IllegalStateException();
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case STAMPEDE_ID:
return isSetStampedeId();
case RUN_ID:
return isSetRunId();
case TOTAL_RULES_COUNT:
return isSetTotalRulesCount();
case RULES_STARTED_COUNT:
return isSetRulesStartedCount();
case RULES_FINISHED_COUNT:
return isSetRulesFinishedCount();
case RULES_SUCCESS_COUNT:
return isSetRulesSuccessCount();
case RULES_FAILURE_COUNT:
return isSetRulesFailureCount();
case CACHE_HITS_COUNT:
return isSetCacheHitsCount();
case CACHE_MISSES_COUNT:
return isSetCacheMissesCount();
case CACHE_IGNORES_COUNT:
return isSetCacheIgnoresCount();
case CACHE_ERRORS_COUNT:
return isSetCacheErrorsCount();
case CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT:
return isSetCacheLocalKeyUnchangedHitsCount();
}
throw new IllegalStateException();
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/* Type-check-and-delegate to the typed equals overload. */
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof BuildSlaveStatus)
return this.equals((BuildSlaveStatus)that);
return false;
}
/* NOTE(review): Thrift-generated code (compiler 0.9.3). Do not hand-edit; regenerate from the .thrift IDL. */
/* Field-by-field equality: two instances are equal only when each field has the same
   set-state on both sides and, when set, the same value. The "true &&" prefixes are
   generator artifacts kept for uniform emission across required/optional fields. */
public boolean equals(BuildSlaveStatus that) {
if (that == null)
return false;
boolean this_present_stampedeId = true && this.isSetStampedeId();
boolean that_present_stampedeId = true && that.isSetStampedeId();
if (this_present_stampedeId || that_present_stampedeId) {
if (!(this_present_stampedeId && that_present_stampedeId))
return false;
if (!this.stampedeId.equals(that.stampedeId))
return false;
}
boolean this_present_runId = true && this.isSetRunId();
boolean that_present_runId = true && that.isSetRunId();
if (this_present_runId || that_present_runId) {
if (!(this_present_runId && that_present_runId))
return false;
if (!this.runId.equals(that.runId))
return false;
}
boolean this_present_totalRulesCount = true && this.isSetTotalRulesCount();
boolean that_present_totalRulesCount = true && that.isSetTotalRulesCount();
if (this_present_totalRulesCount || that_present_totalRulesCount) {
if (!(this_present_totalRulesCount && that_present_totalRulesCount))
return false;
if (this.totalRulesCount != that.totalRulesCount)
return false;
}
boolean this_present_rulesStartedCount = true && this.isSetRulesStartedCount();
boolean that_present_rulesStartedCount = true && that.isSetRulesStartedCount();
if (this_present_rulesStartedCount || that_present_rulesStartedCount) {
if (!(this_present_rulesStartedCount && that_present_rulesStartedCount))
return false;
if (this.rulesStartedCount != that.rulesStartedCount)
return false;
}
boolean this_present_rulesFinishedCount = true && this.isSetRulesFinishedCount();
boolean that_present_rulesFinishedCount = true && that.isSetRulesFinishedCount();
if (this_present_rulesFinishedCount || that_present_rulesFinishedCount) {
if (!(this_present_rulesFinishedCount && that_present_rulesFinishedCount))
return false;
if (this.rulesFinishedCount != that.rulesFinishedCount)
return false;
}
boolean this_present_rulesSuccessCount = true && this.isSetRulesSuccessCount();
boolean that_present_rulesSuccessCount = true && that.isSetRulesSuccessCount();
if (this_present_rulesSuccessCount || that_present_rulesSuccessCount) {
if (!(this_present_rulesSuccessCount && that_present_rulesSuccessCount))
return false;
if (this.rulesSuccessCount != that.rulesSuccessCount)
return false;
}
boolean this_present_rulesFailureCount = true && this.isSetRulesFailureCount();
boolean that_present_rulesFailureCount = true && that.isSetRulesFailureCount();
if (this_present_rulesFailureCount || that_present_rulesFailureCount) {
if (!(this_present_rulesFailureCount && that_present_rulesFailureCount))
return false;
if (this.rulesFailureCount != that.rulesFailureCount)
return false;
}
boolean this_present_cacheHitsCount = true && this.isSetCacheHitsCount();
boolean that_present_cacheHitsCount = true && that.isSetCacheHitsCount();
if (this_present_cacheHitsCount || that_present_cacheHitsCount) {
if (!(this_present_cacheHitsCount && that_present_cacheHitsCount))
return false;
if (this.cacheHitsCount != that.cacheHitsCount)
return false;
}
boolean this_present_cacheMissesCount = true && this.isSetCacheMissesCount();
boolean that_present_cacheMissesCount = true && that.isSetCacheMissesCount();
if (this_present_cacheMissesCount || that_present_cacheMissesCount) {
if (!(this_present_cacheMissesCount && that_present_cacheMissesCount))
return false;
if (this.cacheMissesCount != that.cacheMissesCount)
return false;
}
boolean this_present_cacheIgnoresCount = true && this.isSetCacheIgnoresCount();
boolean that_present_cacheIgnoresCount = true && that.isSetCacheIgnoresCount();
if (this_present_cacheIgnoresCount || that_present_cacheIgnoresCount) {
if (!(this_present_cacheIgnoresCount && that_present_cacheIgnoresCount))
return false;
if (this.cacheIgnoresCount != that.cacheIgnoresCount)
return false;
}
boolean this_present_cacheErrorsCount = true && this.isSetCacheErrorsCount();
boolean that_present_cacheErrorsCount = true && that.isSetCacheErrorsCount();
if (this_present_cacheErrorsCount || that_present_cacheErrorsCount) {
if (!(this_present_cacheErrorsCount && that_present_cacheErrorsCount))
return false;
if (this.cacheErrorsCount != that.cacheErrorsCount)
return false;
}
boolean this_present_cacheLocalKeyUnchangedHitsCount = true && this.isSetCacheLocalKeyUnchangedHitsCount();
boolean that_present_cacheLocalKeyUnchangedHitsCount = true && that.isSetCacheLocalKeyUnchangedHitsCount();
if (this_present_cacheLocalKeyUnchangedHitsCount || that_present_cacheLocalKeyUnchangedHitsCount) {
if (!(this_present_cacheLocalKeyUnchangedHitsCount && that_present_cacheLocalKeyUnchangedHitsCount))
return false;
if (this.cacheLocalKeyUnchangedHitsCount != that.cacheLocalKeyUnchangedHitsCount)
return false;
}
return true;
}
@Override
public int hashCode() {
    // Collect an (isSet, value) pair per field in declaration order and reuse
    // List.hashCode(), reproducing the generated accumulation exactly.
    List<Object> hashElements = new ArrayList<Object>();
    boolean hasStampedeId = isSetStampedeId();
    hashElements.add(hasStampedeId);
    if (hasStampedeId) {
        hashElements.add(stampedeId);
    }
    boolean hasRunId = isSetRunId();
    hashElements.add(hasRunId);
    if (hasRunId) {
        hashElements.add(runId);
    }
    boolean hasTotalRulesCount = isSetTotalRulesCount();
    hashElements.add(hasTotalRulesCount);
    if (hasTotalRulesCount) {
        hashElements.add(totalRulesCount);
    }
    boolean hasRulesStartedCount = isSetRulesStartedCount();
    hashElements.add(hasRulesStartedCount);
    if (hasRulesStartedCount) {
        hashElements.add(rulesStartedCount);
    }
    boolean hasRulesFinishedCount = isSetRulesFinishedCount();
    hashElements.add(hasRulesFinishedCount);
    if (hasRulesFinishedCount) {
        hashElements.add(rulesFinishedCount);
    }
    boolean hasRulesSuccessCount = isSetRulesSuccessCount();
    hashElements.add(hasRulesSuccessCount);
    if (hasRulesSuccessCount) {
        hashElements.add(rulesSuccessCount);
    }
    boolean hasRulesFailureCount = isSetRulesFailureCount();
    hashElements.add(hasRulesFailureCount);
    if (hasRulesFailureCount) {
        hashElements.add(rulesFailureCount);
    }
    boolean hasCacheHitsCount = isSetCacheHitsCount();
    hashElements.add(hasCacheHitsCount);
    if (hasCacheHitsCount) {
        hashElements.add(cacheHitsCount);
    }
    boolean hasCacheMissesCount = isSetCacheMissesCount();
    hashElements.add(hasCacheMissesCount);
    if (hasCacheMissesCount) {
        hashElements.add(cacheMissesCount);
    }
    boolean hasCacheIgnoresCount = isSetCacheIgnoresCount();
    hashElements.add(hasCacheIgnoresCount);
    if (hasCacheIgnoresCount) {
        hashElements.add(cacheIgnoresCount);
    }
    boolean hasCacheErrorsCount = isSetCacheErrorsCount();
    hashElements.add(hasCacheErrorsCount);
    if (hasCacheErrorsCount) {
        hashElements.add(cacheErrorsCount);
    }
    boolean hasCacheLocalKeyUnchangedHitsCount = isSetCacheLocalKeyUnchangedHitsCount();
    hashElements.add(hasCacheLocalKeyUnchangedHitsCount);
    if (hasCacheLocalKeyUnchangedHitsCount) {
        hashElements.add(cacheLocalKeyUnchangedHitsCount);
    }
    return hashElements.hashCode();
}
@Override
public int compareTo(BuildSlaveStatus other) {
    // Orders by class name first, then by each field in declaration order:
    // an unset field sorts before a set one, otherwise the values decide.
    if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
    }
    int cmp = Boolean.valueOf(isSetStampedeId()).compareTo(other.isSetStampedeId());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetStampedeId()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.stampedeId, other.stampedeId);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetRunId()).compareTo(other.isSetRunId());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetRunId()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.runId, other.runId);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetTotalRulesCount()).compareTo(other.isSetTotalRulesCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetTotalRulesCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.totalRulesCount, other.totalRulesCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetRulesStartedCount()).compareTo(other.isSetRulesStartedCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetRulesStartedCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.rulesStartedCount, other.rulesStartedCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetRulesFinishedCount()).compareTo(other.isSetRulesFinishedCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetRulesFinishedCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.rulesFinishedCount, other.rulesFinishedCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetRulesSuccessCount()).compareTo(other.isSetRulesSuccessCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetRulesSuccessCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.rulesSuccessCount, other.rulesSuccessCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetRulesFailureCount()).compareTo(other.isSetRulesFailureCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetRulesFailureCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.rulesFailureCount, other.rulesFailureCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetCacheHitsCount()).compareTo(other.isSetCacheHitsCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetCacheHitsCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.cacheHitsCount, other.cacheHitsCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetCacheMissesCount()).compareTo(other.isSetCacheMissesCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetCacheMissesCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.cacheMissesCount, other.cacheMissesCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetCacheIgnoresCount()).compareTo(other.isSetCacheIgnoresCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetCacheIgnoresCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.cacheIgnoresCount, other.cacheIgnoresCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetCacheErrorsCount()).compareTo(other.isSetCacheErrorsCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetCacheErrorsCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.cacheErrorsCount, other.cacheErrorsCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    cmp = Boolean.valueOf(isSetCacheLocalKeyUnchangedHitsCount()).compareTo(other.isSetCacheLocalKeyUnchangedHitsCount());
    if (cmp != 0) {
        return cmp;
    }
    if (isSetCacheLocalKeyUnchangedHitsCount()) {
        cmp = org.apache.thrift.TBaseHelper.compareTo(this.cacheLocalKeyUnchangedHitsCount, other.cacheLocalKeyUnchangedHitsCount);
        if (cmp != 0) {
            return cmp;
        }
    }
    return 0;
}
/** Resolves a thrift field id to its {@code _Fields} constant via {@code _Fields.findByThriftId}. */
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct from {@code iprot}, delegating to the scheme registered for the protocol's scheme type. */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
/** Serializes this struct to {@code oprot}, delegating to the scheme registered for the protocol's scheme type. */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
    // Renders only the fields that are set, comma-separated, in declaration
    // order. StringBuilder.append(Object) prints "null" for null references,
    // matching the explicit null handling of the generated original.
    StringBuilder out = new StringBuilder("BuildSlaveStatus(");
    String sep = "";
    if (isSetStampedeId()) {
        out.append(sep).append("stampedeId:").append(this.stampedeId);
        sep = ", ";
    }
    if (isSetRunId()) {
        out.append(sep).append("runId:").append(this.runId);
        sep = ", ";
    }
    if (isSetTotalRulesCount()) {
        out.append(sep).append("totalRulesCount:").append(this.totalRulesCount);
        sep = ", ";
    }
    if (isSetRulesStartedCount()) {
        out.append(sep).append("rulesStartedCount:").append(this.rulesStartedCount);
        sep = ", ";
    }
    if (isSetRulesFinishedCount()) {
        out.append(sep).append("rulesFinishedCount:").append(this.rulesFinishedCount);
        sep = ", ";
    }
    if (isSetRulesSuccessCount()) {
        out.append(sep).append("rulesSuccessCount:").append(this.rulesSuccessCount);
        sep = ", ";
    }
    if (isSetRulesFailureCount()) {
        out.append(sep).append("rulesFailureCount:").append(this.rulesFailureCount);
        sep = ", ";
    }
    if (isSetCacheHitsCount()) {
        out.append(sep).append("cacheHitsCount:").append(this.cacheHitsCount);
        sep = ", ";
    }
    if (isSetCacheMissesCount()) {
        out.append(sep).append("cacheMissesCount:").append(this.cacheMissesCount);
        sep = ", ";
    }
    if (isSetCacheIgnoresCount()) {
        out.append(sep).append("cacheIgnoresCount:").append(this.cacheIgnoresCount);
        sep = ", ";
    }
    if (isSetCacheErrorsCount()) {
        out.append(sep).append("cacheErrorsCount:").append(this.cacheErrorsCount);
        sep = ", ";
    }
    if (isSetCacheLocalKeyUnchangedHitsCount()) {
        out.append(sep).append("cacheLocalKeyUnchangedHitsCount:").append(this.cacheLocalKeyUnchangedHitsCount);
        sep = ", ";
    }
    return out.append(")").toString();
}
/**
 * Thrift-level validation: this struct declares no required fields, so only the
 * nested {@code stampedeId} and {@code runId} sub-structs are validated when present.
 */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
if (stampedeId != null) {
stampedeId.validate();
}
if (runId != null) {
runId.validate();
}
}
// Java serialization hook: encodes the struct with thrift's compact protocol,
// wrapping any TException in an IOException as the serialization contract requires.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
// Java deserialization hook: resets the primitive-field bitfield (the default
// constructor is bypassed by Java serialization) and decodes via compact protocol.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Factory producing the field-by-field (standard) codec for this struct. */
private static class BuildSlaveStatusStandardSchemeFactory implements SchemeFactory {
public BuildSlaveStatusStandardScheme getScheme() {
return new BuildSlaveStatusStandardScheme();
}
}
/**
 * Standard-protocol codec for {@code BuildSlaveStatus}: decodes fields by their
 * wire field id and writes only fields that are marked set.
 */
private static class BuildSlaveStatusStandardScheme extends StandardScheme<BuildSlaveStatus> {
// Reads fields in whatever order the stream provides them; unknown field ids
// and fields with an unexpected wire type are skipped rather than failing.
public void read(org.apache.thrift.protocol.TProtocol iprot, BuildSlaveStatus struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // STAMPEDE_ID
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.stampedeId = new com.facebook.buck.distributed.thrift.StampedeId();
struct.stampedeId.read(iprot);
struct.setStampedeIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // RUN_ID
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.runId = new com.facebook.buck.distributed.thrift.RunId();
struct.runId.read(iprot);
struct.setRunIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 10: // TOTAL_RULES_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.totalRulesCount = iprot.readI32();
struct.setTotalRulesCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 11: // RULES_STARTED_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.rulesStartedCount = iprot.readI32();
struct.setRulesStartedCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 12: // RULES_FINISHED_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.rulesFinishedCount = iprot.readI32();
struct.setRulesFinishedCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 13: // RULES_SUCCESS_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.rulesSuccessCount = iprot.readI32();
struct.setRulesSuccessCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 14: // RULES_FAILURE_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.rulesFailureCount = iprot.readI32();
struct.setRulesFailureCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 20: // CACHE_HITS_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.cacheHitsCount = iprot.readI32();
struct.setCacheHitsCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 21: // CACHE_MISSES_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.cacheMissesCount = iprot.readI32();
struct.setCacheMissesCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 22: // CACHE_IGNORES_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.cacheIgnoresCount = iprot.readI32();
struct.setCacheIgnoresCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 23: // CACHE_ERRORS_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.cacheErrorsCount = iprot.readI32();
struct.setCacheErrorsCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 24: // CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.cacheLocalKeyUnchangedHitsCount = iprot.readI32();
struct.setCacheLocalKeyUnchangedHitsCountIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
// Writes only the fields marked set; the struct fields stampedeId/runId are
// additionally null-checked before being emitted.
public void write(org.apache.thrift.protocol.TProtocol oprot, BuildSlaveStatus struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.stampedeId != null) {
if (struct.isSetStampedeId()) {
oprot.writeFieldBegin(STAMPEDE_ID_FIELD_DESC);
struct.stampedeId.write(oprot);
oprot.writeFieldEnd();
}
}
if (struct.runId != null) {
if (struct.isSetRunId()) {
oprot.writeFieldBegin(RUN_ID_FIELD_DESC);
struct.runId.write(oprot);
oprot.writeFieldEnd();
}
}
if (struct.isSetTotalRulesCount()) {
oprot.writeFieldBegin(TOTAL_RULES_COUNT_FIELD_DESC);
oprot.writeI32(struct.totalRulesCount);
oprot.writeFieldEnd();
}
if (struct.isSetRulesStartedCount()) {
oprot.writeFieldBegin(RULES_STARTED_COUNT_FIELD_DESC);
oprot.writeI32(struct.rulesStartedCount);
oprot.writeFieldEnd();
}
if (struct.isSetRulesFinishedCount()) {
oprot.writeFieldBegin(RULES_FINISHED_COUNT_FIELD_DESC);
oprot.writeI32(struct.rulesFinishedCount);
oprot.writeFieldEnd();
}
if (struct.isSetRulesSuccessCount()) {
oprot.writeFieldBegin(RULES_SUCCESS_COUNT_FIELD_DESC);
oprot.writeI32(struct.rulesSuccessCount);
oprot.writeFieldEnd();
}
if (struct.isSetRulesFailureCount()) {
oprot.writeFieldBegin(RULES_FAILURE_COUNT_FIELD_DESC);
oprot.writeI32(struct.rulesFailureCount);
oprot.writeFieldEnd();
}
if (struct.isSetCacheHitsCount()) {
oprot.writeFieldBegin(CACHE_HITS_COUNT_FIELD_DESC);
oprot.writeI32(struct.cacheHitsCount);
oprot.writeFieldEnd();
}
if (struct.isSetCacheMissesCount()) {
oprot.writeFieldBegin(CACHE_MISSES_COUNT_FIELD_DESC);
oprot.writeI32(struct.cacheMissesCount);
oprot.writeFieldEnd();
}
if (struct.isSetCacheIgnoresCount()) {
oprot.writeFieldBegin(CACHE_IGNORES_COUNT_FIELD_DESC);
oprot.writeI32(struct.cacheIgnoresCount);
oprot.writeFieldEnd();
}
if (struct.isSetCacheErrorsCount()) {
oprot.writeFieldBegin(CACHE_ERRORS_COUNT_FIELD_DESC);
oprot.writeI32(struct.cacheErrorsCount);
oprot.writeFieldEnd();
}
if (struct.isSetCacheLocalKeyUnchangedHitsCount()) {
oprot.writeFieldBegin(CACHE_LOCAL_KEY_UNCHANGED_HITS_COUNT_FIELD_DESC);
oprot.writeI32(struct.cacheLocalKeyUnchangedHitsCount);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
/** Factory producing the compact tuple codec for this struct. */
private static class BuildSlaveStatusTupleSchemeFactory implements SchemeFactory {
public BuildSlaveStatusTupleScheme getScheme() {
return new BuildSlaveStatusTupleScheme();
}
}
/**
 * Tuple-protocol codec: a 12-bit presence bitset (one bit per optional field,
 * in declaration order) followed by the set field values in that same order.
 * The write order here must match the read order exactly.
 */
private static class BuildSlaveStatusTupleScheme extends TupleScheme<BuildSlaveStatus> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, BuildSlaveStatus struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
// First pass: record which fields are set.
BitSet optionals = new BitSet();
if (struct.isSetStampedeId()) {
optionals.set(0);
}
if (struct.isSetRunId()) {
optionals.set(1);
}
if (struct.isSetTotalRulesCount()) {
optionals.set(2);
}
if (struct.isSetRulesStartedCount()) {
optionals.set(3);
}
if (struct.isSetRulesFinishedCount()) {
optionals.set(4);
}
if (struct.isSetRulesSuccessCount()) {
optionals.set(5);
}
if (struct.isSetRulesFailureCount()) {
optionals.set(6);
}
if (struct.isSetCacheHitsCount()) {
optionals.set(7);
}
if (struct.isSetCacheMissesCount()) {
optionals.set(8);
}
if (struct.isSetCacheIgnoresCount()) {
optionals.set(9);
}
if (struct.isSetCacheErrorsCount()) {
optionals.set(10);
}
if (struct.isSetCacheLocalKeyUnchangedHitsCount()) {
optionals.set(11);
}
oprot.writeBitSet(optionals, 12);
// Second pass: emit the values of the set fields, bit order.
if (struct.isSetStampedeId()) {
struct.stampedeId.write(oprot);
}
if (struct.isSetRunId()) {
struct.runId.write(oprot);
}
if (struct.isSetTotalRulesCount()) {
oprot.writeI32(struct.totalRulesCount);
}
if (struct.isSetRulesStartedCount()) {
oprot.writeI32(struct.rulesStartedCount);
}
if (struct.isSetRulesFinishedCount()) {
oprot.writeI32(struct.rulesFinishedCount);
}
if (struct.isSetRulesSuccessCount()) {
oprot.writeI32(struct.rulesSuccessCount);
}
if (struct.isSetRulesFailureCount()) {
oprot.writeI32(struct.rulesFailureCount);
}
if (struct.isSetCacheHitsCount()) {
oprot.writeI32(struct.cacheHitsCount);
}
if (struct.isSetCacheMissesCount()) {
oprot.writeI32(struct.cacheMissesCount);
}
if (struct.isSetCacheIgnoresCount()) {
oprot.writeI32(struct.cacheIgnoresCount);
}
if (struct.isSetCacheErrorsCount()) {
oprot.writeI32(struct.cacheErrorsCount);
}
if (struct.isSetCacheLocalKeyUnchangedHitsCount()) {
oprot.writeI32(struct.cacheLocalKeyUnchangedHitsCount);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, BuildSlaveStatus struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
// Presence bitset first, then the set fields' values in bit order.
BitSet incoming = iprot.readBitSet(12);
if (incoming.get(0)) {
struct.stampedeId = new com.facebook.buck.distributed.thrift.StampedeId();
struct.stampedeId.read(iprot);
struct.setStampedeIdIsSet(true);
}
if (incoming.get(1)) {
struct.runId = new com.facebook.buck.distributed.thrift.RunId();
struct.runId.read(iprot);
struct.setRunIdIsSet(true);
}
if (incoming.get(2)) {
struct.totalRulesCount = iprot.readI32();
struct.setTotalRulesCountIsSet(true);
}
if (incoming.get(3)) {
struct.rulesStartedCount = iprot.readI32();
struct.setRulesStartedCountIsSet(true);
}
if (incoming.get(4)) {
struct.rulesFinishedCount = iprot.readI32();
struct.setRulesFinishedCountIsSet(true);
}
if (incoming.get(5)) {
struct.rulesSuccessCount = iprot.readI32();
struct.setRulesSuccessCountIsSet(true);
}
if (incoming.get(6)) {
struct.rulesFailureCount = iprot.readI32();
struct.setRulesFailureCountIsSet(true);
}
if (incoming.get(7)) {
struct.cacheHitsCount = iprot.readI32();
struct.setCacheHitsCountIsSet(true);
}
if (incoming.get(8)) {
struct.cacheMissesCount = iprot.readI32();
struct.setCacheMissesCountIsSet(true);
}
if (incoming.get(9)) {
struct.cacheIgnoresCount = iprot.readI32();
struct.setCacheIgnoresCountIsSet(true);
}
if (incoming.get(10)) {
struct.cacheErrorsCount = iprot.readI32();
struct.setCacheErrorsCountIsSet(true);
}
if (incoming.get(11)) {
struct.cacheLocalKeyUnchangedHitsCount = iprot.readI32();
struct.setCacheLocalKeyUnchangedHitsCountIsSet(true);
}
}
}
}
| apache-2.0 |
tufangorel/hazelcast | hazelcast/src/test/java/com/hazelcast/map/impl/mapstore/MapLoaderTest.java | 36424 | /*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map.impl.mapstore;
import com.hazelcast.config.Config;
import com.hazelcast.config.EvictionPolicy;
import com.hazelcast.config.ManagementCenterConfig;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.MapIndexConfig;
import com.hazelcast.config.MapStoreConfig;
import com.hazelcast.config.MaxSizeConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.core.LifecycleEvent;
import com.hazelcast.core.LifecycleListener;
import com.hazelcast.core.MapLoader;
import com.hazelcast.core.MapStore;
import com.hazelcast.core.MapStoreAdapter;
import com.hazelcast.core.MapStoreFactory;
import com.hazelcast.instance.Node;
import com.hazelcast.internal.partition.InternalPartition;
import com.hazelcast.internal.partition.InternalPartitionService;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.map.MapInterceptor;
import com.hazelcast.map.impl.mapstore.writebehind.TestMapUsingMapStoreBuilder;
import com.hazelcast.nio.Address;
import com.hazelcast.query.SqlPredicate;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.util.EmptyStatement;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static com.hazelcast.config.MaxSizeConfig.MaxSizePolicy.PER_PARTITION;
import static com.hazelcast.test.TestCollectionUtils.setOfValuesBetween;
import static com.hazelcast.test.TimeConstants.MINUTE;
import static java.lang.String.format;
import static java.util.Collections.singleton;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@RunWith(HazelcastSerialClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class MapLoaderTest extends HazelcastTestSupport {
// JUnit rule used by the null-check tests below to assert expected exceptions.
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Test
public void testSenderAndBackupTerminates_AfterInitialLoad() {
    // Eager-loading map backed by DummyMapLoader: after the partition owner and
    // its first backup are terminated, a surviving member must still be able to
    // complete loadAll() with the full store contents.
    final ILogger log = Logger.getLogger(MapLoaderTest.class);
    String mapName = randomString();
    MapStoreConfig storeConfig = new MapStoreConfig()
            .setEnabled(true)
            .setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER)
            .setImplementation(new DummyMapLoader());
    Config config = new Config();
    config.getMapConfig(mapName).setMapStoreConfig(storeConfig);
    log.info("Starting cluster with 5 members");
    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(5);
    HazelcastInstance[] members = factory.newInstances(config);
    for (HazelcastInstance member : members) {
        member.getLifecycleService().addLifecycleListener(new LoggingLifecycleListener(member.getName()));
    }
    IMap<Object, Object> map = members[0].getMap(mapName);
    map.clear();
    HazelcastInstance[] replicas = findOwnerAndReplicas(members, mapName);
    log.info("Terminating 2 nodes from ownerOrReplica");
    replicas[0].getLifecycleService().terminate();
    replicas[1].getLifecycleService().terminate();
    log.info("2 nodes got terminated");
    assertClusterSizeEventually(3, replicas[3]);
    log.info("Cluster size is 3 now");
    map = replicas[3].getMap(mapName);
    log.info("Loading all items into the map");
    map.loadAll(false);
    log.info("All items are loaded into the map");
    assertEquals(DummyMapLoader.DEFAULT_SIZE, map.size());
}
/**
 * Resolves the partition that owns {@code name} and maps each replica address
 * back to its member instance; index 0 is the owner, higher indices the backups.
 */
private HazelcastInstance[] findOwnerAndReplicas(HazelcastInstance[] instances, String name) {
    InternalPartitionService partitionService = getNode(instances[0]).getPartitionService();
    InternalPartition partition = partitionService.getPartition(partitionService.getPartitionId(name));
    HazelcastInstance[] result = new HazelcastInstance[instances.length];
    for (int replicaIndex = 0; replicaIndex < instances.length; replicaIndex++) {
        result[replicaIndex] = getInstanceForAddress(instances, partition.getReplicaAddress(replicaIndex));
    }
    return result;
}
@Test
public void givenSpecificKeysWereReloaded_whenLoadAllIsCalled_thenAllEntriesAreLoadedFromTheStore() {
    // After loading a key subset and clearing the map, loadAll(true) must
    // repopulate every entry the loader provides.
    String mapName = randomString();
    int storeSize = 10000;
    MapStoreConfig storeConfig = new MapStoreConfig()
            .setEnabled(true)
            .setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER)
            .setImplementation(new DummyMapLoader(storeSize));
    Config config = new Config();
    config.getMapConfig(mapName).setMapStoreConfig(storeConfig);
    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
    HazelcastInstance[] members = factory.newInstances(config);
    IMap<Integer, Integer> map = members[0].getMap(mapName);
    // load specific keys
    map.loadAll(setOfValuesBetween(0, storeSize), true);
    // remove everything
    map.clear();
    // assert loadAll() with load all entries provided by the mapLoader
    map.loadAll(true);
    assertEquals(storeSize, map.size());
}
/**
 * loadAll(keys, ...) must reject a key set that contains null with a
 * NullPointerException (asserted via the {@code expectedException} rule).
 */
@Test
public void testNullChecks_withMapStore_nullInKeys() {
    String name = "testNullChecks_withMapStore";
    int keysInMapStore = 10000;
    MapStoreConfig mapStoreConfig = new MapStoreConfig()
            .setEnabled(true)
            .setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER)
            // plain DummyMapLoader reference, consistent with the sibling tests in this class
            .setImplementation(new DummyMapLoader(keysInMapStore));
    Config config = new Config();
    config.getMapConfig(name).setMapStoreConfig(mapStoreConfig);
    HazelcastInstance instance = createHazelcastInstance(config);
    IMap<String, String> map = instance.getMap(name);
    Set<String> keys = new HashSet<String>();
    keys.add("key");
    keys.add(null);  // the offending element
    expectedException.expect(NullPointerException.class);
    map.loadAll(keys, true);
}
/**
 * The anonymous loader's loadAll() deliberately returns a map with a null KEY
 * for "2". size() may or may not surface the resulting NPE (depends on
 * operation execution, see issue #11931), so both outcomes are tolerated.
 */
@Test
public void testNullKey_loadAll() {
String name = "testNullIn_loadAll";
MapStoreConfig mapStoreConfig = new MapStoreConfig()
.setEnabled(true)
.setInitialLoadMode(MapStoreConfig.InitialLoadMode.LAZY)
.setImplementation(new MapLoader<String, String>() {
@Override
public String load(String key) {
if (key.equals("1")) {
return "1";
}
if (key.equals("2")) {
return "2";
}
if (key.equals("3")) {
return "3";
}
return null;
}
@Override
public Map<String, String> loadAll(Collection<String> keys) {
Map<String, String> val = new HashMap<String, String>();
if (keys.contains("1")) {
val.put("1", "1");
}
if (keys.contains("2")) {
// the null KEY under test
val.put(null, "2");
}
if (keys.contains("3")) {
val.put("3", "3");
}
return val;
}
@Override
public Iterable<String> loadAllKeys() {
List<String> keys = new ArrayList<String>();
keys.add("1");
keys.add("2");
keys.add("3");
return keys;
}
});
Config config = new Config();
config.getMapConfig(name)
.setMapStoreConfig(mapStoreConfig);
HazelcastInstance instance = createHazelcastInstance(config);
IMap<String, String> map = instance.getMap(name);
try {
map.size();
// We can't expect that since the exception transmission in map-loader is heavily dependant on operation execution.
// See: https://github.com/hazelcast/hazelcast/issues/11931
// fail("Expected a NPE due to a null key in a MapLoader");
} catch (NullPointerException e) {
assertEquals("Key loaded by a MapLoader cannot be null.", e.getMessage());
}
try {
// subsequent gets fall back to load(key), so all three values are reachable
assertEquals(2, map.size());
assertEquals("1", map.get("1"));
assertEquals("2", map.get("2"));
assertEquals("3", map.get("3"));
} catch (NullPointerException e) {
handleNpeFromKnownIssue(e);
}
}
/**
 * The anonymous loader's loadAll() returns a null VALUE for key "2". Unlike a
 * null key, a null value from loadAll does not raise: the entry is simply
 * absent, so size() is 2 and get("2") returns null.
 */
@Test
public void testNullValue_loadAll() {
String name = "testNullIn_loadAll";
MapStoreConfig mapStoreConfig = new MapStoreConfig()
.setEnabled(true)
.setInitialLoadMode(MapStoreConfig.InitialLoadMode.LAZY)
.setImplementation(new MapLoader<String, String>() {
@Override
public String load(String key) {
if (key.equals("1")) {
return "1";
}
if (key.equals("2")) {
// single-key load also yields null for "2"
return null;
}
if (key.equals("3")) {
return "3";
}
return null;
}
@Override
public Map<String, String> loadAll(Collection<String> keys) {
Map<String, String> val = new HashMap<String, String>();
if (keys.contains("1")) {
val.put("1", "1");
}
if (keys.contains("2")) {
// the null VALUE under test
val.put("2", null);
}
if (keys.contains("3")) {
val.put("3", "3");
}
return val;
}
@Override
public Iterable<String> loadAllKeys() {
List<String> keys = new ArrayList<String>();
keys.add("1");
keys.add("2");
keys.add("3");
return keys;
}
});
Config config = new Config();
config.getMapConfig(name)
.setMapStoreConfig(mapStoreConfig);
HazelcastInstance instance = createHazelcastInstance(config);
IMap<String, String> map = instance.getMap(name);
// THIS DOES NOT THROW ANY EXCEPTION
map.size();
assertEquals(2, map.size());
assertEquals("1", map.get("1"));
assertEquals(null, map.get("2"));
assertEquals("3", map.get("3"));
}
/**
 * Same null-VALUE scenario as testNullValue_loadAll, but with a TestInterceptor
 * registered on the map. Here size() MAY raise an NPE with the documented
 * message; either outcome is tolerated (see issue #11931), and the final state
 * must match the no-interceptor case.
 */
@Test
public void testNullValue_loadAll_withInterceptor() {
String name = "testNullIn_loadAll";
MapStoreConfig mapStoreConfig = new MapStoreConfig()
.setEnabled(true)
.setInitialLoadMode(MapStoreConfig.InitialLoadMode.LAZY)
.setImplementation(new MapLoader<String, String>() {
@Override
public String load(String key) {
if (key.equals("1")) {
return "1";
}
if (key.equals("2")) {
return null;
}
if (key.equals("3")) {
return "3";
}
return null;
}
@Override
public Map<String, String> loadAll(Collection<String> keys) {
Map<String, String> val = new HashMap<String, String>();
if (keys.contains("1")) {
val.put("1", "1");
}
if (keys.contains("2")) {
// the null VALUE under test
val.put("2", null);
}
if (keys.contains("3")) {
val.put("3", "3");
}
return val;
}
@Override
public Iterable<String> loadAllKeys() {
List<String> keys = new ArrayList<String>();
keys.add("1");
keys.add("2");
keys.add("3");
return keys;
}
});
Config config = new Config();
config.getMapConfig(name)
.setMapStoreConfig(mapStoreConfig);
HazelcastInstance instance = createHazelcastInstance(config);
IMap<String, String> map = instance.getMap(name);
map.addInterceptor(new TestInterceptor());
try {
map.size();
// We can't expect that since the exception transmission in map-loader is heavily dependant on operation execution.
// See: https://github.com/hazelcast/hazelcast/issues/11931
// fail("Expected a NPE due to a null value in a MapLoader");
} catch (NullPointerException e) {
assertEquals("Value loaded by a MapLoader cannot be null.", e.getMessage());
}
assertEquals(2, map.size());
assertEquals("1", map.get("1"));
assertEquals(null, map.get("2"));
assertEquals("3", map.get("3"));
}
/**
 * The anonymous loader's loadAllKeys() returns a null KEY. size() may surface
 * the resulting NPE (message asserted when it does — exception transmission
 * depends on operation execution, see issue #11931); follow-up gets fall back
 * to load(key), tolerating the known cached-NPE issue.
 */
@Test
public void testNullKey_loadAllKeys() {
    String name = "testNullIn_loadAllKeys";
    MapStoreConfig mapStoreConfig = new MapStoreConfig()
            .setEnabled(true)
            .setInitialLoadMode(MapStoreConfig.InitialLoadMode.LAZY)
            .setImplementation(new MapLoader<String, String>() {
                @Override
                public String load(String key) {
                    if (key.equals("1")) {
                        return "1";
                    }
                    if (key.equals("2")) {
                        return "2";
                    }
                    if (key.equals("3")) {
                        return "3";
                    }
                    return null;
                }

                @Override
                // parameterized Collection<String>: matches the MapLoader<String, String>
                // signature and the sibling anonymous loaders (was a raw Collection)
                public Map<String, String> loadAll(Collection<String> keys) {
                    Map<String, String> val = new HashMap<String, String>();
                    if (keys.contains("1")) {
                        val.put("1", "1");
                    }
                    if (keys.contains("2")) {
                        val.put("2", "2");
                    }
                    if (keys.contains("3")) {
                        val.put("3", "3");
                    }
                    return val;
                }

                @Override
                public Iterable<String> loadAllKeys() {
                    List<String> keys = new ArrayList<String>();
                    keys.add("1");
                    keys.add(null);  // the null KEY under test
                    keys.add("3");
                    return keys;
                }
            });
    Config config = new Config();
    config.getMapConfig(name)
            .setMapStoreConfig(mapStoreConfig);
    HazelcastInstance instance = createHazelcastInstance(config);
    IMap<String, String> map = instance.getMap(name);
    try {
        map.size();
        // We can't expect that since the exception transmission in map-loader is heavily dependant on operation execution.
        // See: https://github.com/hazelcast/hazelcast/issues/11931
        // fail("Expected a NPE due to a null key in a MapLoader");
    } catch (NullPointerException e) {
        assertEquals("Key loaded by a MapLoader cannot be null.", e.getMessage());
    }
    try {
        assertEquals(0, map.size());
        assertEquals("1", map.get("1"));
        assertEquals("2", map.get("2"));
        assertEquals("3", map.get("3"));
    } catch (NullPointerException e) {
        handleNpeFromKnownIssue(e);
    }
}
private void handleNpeFromKnownIssue(NullPointerException e) {
if ("Key loaded by a MapLoader cannot be null.".equals(e.getMessage())) {
// this case is a known issue, which may break the test rarely
// map operations following the previous size() operation may still see this NPE cached in a Future
// in DefaultRecordStore#loadingFutures
EmptyStatement.ignore(e);
} else {
// otherwise we see a new issue, which we should be notified about
throw e;
}
}
    // Passing a null key collection to IMap.loadAll() must fail fast with an
    // NPE, regardless of how many entries the backing store holds.
    @Test
    public void testNullChecks_withMapStore_nullKeys() {
        String name = "testNullChecks_withMapStore";
        int keysInMapStore = 10000;
        MapStoreConfig mapStoreConfig = new MapStoreConfig()
                .setEnabled(true)
                .setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER)
                .setImplementation(new MapLoaderTest.DummyMapLoader(keysInMapStore));
        Config config = new Config();
        config.getMapConfig(name)
                .setMapStoreConfig(mapStoreConfig);
        HazelcastInstance instance = createHazelcastInstance(config);
        IMap<String, String> map = instance.getMap(name);
        // argument validation must happen before any store interaction
        expectedException.expect(NullPointerException.class);
        map.loadAll(null, true);
    }
    /**
     * Getting a map proxy while Management Center is configured must not
     * trigger a bulk load from the MapLoader.
     *
     * https://github.com/hazelcast/hazelcast/issues/1770
     */
    @Test
    public void test1770() {
        final AtomicBoolean loadAllCalled = new AtomicBoolean();
        MapStoreConfig mapStoreConfig = new MapStoreConfig()
                .setEnabled(true)
                .setImplementation(new MapLoader<Object, Object>() {
                    @Override
                    public Object load(Object key) {
                        return null;
                    }

                    @Override
                    public Map<Object, Object> loadAll(Collection keys) {
                        // records that an (unwanted) bulk load happened
                        loadAllCalled.set(true);
                        return new HashMap<Object, Object>();
                    }

                    @Override
                    public Set<Object> loadAllKeys() {
                        return new HashSet<Object>(Collections.singletonList(1));
                    }
                });
        ManagementCenterConfig managementCenterConfig = new ManagementCenterConfig()
                .setEnabled(true)
                .setUrl("http://127.0.0.1:8090/mancenter");
        Config config = getConfig()
                .setManagementCenterConfig(managementCenterConfig);
        MapConfig mapConfig = config.getMapConfig("foo")
                .setMapStoreConfig(mapStoreConfig);
        HazelcastInstance hz = createHazelcastInstance(config);
        hz.getMap(mapConfig.getName());
        // assert the absence of loadAll calls for a sustained period (10s)
        assertTrueAllTheTime(new AssertTask() {
            @Override
            public void run() {
                assertFalse("LoadAll should not have been called", loadAllCalled.get());
            }
        }, 10);
    }
@Test
public void testMapLoaderLoadUpdatingIndex_noPreload() {
final int nodeCount = 3;
String mapName = randomString();
SampleIndexableObjectMapLoader loader = new SampleIndexableObjectMapLoader();
Config config = createMapConfig(mapName, loader);
NodeBuilder nodeBuilder = new NodeBuilder(nodeCount, config).build();
HazelcastInstance node = nodeBuilder.getRandomNode();
IMap<Integer, SampleIndexableObject> map = node.getMap(mapName);
for (int i = 0; i < 10; i++) {
map.put(i, new SampleIndexableObject("My-" + i, i));
}
SqlPredicate predicate = new SqlPredicate("name='My-5'");
assertPredicateResultCorrect(map, predicate);
}
@Test
public void testMapLoaderLoadUpdatingIndex_withPreload() {
final int nodeCount = 3;
String mapName = randomString();
SampleIndexableObjectMapLoader loader = new SampleIndexableObjectMapLoader();
loader.preloadValues = true;
Config config = createMapConfig(mapName, loader);
NodeBuilder nodeBuilder = new NodeBuilder(nodeCount, config).build();
HazelcastInstance node = nodeBuilder.getRandomNode();
IMap<Integer, SampleIndexableObject> map = node.getMap(mapName);
SqlPredicate predicate = new SqlPredicate("name='My-5'");
assertLoadAllKeysCount(loader, 1);
assertPredicateResultCorrect(map, predicate);
}
    // getAll() must put loaded entries into the IMap, so repeated getAll()
    // calls for the same keys are served from the map and never hit the
    // store again.
    @Test
    public void testGetAll_putsLoadedItemsToIMap() {
        Integer[] requestedKeys = {1, 2, 3};
        AtomicInteger loadedKeysCounter = new AtomicInteger(0);
        MapStore<Integer, Integer> mapStore = createMapLoader(loadedKeysCounter);
        IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withPartitionCount(1)
                .build();
        Set<Integer> keySet = new HashSet<Integer>(Arrays.asList(requestedKeys));
        map.getAll(keySet);
        map.getAll(keySet);
        map.getAll(keySet);
        // each key must have been loaded from the store exactly once
        assertEquals(requestedKeys.length, loadedKeysCounter.get());
    }
    // A loadAllKeys() failure on the first attempt must not permanently
    // break the map: a later loadAll() should succeed. The retry loop below
    // works around a known completion-ordering race (see inline comments).
    @Test(timeout = MINUTE)
    public void testMapCanBeLoaded_whenLoadAllKeysThrowsExceptionFirstTime() {
        Config config = getConfig();
        MapLoader failingMapLoader = new FailingMapLoader();
        MapStoreConfig mapStoreConfig = new MapStoreConfig().setImplementation(failingMapLoader);
        MapConfig mapConfig = config.getMapConfig(getClass().getName()).setMapStoreConfig(mapStoreConfig);
        final ILogger logger = Logger.getLogger(LoggingLifecycleListener.class);
        HazelcastInstance[] hz = createHazelcastInstanceFactory(2).newInstances(config, 2);
        final IMap map = hz[0].getMap(mapConfig.getName());
        Throwable exception = null;
        try {
            // remote get forces the (failing) initial key load on the other member
            map.get(generateKeyNotOwnedBy(hz[0]));
        } catch (Throwable e) {
            exception = e;
        }
        assertNotNull("Exception wasn't propagated", exception);
        // In the first map load, partitions are notified asynchronously
        // by the com.hazelcast.map.impl.MapKeyLoader.sendKeyLoadCompleted
        // method and also some partitions are notified twice.
        // Because of this, a subsequent map load might get completed with the
        // results of the first map load.
        // This is why a subsequent map load might fail with the exception from
        // a previous load. In this case, we need to try again.
        // An alternative would be to wait for all partitions to be notified by
        // the result from the first load before initiating a second load but
        // unfortunately we can't observe this as some partitions are completed
        // twice and we might just end up observing the first completion.
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                try {
                    map.loadAll(true);
                    // FailingMapLoader serves exactly one entry after the first failure
                    assertEquals(1, map.size());
                } catch (IllegalStateException e) {
                    logger.info("Map load observed result from a previous load, retrying...", e);
                }
            }
        });
    }
    // Eagerly loads far more entries (1M) than the per-partition max size
    // allows; after eviction the map must hold exactly
    // sizePerPartition * partitionCount entries. Uses the deprecated
    // percentage-based eviction API on purpose, hence @SuppressWarnings.
    @Test
    @SuppressWarnings("deprecation")
    public void testMapLoaderHittingEvictionOnInitialLoad() {
        String mapName = "testMapLoaderHittingEvictionOnInitialLoad";
        int sizePerPartition = 1;
        int partitionCount = 10;
        int entriesCount = 1000000;
        MaxSizeConfig maxSizeConfig = new MaxSizeConfig()
                .setMaxSizePolicy(PER_PARTITION)
                .setSize(sizePerPartition);
        MapStoreConfig storeConfig = new MapStoreConfig()
                .setEnabled(true)
                .setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER)
                .setImplementation(new SimpleLoader(entriesCount));
        Config config = getConfig()
                .setProperty(GroupProperty.PARTITION_COUNT.getName(), String.valueOf(partitionCount));
        config.getMapConfig(mapName)
                .setEvictionPolicy(EvictionPolicy.LRU)
                .setEvictionPercentage(50)
                .setMinEvictionCheckMillis(0)
                .setMaxSizeConfig(maxSizeConfig)
                .setMapStoreConfig(storeConfig);
        HazelcastInstance instance = createHazelcastInstance(config);
        IMap imap = instance.getMap(mapName);
        imap.addInterceptor(new TestInterceptor());
        assertEquals(sizePerPartition * partitionCount, imap.size());
    }
private MapStore<Integer, Integer> createMapLoader(final AtomicInteger loadAllCounter) {
return new MapStoreAdapter<Integer, Integer>() {
@Override
public Map<Integer, Integer> loadAll(Collection<Integer> keys) {
loadAllCounter.addAndGet(keys.size());
Map<Integer, Integer> map = new HashMap<Integer, Integer>();
for (Integer key : keys) {
map.put(key, key);
}
return map;
}
@Override
public Integer load(Integer key) {
loadAllCounter.incrementAndGet();
return super.load(key);
}
};
}
private static final class SimpleLoader implements MapLoader<Integer, Integer> {
private final int entriesCount;
SimpleLoader(int entriesCount) {
this.entriesCount = entriesCount;
}
@Override
public Integer load(Integer key) {
return key;
}
@Override
public Map<Integer, Integer> loadAll(Collection<Integer> keys) {
Map<Integer, Integer> entries = new HashMap<Integer, Integer>(keys.size());
for (Integer key : keys) {
entries.put(key, key);
}
return entries;
}
@Override
public Iterable<Integer> loadAllKeys() {
Collection<Integer> keys = new ArrayList<Integer>();
for (int i = 0; i < entriesCount; i++) {
keys.add(i);
}
return keys;
}
}
private Config createMapConfig(String mapName, SampleIndexableObjectMapLoader loader) {
Config config = getConfig();
MapConfig mapConfig = config.getMapConfig(mapName);
List<MapIndexConfig> indexConfigs = mapConfig.getMapIndexConfigs();
indexConfigs.add(new MapIndexConfig("name", true));
MapStoreConfig storeConfig = new MapStoreConfig();
storeConfig.setFactoryImplementation(loader);
storeConfig.setEnabled(true);
mapConfig.setMapStoreConfig(storeConfig);
return config;
}
    // Asserts (eventually) that the loader's loadAllKeys() was invoked
    // exactly the expected number of times across the cluster.
    @SuppressWarnings("SameParameterValue")
    private void assertLoadAllKeysCount(final SampleIndexableObjectMapLoader loader, final int instanceCount) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertEquals("call-count of loadAllKeys method is problematic", instanceCount, loader.loadAllKeysCallCount.get());
            }
        });
    }
    // Asserts (eventually) that the predicate matches exactly one entry and
    // that its value is 5; the current map size is included in the failure
    // message to aid debugging of flaky index/load timing.
    private void assertPredicateResultCorrect(final IMap<Integer, SampleIndexableObject> map, final SqlPredicate predicate) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                final int mapSize = map.size();
                final String message = format("Map size is %d", mapSize);
                Set<Map.Entry<Integer, SampleIndexableObject>> result = map.entrySet(predicate);
                assertEquals(message, 1, result.size());
                assertEquals(message, 5, (int) result.iterator().next().getValue().value);
            }
        });
    }
private static HazelcastInstance getInstanceForAddress(HazelcastInstance[] instances, Address address) {
for (HazelcastInstance instance : instances) {
Address instanceAddress = instance.getCluster().getLocalMember().getAddress();
if (address.equals(instanceAddress)) {
return instance;
}
}
throw new IllegalArgumentException();
}
    // No-op interceptor: returning null from the intercept* methods tells
    // the map to proceed with the original values unchanged. Serializable so
    // it can be shipped to remote members.
    private static class TestInterceptor implements MapInterceptor, Serializable {

        @Override
        public Object interceptGet(Object value) {
            return null;
        }

        @Override
        public void afterGet(Object value) {
        }

        @Override
        public Object interceptPut(Object oldValue, Object newValue) {
            return null;
        }

        @Override
        public void afterPut(Object value) {
        }

        @Override
        public Object interceptRemove(Object removedValue) {
            return null;
        }

        @Override
        public void afterRemove(Object value) {
        }
    }
public static class DummyMapLoader implements MapLoader<Integer, Integer> {
static final int DEFAULT_SIZE = 1000;
final Map<Integer, Integer> map = new ConcurrentHashMap<Integer, Integer>(DEFAULT_SIZE);
public DummyMapLoader() {
this(DEFAULT_SIZE);
}
public DummyMapLoader(int size) {
for (int i = 0; i < size; i++) {
map.put(i, i);
}
}
@Override
public Integer load(Integer key) {
return map.get(key);
}
@Override
public Map<Integer, Integer> loadAll(Collection<Integer> keys) {
HashMap<Integer, Integer> hashMap = new HashMap<Integer, Integer>();
for (Integer key : keys) {
hashMap.put(key, map.get(key));
}
return hashMap;
}
@Override
public Iterable<Integer> loadAllKeys() {
return map.keySet();
}
}
    // Loader/factory serving 10 fixed SampleIndexableObject entries. Returns
    // nothing at all until preloadValues is flipped to true (volatile so the
    // flip is visible to partition threads). loadAllKeysCallCount only counts
    // calls made while preloading is enabled.
    public static class SampleIndexableObjectMapLoader
            implements MapLoader<Integer, SampleIndexableObject>, MapStoreFactory<Integer, SampleIndexableObject> {

        // gate: while false, load/loadAll/loadAllKeys all report "empty"
        volatile boolean preloadValues = false;

        private SampleIndexableObject[] values = new SampleIndexableObject[10];
        private Set<Integer> keys = new HashSet<Integer>();
        private AtomicInteger loadAllKeysCallCount = new AtomicInteger(0);

        public SampleIndexableObjectMapLoader() {
            for (int i = 0; i < 10; i++) {
                keys.add(i);
                values[i] = new SampleIndexableObject("My-" + i, i);
            }
        }

        @Override
        public SampleIndexableObject load(Integer key) {
            if (!preloadValues) {
                return null;
            }
            return values[key];
        }

        @Override
        public Map<Integer, SampleIndexableObject> loadAll(Collection<Integer> keys) {
            if (!preloadValues) {
                return Collections.emptyMap();
            }
            Map<Integer, SampleIndexableObject> data = new HashMap<Integer, SampleIndexableObject>();
            for (Integer key : keys) {
                data.put(key, values[key]);
            }
            return data;
        }

        @Override
        public Set<Integer> loadAllKeys() {
            if (!preloadValues) {
                return Collections.emptySet();
            }
            // counted only on the enabled path, by design (see class comment)
            loadAllKeysCallCount.incrementAndGet();
            return Collections.unmodifiableSet(keys);
        }

        @Override
        public MapLoader<Integer, SampleIndexableObject> newMapStore(String mapName, Properties properties) {
            // factory hands out this same shared loader instance
            return this;
        }
    }
public static class SampleIndexableObject implements Serializable {
String name;
Integer value;
SampleIndexableObject() {
}
SampleIndexableObject(String name, Integer value) {
this.name = name;
this.value = value;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getValue() {
return value;
}
public void setValue(Integer value) {
this.value = value;
}
}
static class FailingMapLoader extends MapStoreAdapter {
AtomicBoolean first = new AtomicBoolean(true);
@Override
public Set loadAllKeys() {
if (first.compareAndSet(true, false)) {
throw new IllegalStateException("Intentional exception");
}
return singleton("key");
}
@Override
public Map loadAll(Collection keys) {
return Collections.singletonMap("key", "value");
}
}
private class NodeBuilder {
private final int nodeCount;
private final Config config;
private final Random random = new Random();
private final TestHazelcastInstanceFactory factory;
private HazelcastInstance[] nodes;
NodeBuilder(int nodeCount, Config config) {
this.nodeCount = nodeCount;
this.config = config;
this.factory = createHazelcastInstanceFactory(nodeCount);
}
NodeBuilder build() {
nodes = factory.newInstances(config);
return this;
}
HazelcastInstance getRandomNode() {
final int nodeIndex = random.nextInt(nodeCount);
return nodes[nodeIndex];
}
}
private static class LoggingLifecycleListener implements LifecycleListener {
private final String nodeInfo;
private final ILogger logger;
private LoggingLifecycleListener(String nodeInfo) {
this.nodeInfo = nodeInfo;
logger = Logger.getLogger(LoggingLifecycleListener.class);
}
@Override
public void stateChanged(LifecycleEvent event) {
logger.info("State changed for " + nodeInfo + " to " + event.getState());
}
}
}
| apache-2.0 |
PetroRavlinko/concordion-foundation-extension | src/main/java/com/ravlinko/concordion/extension/foundation/JavaScriptEndBodyEmbedder.java | 845 | package com.ravlinko.concordion.extension.foundation;
import nu.xom.Attribute;
import nu.xom.Document;
import nu.xom.Element;
import org.concordion.api.listener.DocumentParsingListener;
import org.concordion.internal.util.Check;
/**
 * Concordion parsing listener that injects a JavaScript snippet as the last
 * child of the output document's {@code <body>}, so the script runs after the
 * body content has been parsed.
 */
public class JavaScriptEndBodyEmbedder implements DocumentParsingListener {

    private final String javaScript;

    JavaScriptEndBodyEmbedder(String javaScript) {
        this.javaScript = javaScript;
    }

    @Override
    public void beforeParsing(Document document) {
        Element body = document.getRootElement().getFirstChildElement("body");
        Check.notNull(body, "<body> section is missing from document");

        Element scriptElement = new Element("script");
        scriptElement.addAttribute(new Attribute("type", "text/javascript"));
        scriptElement.appendChild(javaScript);
        // append at the very end of <body>
        body.insertChild(scriptElement, body.getChildCount());
    }
}
| apache-2.0 |
jbeecham/ovirt-engine | frontend/webadmin/modules/userportal-gwtp/src/main/java/org/ovirt/engine/ui/userportal/section/main/view/popup/template/TemplateInterfacePopupView.java | 1466 | package org.ovirt.engine.ui.userportal.section.main.view.popup.template;
import org.ovirt.engine.ui.common.idhandler.ElementIdHandler;
import org.ovirt.engine.ui.common.view.popup.AbstractModelBoundWidgetPopupView;
import org.ovirt.engine.ui.common.widget.uicommon.popup.template.TemplateNetworkInterfacePopupWidget;
import org.ovirt.engine.ui.uicommonweb.models.vms.VmInterfaceModel;
import org.ovirt.engine.ui.userportal.ApplicationConstants;
import org.ovirt.engine.ui.userportal.ApplicationResources;
import org.ovirt.engine.ui.userportal.section.main.presenter.popup.template.TemplateInterfacePopupPresenterWidget;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.shared.EventBus;
import com.google.inject.Inject;
// Popup view for editing a template's network interface; binds the shared
// TemplateNetworkInterfacePopupWidget to a VmInterfaceModel.
public class TemplateInterfacePopupView extends AbstractModelBoundWidgetPopupView<VmInterfaceModel> implements TemplateInterfacePopupPresenterWidget.ViewDef {

    // GWT-generated handler that assigns debug element ids to this view
    interface ViewIdHandler extends ElementIdHandler<TemplateInterfacePopupView> {
        ViewIdHandler idHandler = GWT.create(ViewIdHandler.class);
    }

    @Inject
    public TemplateInterfacePopupView(EventBus eventBus, ApplicationResources resources, ApplicationConstants constants) {
        // fixed popup size of 400x320 pixels
        super(eventBus,
                resources,
                new TemplateNetworkInterfacePopupWidget(eventBus, constants),
                "400px", //$NON-NLS-1$
                "320px"); //$NON-NLS-1$
        ViewIdHandler.idHandler.generateAndSetIds(this);
    }
}
| apache-2.0 |
andrescabrera/gwt-dojo-toolkit | src/gwt/dojo/core/client/store/ObservableStore.java | 1315 | /*
* Copyright 2012 ArkaSoft LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package gwt.dojo.core.client.store;
import gwt.dojo.core.client.JsObject;
import gwt.dojo.core.client.store.api.Store;
/**
 * The observable {@link Store} wrapper takes a store and sets an observable
 * method on results that can be used to monitor results for changes.
 * <p>
 * TODO Example:
 */
public class ObservableStore extends Store {

    // AMD module id resolved for this wrapper
    public static final String MODULE = "dojo/store/Observable";

    // overlay type: instances come from JavaScript, never from 'new'
    protected ObservableStore() {}

    /**
     * Wraps the given store with dojo/store/Observable and returns the
     * observable wrapper.
     */
    public static final ObservableStore create(Store store) {
        ObservableStore observableStore = JsObject.ref(ObservableStore.MODULE);
        return observableStore._create(store);
    }

    // JSNI: 'this' is the Observable module function; invoking it with the
    // store produces the wrapped, observable store
    private final native ObservableStore _create(Store store) /*-{
		return this(store);
	}-*/;
}
| apache-2.0 |
pleacu/jbpm | jbpm-flow/src/main/java/org/jbpm/ruleflow/core/validation/RuleFlowProcessValidator.java | 37813 | /**
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.ruleflow.core.validation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import org.jbpm.process.core.Work;
import org.jbpm.process.core.datatype.DataType;
import org.drools.core.time.impl.CronExpression;
import org.jbpm.process.core.context.exception.CompensationScope;
import org.jbpm.process.core.context.variable.Variable;
import org.jbpm.process.core.event.EventFilter;
import org.jbpm.process.core.event.EventTypeFilter;
import org.jbpm.process.core.timer.DateTimeUtils;
import org.jbpm.process.core.timer.Timer;
import org.jbpm.process.core.validation.ProcessValidationError;
import org.jbpm.process.core.validation.ProcessValidator;
import org.jbpm.process.core.validation.impl.ProcessValidationErrorImpl;
import org.jbpm.ruleflow.core.RuleFlowProcess;
import org.jbpm.workflow.core.WorkflowProcess;
import org.jbpm.workflow.core.impl.DroolsConsequenceAction;
import org.jbpm.workflow.core.impl.NodeImpl;
import org.jbpm.workflow.core.node.ActionNode;
import org.jbpm.workflow.core.node.BoundaryEventNode;
import org.jbpm.workflow.core.node.CatchLinkNode;
import org.jbpm.workflow.core.node.CompositeNode;
import org.jbpm.workflow.core.node.CompositeNode.CompositeNodeEnd;
import org.jbpm.workflow.core.node.CompositeNode.NodeAndType;
import org.jbpm.workflow.core.node.DynamicNode;
import org.jbpm.workflow.core.node.EndNode;
import org.jbpm.workflow.core.node.EventNode;
import org.jbpm.workflow.core.node.EventSubProcessNode;
import org.jbpm.workflow.core.node.FaultNode;
import org.jbpm.workflow.core.node.ForEachNode;
import org.jbpm.workflow.core.node.ForEachNode.ForEachJoinNode;
import org.jbpm.workflow.core.node.ForEachNode.ForEachSplitNode;
import org.jbpm.workflow.core.node.Join;
import org.jbpm.workflow.core.node.MilestoneNode;
import org.jbpm.workflow.core.node.RuleSetNode;
import org.jbpm.workflow.core.node.Split;
import org.jbpm.workflow.core.node.StartNode;
import org.jbpm.workflow.core.node.StateNode;
import org.jbpm.workflow.core.node.SubProcessNode;
import org.jbpm.workflow.core.node.ThrowLinkNode;
import org.jbpm.workflow.core.node.TimerNode;
import org.jbpm.workflow.core.node.WorkItemNode;
import org.kie.api.definition.process.Connection;
import org.kie.api.definition.process.Node;
import org.kie.api.definition.process.NodeContainer;
import org.kie.api.definition.process.Process;
import org.kie.api.io.Resource;
import org.mvel2.ErrorDetail;
import org.mvel2.ParserContext;
import org.mvel2.compiler.ExpressionCompiler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Default implementation of a RuleFlow validator.
*
*/
public class RuleFlowProcessValidator implements ProcessValidator {
public static final String ASSOCIATIONS = "BPMN.Associations";
// TODO: make this pluggable
// TODO: extract generic process stuff and generic workflow stuff
private static RuleFlowProcessValidator instance;
private static final Logger logger = LoggerFactory.getLogger(RuleFlowProcessValidator.class);
    // private: instances are obtained via getInstance() (singleton)
    private RuleFlowProcessValidator() {
    }
public static RuleFlowProcessValidator getInstance() {
if ( instance == null ) {
instance = new RuleFlowProcessValidator();
}
return instance;
}
    /**
     * Validates the given RuleFlow process definition - name, id, presence of
     * start/end nodes, every contained node, declared variables and
     * reachability from the start node - and returns the collected errors.
     */
    public ProcessValidationError[] validateProcess(final RuleFlowProcess process) {
        final List<ProcessValidationError> errors = new ArrayList<ProcessValidationError>();
        if (process.getName() == null) {
            errors.add(new ProcessValidationErrorImpl(process,
                "Process has no name."));
        }
        if (process.getId() == null || "".equals(process.getId())) {
            errors.add(new ProcessValidationErrorImpl(process,
                "Process has no id."));
        }
        // check start node of process
        // dynamic (ad-hoc) processes may legitimately lack start/end nodes
        if ( process.getStartNodes().isEmpty() && !process.isDynamic()) {
            errors.add(new ProcessValidationErrorImpl(process,
                "Process has no start node."));
        }
        // Check end node of the process.
        if (process.getEndNodes().isEmpty() && !process.isDynamic()) {
            errors.add(new ProcessValidationErrorImpl(process,
                "Process has no end node."));
        }
        validateNodes(process.getNodes(), errors, process);
        validateVariables(errors, process);
        checkAllNodesConnectedToStart(process, process.isDynamic(), errors, process);
        return errors.toArray(new ProcessValidationError[errors.size()]);
    }
private void validateNodes(Node[] nodes, List<ProcessValidationError> errors, RuleFlowProcess process) {
String isForCompensation = "isForCompensation";
for ( int i = 0; i < nodes.length; i++ ) {
final Node node = nodes[i];
if (node instanceof StartNode) {
final StartNode startNode = (StartNode) node;
if (startNode.getTo() == null) {
addErrorMessage(process, node, errors, "Start has no outgoing connection.");
}
if (startNode.getTimer() != null) {
validateTimer(startNode.getTimer(), node, process, errors);
}
} else if (node instanceof EndNode) {
final EndNode endNode = (EndNode) node;
if (endNode.getFrom() == null) {
addErrorMessage(process, node, errors, "End has no incoming connection.");
}
validateCompensationIntermediateOrEndEvent(endNode, process, errors);
} else if (node instanceof RuleSetNode) {
final RuleSetNode ruleSetNode = (RuleSetNode) node;
if (ruleSetNode.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "RuleSet has no incoming connection.");
}
if (ruleSetNode.getTo() == null && !acceptsNoOutgoingConnections(node)) {
addErrorMessage(process, node, errors, "RuleSet has no outgoing connection.");
}
final String language = ruleSetNode.getLanguage();
if (RuleSetNode.DRL_LANG.equals(language)) {
final String ruleFlowGroup = ruleSetNode.getRuleFlowGroup();
if (ruleFlowGroup == null || "".equals(ruleFlowGroup)) {
addErrorMessage(process, node, errors, "RuleSet (DRL) has no ruleflow-group.");
}
} else if (RuleSetNode.DMN_LANG.equals(language)) {
final String namespace = ruleSetNode.getNamespace();
if (namespace == null || "".equals(namespace)) {
addErrorMessage(process, node, errors, "RuleSet (DMN) has no namespace.");
}
final String model = ruleSetNode.getModel();
if (model == null || "".equals(model)) {
addErrorMessage(process, node, errors, "RuleSet (DMN) has no model.");
}
} else {
addErrorMessage(process, node, errors, "Unsupported rule language '" + language + "'");
}
if (ruleSetNode.getTimers() != null) {
for (Timer timer: ruleSetNode.getTimers().keySet()) {
validateTimer(timer, node, process, errors);
}
}
} else if (node instanceof Split) {
final Split split = (Split) node;
if (split.getType() == Split.TYPE_UNDEFINED) {
addErrorMessage(process, node, errors, "Split has no type.");
}
if (split.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "Split has no incoming connection.");
}
if (split.getDefaultOutgoingConnections().size() < 2) {
addErrorMessage(process, node, errors, "Split does not have more than one outgoing connection: " + split.getOutgoingConnections().size() + ".");
}
if (split.getType() == Split.TYPE_XOR || split.getType() == Split.TYPE_OR ) {
for ( final Iterator<Connection> it = split.getDefaultOutgoingConnections().iterator(); it.hasNext(); ) {
final Connection connection = it.next();
if (split.getConstraint(connection) == null && !split.isDefault(connection)
|| (!split.isDefault(connection)
&& (split.getConstraint(connection).getConstraint() == null
|| split.getConstraint(connection).getConstraint().trim().length() == 0))) {
addErrorMessage(process, node, errors, "Split does not have a constraint for " + connection.toString() + ".");
}
}
}
} else if (node instanceof Join) {
final Join join = (Join) node;
if (join.getType() == Join.TYPE_UNDEFINED) {
addErrorMessage(process, node, errors, "Join has no type.");
}
if (join.getDefaultIncomingConnections().size() < 2) {
addErrorMessage(process, node, errors, "Join does not have more than one incoming connection: " + join.getIncomingConnections().size() + ".");
}
if (join.getTo() == null && !acceptsNoOutgoingConnections(node)) {
addErrorMessage(process, node, errors, "Join has no outgoing connection.");
}
if (join.getType() == Join.TYPE_N_OF_M) {
String n = join.getN();
if (!n.startsWith("#{") || !n.endsWith("}")) {
try {
new Integer(n);
} catch (NumberFormatException e) {
addErrorMessage(process, node, errors, "Join has illegal n value: " + n);
}
}
}
} else if (node instanceof MilestoneNode) {
final MilestoneNode milestone = (MilestoneNode) node;
if (milestone.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "Milestone has no incoming connection.");
}
if (milestone.getTo() == null && !acceptsNoOutgoingConnections(node)) {
addErrorMessage(process, node, errors, "Milestone has no outgoing connection.");
}
if (milestone.getConstraint() == null) {
addErrorMessage(process, node, errors, "Milestone has no constraint.");
}
if (milestone.getTimers() != null) {
for (Timer timer: milestone.getTimers().keySet()) {
validateTimer(timer, node, process, errors);
}
}
} else if (node instanceof StateNode) {
final StateNode stateNode = (StateNode) node;
if (stateNode.getDefaultIncomingConnections().size() == 0 && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "State has no incoming connection");
}
}
else if (node instanceof SubProcessNode) {
final SubProcessNode subProcess = (SubProcessNode) node;
if (subProcess.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "SubProcess has no incoming connection.");
}
if (subProcess.getTo() == null && !acceptsNoOutgoingConnections(node)) {
Object compensationObj = subProcess.getMetaData(isForCompensation);
if( compensationObj == null || ! ((Boolean) compensationObj) ) {
addErrorMessage(process, node, errors, "SubProcess has no outgoing connection.");
}
}
if (subProcess.getProcessId() == null && subProcess.getProcessName() == null) {
addErrorMessage(process, node, errors, "SubProcess has no process id.");
}
if (subProcess.getTimers() != null) {
for (Timer timer: subProcess.getTimers().keySet()) {
validateTimer(timer, node, process, errors);
}
}
if(!subProcess.isIndependent() && !subProcess.isWaitForCompletion()){
addErrorMessage(process, node, errors, "SubProcess you can only set " +
"independent to 'false' only when 'Wait for completion' is set to true.");
}
} else if (node instanceof ActionNode) {
final ActionNode actionNode = (ActionNode) node;
if (actionNode.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "Action has no incoming connection.");
}
if (actionNode.getTo() == null && !acceptsNoOutgoingConnections(node)) {
Object compensationObj = actionNode.getMetaData(isForCompensation);
if( compensationObj == null || ! ((Boolean) compensationObj) ) {
addErrorMessage(process, node, errors, "Action has no outgoing connection.");
}
}
if (actionNode.getAction() == null) {
addErrorMessage(process, node, errors, "Action has no action.");
} else if (actionNode.getAction() instanceof DroolsConsequenceAction) {
DroolsConsequenceAction droolsAction = (DroolsConsequenceAction) actionNode.getAction();
String actionString = droolsAction.getConsequence();
if (actionString == null) {
addErrorMessage(process, node, errors, "Action has empty action.");
} else if( "mvel".equals( droolsAction.getDialect() ) ) {
try {
ParserContext parserContext = new ParserContext();
//parserContext.setStrictTypeEnforcement(true);
ExpressionCompiler compiler = new ExpressionCompiler(actionString, parserContext);
compiler.setVerifying(true);
compiler.compile();
List<ErrorDetail> mvelErrors = parserContext.getErrorList();
if (mvelErrors != null) {
for (Iterator<ErrorDetail> iterator = mvelErrors.iterator(); iterator.hasNext(); ) {
ErrorDetail error = iterator.next();
addErrorMessage(process, node, errors, "Action has invalid action: " + error.getMessage() + ".");
}
}
} catch (Throwable t) {
addErrorMessage(process, node, errors, "Action has invalid action: " + t.getMessage() + ".");
}
}
// TODO: validation for "java" and "drools" scripts!
validateCompensationIntermediateOrEndEvent(actionNode, process, errors);
}
} else if (node instanceof WorkItemNode) {
final WorkItemNode workItemNode = (WorkItemNode) node;
if (workItemNode.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "Task has no incoming connection.");
}
if (workItemNode.getTo() == null && !acceptsNoOutgoingConnections(node)) {
Object compensationObj = workItemNode.getMetaData(isForCompensation);
if( compensationObj == null || ! ((Boolean) compensationObj) ) {
addErrorMessage(process, node, errors, "Task has no outgoing connection.");
}
}
if (workItemNode.getWork() == null) {
addErrorMessage(process, node, errors, "Task has no work specified.");
} else {
Work work = workItemNode.getWork();
if (work.getName() == null || work.getName().trim().length() == 0) {
addErrorMessage(process, node, errors, "Task has no task type.");
}
}
if (workItemNode.getTimers() != null) {
for (Timer timer: workItemNode.getTimers().keySet()) {
validateTimer(timer, node, process, errors);
}
}
} else if (node instanceof ForEachNode) {
final ForEachNode forEachNode = (ForEachNode) node;
String variableName = forEachNode.getVariableName();
if (variableName == null || "".equals(variableName)) {
addErrorMessage(process, node, errors, "ForEach has no variable name");
}
String collectionExpression = forEachNode.getCollectionExpression();
if (collectionExpression == null || "".equals(collectionExpression)) {
addErrorMessage(process, node, errors, "ForEach has no collection expression");
}
if (forEachNode.getDefaultIncomingConnections().size() == 0 && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "ForEach has no incoming connection");
}
if (forEachNode.getDefaultOutgoingConnections().size() == 0 && !acceptsNoOutgoingConnections(node)) {
addErrorMessage(process, node, errors, "ForEach has no outgoing connection");
}
// TODO: check, if no linked connections, for start and end node(s)
// if (forEachNode.getLinkedIncomingNode(org.drools.workflow.core.Node.CONNECTION_DEFAULT_TYPE) == null) {
// errors.add(new ProcessValidationErrorImpl(process,
// "ForEach node '%s' [%d] has no linked start node"));
// }
// if (forEachNode.getLinkedOutgoingNode(org.drools.workflow.core.Node.CONNECTION_DEFAULT_TYPE) == null) {
// errors.add(new ProcessValidationErrorImpl(process,
// "ForEach node '%s' [%d] has no linked end node"));
// }
validateNodes(forEachNode.getNodes(), errors, process);
} else if (node instanceof DynamicNode) {
final DynamicNode dynamicNode = (DynamicNode) node;
if (dynamicNode.getDefaultIncomingConnections().size() == 0 && !acceptsNoIncomingConnections(dynamicNode)) {
addErrorMessage(process, node, errors, "Dynamic has no incoming connection");
}
if (dynamicNode.getDefaultOutgoingConnections().size() == 0 && !acceptsNoOutgoingConnections(dynamicNode)) {
addErrorMessage(process, node, errors, "Dynamic has no outgoing connection");
}
if ("".equals(dynamicNode.getCompletionExpression()) && !dynamicNode.isAutoComplete()) {
addErrorMessage(process, node, errors, "Dynamic has no completion condition set");
}
validateNodes(dynamicNode.getNodes(), errors, process);
} else if (node instanceof CompositeNode) {
final CompositeNode compositeNode = (CompositeNode) node;
for (Map.Entry<String, NodeAndType> inType: compositeNode.getLinkedIncomingNodes().entrySet()) {
if (compositeNode.getIncomingConnections(inType.getKey()).size() == 0 && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "Composite has no incoming connection for type " + inType.getKey());
}
if (inType.getValue().getNode() == null && !acceptsNoOutgoingConnections(node)) {
addErrorMessage(process, node, errors, "Composite has invalid linked incoming node for type " + inType.getKey());
}
}
for (Map.Entry<String, NodeAndType> outType: compositeNode.getLinkedOutgoingNodes().entrySet()) {
if (compositeNode.getOutgoingConnections(outType.getKey()).size() == 0) {
addErrorMessage(process, node, errors, "Composite has no outgoing connection for type " + outType.getKey());
}
if (outType.getValue().getNode() == null) {
addErrorMessage(process, node, errors, "Composite has invalid linked outgoing node for type " + outType.getKey());
}
}
if( compositeNode instanceof EventSubProcessNode ) {
if( compositeNode.getIncomingConnections().size() > 0 ) {
addErrorMessage(process, node, errors, "Event subprocess is not allowed to have any incoming connections." );
}
if( compositeNode.getOutgoingConnections().size() > 0 ) {
addErrorMessage(process, node, errors, "Event subprocess is not allowed to have any outgoing connections." );
}
Node [] eventSubProcessNodes = compositeNode.getNodes();
int startEventCount = 0;
for( int j = 0; j < eventSubProcessNodes.length; ++j ) {
if( eventSubProcessNodes[j] instanceof StartNode ) {
StartNode startNode = (StartNode) eventSubProcessNodes[j];
if( ++startEventCount == 2 ) {
addErrorMessage(process, compositeNode, errors, "Event subprocess is not allowed to have more than one start node.");
}
if( startNode.getTriggers() == null || startNode.getTriggers().isEmpty() ) {
addErrorMessage(process, startNode, errors, "Start in Event SubProcess '" + compositeNode.getName() + "' [" + compositeNode.getId() + "] must contain a trigger (event definition).");
}
}
}
} else {
Boolean isForCompensationObject = (Boolean) compositeNode.getMetaData("isForCompensation");
if( compositeNode.getIncomingConnections().size() == 0 && !Boolean.TRUE.equals(isForCompensationObject)) {
addErrorMessage(process, node, errors, "Embedded subprocess does not have incoming connection.");
}
if( compositeNode.getOutgoingConnections().size() == 0 && !Boolean.TRUE.equals(isForCompensationObject)) {
addErrorMessage(process, node, errors, "Embedded subprocess does not have outgoing connection.");
}
}
if (compositeNode.getTimers() != null) {
for (Timer timer: compositeNode.getTimers().keySet()) {
validateTimer(timer, node, process, errors);
}
}
validateNodes(compositeNode.getNodes(), errors, process);
} else if (node instanceof EventNode) {
final EventNode eventNode = (EventNode) node;
if (eventNode.getEventFilters().size() == 0) {
addErrorMessage(process, node, errors, "Event should specify an event type");
}
if (eventNode.getDefaultOutgoingConnections().size() == 0) {
addErrorMessage(process, node, errors, "Event has no outgoing connection");
} else {
List<EventFilter> eventFilters = eventNode.getEventFilters();
boolean compensationHandler = false;
for( EventFilter eventFilter : eventFilters ) {
if( ((EventTypeFilter) eventFilter).getType().startsWith("Compensation") ) {
compensationHandler = true;
break;
}
}
if( compensationHandler && eventNode instanceof BoundaryEventNode) {
Connection connection = eventNode.getDefaultOutgoingConnections().get(0);
Boolean isAssociation = (Boolean) connection.getMetaData().get("association");
if( isAssociation == null ) {
isAssociation = false;
}
if( ! (eventNode.getDefaultOutgoingConnections().size() == 1 && connection != null && isAssociation) ) {
addErrorMessage(process, node, errors, "Compensation Boundary Event is only allowed to have 1 association to 1 compensation activity.");
}
}
}
} else if (node instanceof FaultNode) {
final FaultNode faultNode = (FaultNode) node;
if (faultNode.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "Fault has no incoming connection.");
}
if (faultNode.getFaultName() == null) {
addErrorMessage(process, node, errors, "Fault has no fault name.");
}
} else if (node instanceof TimerNode) {
TimerNode timerNode = (TimerNode) node;
if (timerNode.getFrom() == null && !acceptsNoIncomingConnections(node)) {
addErrorMessage(process, node, errors, "Timer has no incoming connection.");
}
if (timerNode.getTo() == null && !acceptsNoOutgoingConnections(node)) {
addErrorMessage(process, node, errors, "Timer has no outgoing connection.");
}
if (timerNode.getTimer() == null) {
addErrorMessage(process, node, errors, "Timer has no timer specified.");
} else {
validateTimer(timerNode.getTimer(), node, process, errors);
}
} else if (node instanceof CatchLinkNode) {
// catchlink validation here, there also are validations in
// ProcessHandler regarding connection issues
}
else if (node instanceof ThrowLinkNode) {
// throw validation here, there also are validations in
// ProcessHandler regarding connection issues
}
else {
errors.add(new ProcessValidationErrorImpl(process,
"Unknown node type '" + node.getClass().getName() + "'"));
}
}
}
/**
 * Verifies that every node in the given container is reachable from some entry
 * point (start node, event node, linked incoming node, or — for dynamic
 * containers — any node without a default incoming connection), and reports an
 * error for each node that is not.
 */
private void checkAllNodesConnectedToStart(final NodeContainer container, boolean isDynamic,
        final List<ProcessValidationError> errors, RuleFlowProcess process) {
    // Reachability map: every node starts as FALSE and is flipped to TRUE by
    // processNode() when the graph walk visits it.
    final Map<Node, Boolean> reached = new HashMap<Node, Boolean>();
    final Node[] containerNodes;
    if (container instanceof CompositeNode) {
        containerNodes = ((CompositeNode) container).internalGetNodes();
    } else {
        containerNodes = container.getNodes();
    }
    final List<Node> eventNodes = new ArrayList<Node>();
    final List<CompositeNode> nestedComposites = new ArrayList<CompositeNode>();
    for (final Node current : containerNodes) {
        reached.put(current, Boolean.FALSE);
        if (current instanceof EventNode) {
            eventNodes.add(current);
        }
        if (current instanceof CompositeNode) {
            nestedComposites.add((CompositeNode) current);
        }
    }
    if (isDynamic) {
        // In an ad-hoc (dynamic) container, any node without a default incoming
        // connection is a legitimate entry point.
        for (final Node current : containerNodes) {
            if (current.getIncomingConnections(NodeImpl.CONNECTION_DEFAULT_TYPE).isEmpty()) {
                processNode(current, reached);
            }
        }
    } else {
        final List<Node> startNodes = RuleFlowProcess.getStartNodes(containerNodes);
        if (startNodes != null) {
            for (final Node startNode : startNodes) {
                processNode(startNode, reached);
            }
        }
        if (container instanceof CompositeNode) {
            // Linked incoming nodes of a composite act as additional entry points.
            for (final CompositeNode.NodeAndType linked : ((CompositeNode) container).getLinkedIncomingNodes().values()) {
                processNode(linked.getNode(), reached);
            }
        }
    }
    // Event nodes may be triggered externally, so treat them as reachable roots.
    for (final Node eventNode : eventNodes) {
        processNode(eventNode, reached);
    }
    // Recurse into nested composites, which have their own start semantics.
    for (final CompositeNode nested : nestedComposites) {
        checkAllNodesConnectedToStart(
                nested, nested instanceof DynamicNode, errors, process);
    }
    for (final Map.Entry<Node, Boolean> entry : reached.entrySet()) {
        final Node candidate = entry.getKey();
        if (Boolean.FALSE.equals(entry.getValue())
                && !(candidate instanceof StartNode)
                && !(candidate instanceof EventSubProcessNode)) {
            addErrorMessage(process, candidate, errors, "Has no connection to the start node.");
        }
    }
}
/**
 * Marks {@code node} as reached and recursively walks all of its outgoing
 * connections. A node is only expanded the first time it is reached, which
 * keeps the traversal terminating on cyclic graphs.
 *
 * @param node  the node to mark as visited
 * @param nodes reachability map built by {@code checkAllNodesConnectedToStart}
 * @throws IllegalStateException if a connection leads to a node that is not
 *         registered in the map (and is not one of the synthetic helper nodes)
 */
private void processNode(final Node node, final Map<Node, Boolean> nodes) {
    if (!nodes.containsKey(node) && !((node instanceof CompositeNodeEnd) || (node instanceof ForEachSplitNode) || (node instanceof ForEachJoinNode))) {
        throw new IllegalStateException("A process node is connected with a node that does not belong to the process: " + node.getName());
    }
    final Boolean prevValue = nodes.put(node, Boolean.TRUE);
    // BUGFIX: compare boxed Booleans by value, not by reference. The previous
    // 'prevValue == Boolean.FALSE' only worked because the canonical constant
    // happened to be stored in the map; any other Boolean instance would have
    // silently skipped the expansion.
    if (!Boolean.TRUE.equals(prevValue)) {
        for (final List<Connection> outgoing : node.getOutgoingConnections().values()) {
            for (final Connection connection : outgoing) {
                processNode(connection.getTo(), nodes);
            }
        }
    }
}
/**
 * A node may legally have no incoming connections when it lives in an ad-hoc
 * container: either a {@code DynamicNode} or a dynamic top-level process.
 */
private boolean acceptsNoIncomingConnections(Node node) {
    final NodeContainer parent = node.getNodeContainer();
    if (parent instanceof DynamicNode) {
        return true;
    }
    return parent instanceof WorkflowProcess && ((WorkflowProcess) parent).isDynamic();
}
/**
 * A node may legally have no outgoing connections when it lives in an ad-hoc
 * container: either a {@code DynamicNode} or a dynamic top-level process.
 */
private boolean acceptsNoOutgoingConnections(Node node) {
    final NodeContainer owner = node.getNodeContainer();
    if (owner instanceof WorkflowProcess) {
        return ((WorkflowProcess) owner).isDynamic();
    }
    return owner instanceof DynamicNode;
}
/**
 * Validates a timer's delay, period and date expressions by attempting to
 * parse each one, reporting an error message on the node for every value that
 * cannot be parsed. Expressions containing "#{" are resolved at runtime and
 * are therefore skipped here.
 */
private void validateTimer(final Timer timer, final Node node,
        final RuleFlowProcess process, final List<ProcessValidationError> errors) {
    if (timer.getDelay() == null && timer.getDate() == null) {
        addErrorMessage(process, node, errors, "Has timer with no delay or date specified.");
    } else {
        if (timer.getDelay() != null && !timer.getDelay().contains("#{")) {
            try {
                switch (timer.getTimeType()) {
                    case Timer.TIME_CYCLE:
                        // Idiom fix: the original had an empty then-branch
                        // ('if (valid) {} else { ... }'); invert the condition.
                        if (!CronExpression.isValidExpression(timer.getDelay())) {
                            // when using ISO date/time period is not set
                            DateTimeUtils.parseRepeatableDateTime(timer.getDelay());
                        }
                        break;
                    case Timer.TIME_DURATION:
                        DateTimeUtils.parseDuration(timer.getDelay());
                        break;
                    case Timer.TIME_DATE:
                        // NOTE(review): this case parses getDate() although the guard
                        // above checks getDelay() and the error below says "delay" --
                        // looks inconsistent, preserved as-is to avoid a behavior change.
                        DateTimeUtils.parseDateAsDuration(timer.getDate());
                        break;
                    default:
                        break;
                }
            } catch (RuntimeException e) {
                addErrorMessage(process, node, errors, "Could not parse delay '" + timer.getDelay() + "': " + e.getMessage());
            }
        }
    }
    if (timer.getPeriod() != null && !timer.getPeriod().contains("#{")) {
        try {
            // Same empty-branch inversion as above: only fall back to the ISO
            // repeatable-date parser when the period is not a valid cron expression.
            if (!CronExpression.isValidExpression(timer.getPeriod())) {
                // when using ISO date/time period is not set
                DateTimeUtils.parseRepeatableDateTime(timer.getPeriod());
            }
        } catch (RuntimeException e) {
            addErrorMessage(process, node, errors, "Could not parse period '" + timer.getPeriod() + "': " + e.getMessage());
        }
    }
    if (timer.getDate() != null && !timer.getDate().contains("#{")) {
        try {
            DateTimeUtils.parseDateAsDuration(timer.getDate());
        } catch (RuntimeException e) {
            addErrorMessage(process, node, errors, "Could not parse date '" + timer.getDate() + "': " + e.getMessage());
        }
    }
}
/**
 * Narrows the generic {@code Process} argument and delegates to the
 * {@code RuleFlowProcess}-specific overload.
 *
 * @throws IllegalArgumentException if the process is not a ruleflow process
 */
public ProcessValidationError[] validateProcess(Process process) {
    if (process instanceof RuleFlowProcess) {
        return validateProcess((RuleFlowProcess) process);
    }
    throw new IllegalArgumentException(
        "This validator can only validate ruleflow processes!");
}
/**
 * Reports an error for every process variable that is declared without a type.
 */
private void validateVariables(List<ProcessValidationError> errors, RuleFlowProcess process) {
    final List<Variable> declared = process.getVariableScope().getVariables();
    if (declared == null) {
        return;
    }
    for (final Variable variable : declared) {
        if (variable.getType() == null) {
            errors.add(new ProcessValidationErrorImpl(process, "Variable '" + variable.getName() + "' has no type."));
        }
    }
}
/**
 * This validator only handles ruleflow processes.
 *
 * @param process  the process to test
 * @param resource the resource the process was read from (not consulted here)
 * @return {@code true} iff the process type is {@code RuleFlowProcess.RULEFLOW_TYPE}
 */
@Override
public boolean accept(Process process, Resource resource) {
    // Idiom: return the condition directly rather than
    // 'if (cond) { return true; } return false;'.
    return RuleFlowProcess.RULEFLOW_TYPE.equals(process.getType());
}
/**
 * Validates a throw/end compensation event: its "Compensation" activity
 * reference must name an activity that exists in the process and whose
 * compensation scope is visible from the event node.
 */
protected void validateCompensationIntermediateOrEndEvent(Node node, RuleFlowProcess process, List<ProcessValidationError> errors) {
    if( node.getMetaData().containsKey("Compensation") ) {
        // Validate that activityRef in throw/end compensation event refers to "visible" compensation
        String activityRef = (String) node.getMetaData().get("Compensation");
        Node refNode = null;
        if( activityRef != null ) {
            // Breadth-first search over the whole process, descending into containers.
            Queue<Node> nodeQueue = new LinkedList<Node>();
            nodeQueue.addAll(Arrays.asList(process.getNodes()));
            while( ! nodeQueue.isEmpty() ) {
                Node polledNode = nodeQueue.poll();
                if( activityRef.equals(polledNode.getMetaData().get("UniqueId")) ) {
                    refNode = polledNode;
                    break;
                }
                // BUGFIX: descend into the node just polled from the queue. The old
                // code tested and expanded the outer 'node' parameter instead, so
                // children of sub-containers were never searched (or, if 'node'
                // itself was a container, its children were re-queued on every
                // iteration without progress).
                if( polledNode instanceof NodeContainer ) {
                    nodeQueue.addAll(Arrays.asList(((NodeContainer) polledNode).getNodes()));
                }
            }
        }
        if( refNode == null ) {
            addErrorMessage(process, node, errors,
                    "Does not reference an activity that exists (" + activityRef
                    + ") in its compensation event definition.");
        }
        CompensationScope compensationScope
            = (CompensationScope) ((NodeImpl) node).resolveContext(CompensationScope.COMPENSATION_SCOPE, activityRef);
        if( compensationScope == null ) {
            addErrorMessage(process, node, errors,
                    "References an activity (" + activityRef
                    + ") in its compensation event definition that is not visible to it.");
        }
    }
}
// Always opts in to compilation support; the exact semantics of this flag are
// defined by the validator interface being overridden.
@Override
public boolean compilationSupported() {
    return true;
}
/**
 * Adds a validation error for the given node, prefixing the supplied message
 * with the node's name and id.
 */
protected void addErrorMessage(RuleFlowProcess process, Node node, List<ProcessValidationError> errors, String message) {
    // BUGFIX: pass the message as a format argument instead of concatenating it
    // into the format string. Several messages embed node names (e.g. the Event
    // SubProcess message above), and a '%' in such a name would make
    // String.format throw or produce garbage.
    String error = String.format("Node '%s' [%d] %s", node.getName(), node.getId(), message);
    errors.add(new ProcessValidationErrorImpl(process, error));
}
}
| apache-2.0 |
apache/openejb | itests/openejb-itests-client/src/main/java/org/apache/openejb/test/stateless/StatelessPojoContextLookupTests.java | 7312 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.test.stateless;
import org.apache.openejb.test.TestManager;
import org.apache.openejb.test.TestFailureException;
/**
* [4] Should be run as the fourth test suite of the EncStatelessTestClients
*
* @author <a href="mailto:david.blevins@visi.com">David Blevins</a>
* @author <a href="mailto:Richard@Monson-Haefel.com">Richard Monson-Haefel</a>
*/
public class StatelessPojoContextLookupTests extends StatelessTestClient {

    // Remote home/component interfaces of the bean under test; resolved in setUp().
    protected EncStatelessHome ejbHome;
    protected EncStatelessObject ejbObject;

    public StatelessPojoContextLookupTests() {
        super("JNDI_ENC.");
    }

    /**
     * Looks up and creates the ContextLookupStatelessPojoBean, then creates the
     * entity table the lookup tests depend on.
     */
    protected void setUp() throws Exception {
        super.setUp();
        Object obj = initialContext.lookup("client/tests/stateless/ContextLookupStatelessPojoBean");
        ejbHome = (EncStatelessHome) javax.rmi.PortableRemoteObject.narrow(obj, EncStatelessHome.class);
        ejbObject = ejbHome.create();
        /*[2] Create database table */
        TestManager.getDatabase().createEntityTable();
    }

    /**
     * Tears down the fixture, for example, close a network connection.
     * This method is called after a test is executed.
     */
    protected void tearDown() throws Exception {
        // Cleanup: the previous 'catch (Exception e) { throw e; }' was a no-op
        // re-throw; try/finally alone has exactly the same behavior.
        try {
            /*[1] Drop database table */
            TestManager.getDatabase().dropEntityTable();
        } finally {
            super.tearDown();
        }
    }

    // Each test below delegates the actual assertion work to the server-side
    // bean. A TestFailureException carries the original JUnit error back from
    // the server and is rethrown; any other exception fails the test locally.

    public void test01_lookupStringEntry() {
        try {
            ejbObject.lookupStringEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test02_lookupDoubleEntry() {
        try {
            ejbObject.lookupDoubleEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test03_lookupLongEntry() {
        try {
            ejbObject.lookupLongEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test04_lookupFloatEntry() {
        try {
            ejbObject.lookupFloatEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test05_lookupIntegerEntry() {
        try {
            ejbObject.lookupIntegerEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test06_lookupShortEntry() {
        try {
            ejbObject.lookupShortEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test07_lookupBooleanEntry() {
        try {
            ejbObject.lookupBooleanEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test08_lookupByteEntry() {
        try {
            ejbObject.lookupByteEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test09_lookupCharacterEntry() {
        try {
            ejbObject.lookupCharacterEntry();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test10_lookupEntityBean() {
        try {
            ejbObject.lookupEntityBean();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test11_lookupStatefulBean() {
        try {
            ejbObject.lookupStatefulBean();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test12_lookupStatelessBean() {
        try {
            ejbObject.lookupStatelessBean();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test13_lookupResource() {
        try {
            ejbObject.lookupResource();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test14_lookupPersistenceUnit() {
        try {
            ejbObject.lookupPersistenceUnit();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test15_lookupPersistenceContext() {
        try {
            ejbObject.lookupPersistenceContext();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test18_lookupSessionContext() {
        try {
            ejbObject.lookupSessionContext();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    public void test23_lookupJMSConnectionFactory() {
        try {
            ejbObject.lookupJMSConnectionFactory();
        } catch (TestFailureException e) {
            throw e.error;
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }
}
| apache-2.0 |
leleuj/pac4j | pac4j-http/src/main/java/org/pac4j/http/client/FormClient.java | 7154 | /*
Copyright 2012 - 2014 Jerome Leleu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pac4j.http.client;
import org.pac4j.core.client.BaseClient;
import org.pac4j.core.client.Mechanism;
import org.pac4j.core.client.RedirectAction;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.exception.RequiresHttpAction;
import org.pac4j.core.exception.TechnicalException;
import org.pac4j.core.util.CommonHelper;
import org.pac4j.http.credentials.UsernamePasswordAuthenticator;
import org.pac4j.http.credentials.UsernamePasswordCredentials;
import org.pac4j.http.profile.HttpProfile;
import org.pac4j.http.profile.ProfileCreator;
/**
* This class is the client to authenticate users through HTTP form.
* <p />
* The login url of the form must be defined through the {@link #setLoginUrl(String)} method. For authentication, the user is redirected to
* this login form. The username and password inputs must be posted on the callback url. Their names can be defined by using the
* {@link #setUsernameParameter(String)} and {@link #setPasswordParameter(String)} methods.
* <p />
* It returns a {@link org.pac4j.http.profile.HttpProfile}.
*
* @see org.pac4j.http.profile.HttpProfile
* @author Jerome Leleu
* @since 1.4.0
*/
public class FormClient extends BaseHttpClient {

    // URL of the HTML login form; mandatory, checked in internalInit().
    private String loginUrl;

    public final static String ERROR_PARAMETER = "error";

    public final static String MISSING_FIELD_ERROR = "missing_field";

    public final static String DEFAULT_USERNAME_PARAMETER = "username";

    private String usernameParameter = DEFAULT_USERNAME_PARAMETER;

    public final static String DEFAULT_PASSWORD_PARAMETER = "password";

    private String passwordParameter = DEFAULT_PASSWORD_PARAMETER;

    public FormClient() {
    }

    public FormClient(final String loginUrl, final UsernamePasswordAuthenticator usernamePasswordAuthenticator) {
        setLoginUrl(loginUrl);
        setUsernamePasswordAuthenticator(usernamePasswordAuthenticator);
    }

    public FormClient(final String loginUrl, final UsernamePasswordAuthenticator usernamePasswordAuthenticator,
                      final ProfileCreator profileCreator) {
        this(loginUrl, usernamePasswordAuthenticator);
        setProfileCreator(profileCreator);
    }

    @Override
    protected BaseClient<UsernamePasswordCredentials, HttpProfile> newClient() {
        // NOTE(review): the authenticator and profile creator are not copied here;
        // presumably the base class transfers them during cloning -- confirm
        // before relying on a fresh client produced by this method.
        final FormClient newClient = new FormClient();
        newClient.setLoginUrl(this.loginUrl);
        newClient.setUsernameParameter(this.usernameParameter);
        newClient.setPasswordParameter(this.passwordParameter);
        return newClient;
    }

    @Override
    protected void internalInit() {
        super.internalInit();
        CommonHelper.assertNotBlank("loginUrl", this.loginUrl);
    }

    @Override
    protected RedirectAction retrieveRedirectAction(final WebContext context) {
        return RedirectAction.redirect(this.loginUrl);
    }

    /**
     * Reads the username/password request parameters and validates them. On a
     * missing or invalid credential the user is redirected back to the login
     * form with the username and an error code appended as query parameters.
     *
     * @throws RequiresHttpAction on any credential failure (redirect to the form)
     */
    @Override
    protected UsernamePasswordCredentials retrieveCredentials(final WebContext context) throws RequiresHttpAction {
        final String username = context.getRequestParameter(this.usernameParameter);
        final String password = context.getRequestParameter(this.passwordParameter);
        if (CommonHelper.isNotBlank(username) && CommonHelper.isNotBlank(password)) {
            final UsernamePasswordCredentials credentials = new UsernamePasswordCredentials(username, password,
                    getName());
            logger.debug("usernamePasswordCredentials : {}", credentials);
            try {
                // validate credentials
                this.usernamePasswordAuthenticator.validate(credentials);
            } catch (final TechnicalException e) {
                throw redirectToForm(context, username, computeErrorMessage(e),
                        "Credentials validation fails -> return to the form with error");
            }
            return credentials;
        }
        throw redirectToForm(context, username, MISSING_FIELD_ERROR,
                "Username and password cannot be blank -> return to the form with error");
    }

    /**
     * Shared failure path (previously duplicated in both branches of
     * {@link #retrieveCredentials}): builds the redirection URL back to the login
     * form with the username and error code, logs it, and returns the redirect
     * action for the caller to throw.
     */
    private RequiresHttpAction redirectToForm(final WebContext context, final String username,
                                              final String errorMessage, final String message) {
        String redirectionUrl = CommonHelper.addParameter(this.loginUrl, this.usernameParameter, username);
        redirectionUrl = CommonHelper.addParameter(redirectionUrl, ERROR_PARAMETER, errorMessage);
        logger.debug("redirectionUrl : {}", redirectionUrl);
        logger.debug(message);
        return RequiresHttpAction.redirect(message, context, redirectionUrl);
    }

    /**
     * Return the error message depending on the thrown exception. Can be overridden for other message computation.
     *
     * @param e the exception raised during credential validation
     * @return the error message (the exception's simple class name by default)
     */
    protected String computeErrorMessage(final TechnicalException e) {
        return e.getClass().getSimpleName();
    }

    public String getLoginUrl() {
        return this.loginUrl;
    }

    public void setLoginUrl(final String loginUrl) {
        this.loginUrl = loginUrl;
    }

    public String getUsernameParameter() {
        return this.usernameParameter;
    }

    public void setUsernameParameter(final String usernameParameter) {
        this.usernameParameter = usernameParameter;
    }

    public String getPasswordParameter() {
        return this.passwordParameter;
    }

    public void setPasswordParameter(final String passwordParameter) {
        this.passwordParameter = passwordParameter;
    }

    @Override
    public String toString() {
        return CommonHelper.toString(this.getClass(), "callbackUrl", this.callbackUrl, "name", getName(), "loginUrl",
                this.loginUrl, "usernameParameter", this.usernameParameter, "passwordParameter",
                this.passwordParameter, "usernamePasswordAuthenticator",
                getUsernamePasswordAuthenticator(), "profileCreator", getProfileCreator());
    }

    @Override
    protected boolean isDirectRedirection() {
        return true;
    }

    @Override
    public Mechanism getMechanism() {
        return Mechanism.FORM_MECHANISM;
    }
}
| apache-2.0 |
v-drinkup/alpaca | alpaca/alpaca-memqueue/src/main/java/net/vdrinkup/alpaca/memqueue/MemQueueConsumer.java | 437 | package net.vdrinkup.alpaca.memqueue;
public class MemQueueConsumer {
private MemoryQueue queue;
public MemQueueConsumer( String name ) {
this.queue = MemoryQueueManager.getInstance().lookup( name );
}
@SuppressWarnings( "unchecked" )
public < T > T consume() {
Object t = null;
try {
t = this.queue.getProcessor().take();
} catch ( Exception e ) {
e.printStackTrace();
}
return ( T ) t;
}
} | apache-2.0 |
wilkinsona/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/liquibase/LiquibaseAutoConfiguration.java | 9056 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.liquibase;
import java.util.function.Supplier;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import liquibase.change.DatabaseChange;
import liquibase.integration.spring.SpringLiquibase;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.AnyNestedCondition;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.data.jpa.EntityManagerFactoryDependsOnPostProcessor;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.boot.autoconfigure.jdbc.JdbcOperationsDependsOnPostProcessor;
import org.springframework.boot.autoconfigure.jdbc.NamedParameterJdbcOperationsDependsOnPostProcessor;
import org.springframework.boot.autoconfigure.liquibase.LiquibaseAutoConfiguration.LiquibaseDataSourceCondition;
import org.springframework.boot.autoconfigure.liquibase.LiquibaseAutoConfiguration.LiquibaseJdbcOperationsDependsOnPostProcessor;
import org.springframework.boot.autoconfigure.liquibase.LiquibaseAutoConfiguration.LiquibaseNamedParameterJdbcOperationsDependsOnPostProcessor;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations;
import org.springframework.orm.jpa.AbstractEntityManagerFactoryBean;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
/**
 * {@link EnableAutoConfiguration Auto-configuration} for Liquibase.
 *
 * @author Marcel Overdijk
 * @author Dave Syer
 * @author Phillip Webb
 * @author Eddú Meléndez
 * @author Andy Wilkinson
 * @author Dominic Gunn
 * @author Dan Zheng
 * @author András Deák
 * @since 1.1.0
 */
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass({ SpringLiquibase.class, DatabaseChange.class })
@ConditionalOnProperty(prefix = "spring.liquibase", name = "enabled", matchIfMissing = true)
@Conditional(LiquibaseDataSourceCondition.class)
@AutoConfigureAfter({ DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class })
@Import({ LiquibaseJdbcOperationsDependsOnPostProcessor.class,
        LiquibaseNamedParameterJdbcOperationsDependsOnPostProcessor.class })
public class LiquibaseAutoConfiguration {

    /**
     * Exposes a {@link LiquibaseSchemaManagementProvider} built from all
     * {@link SpringLiquibase} beans in the context.
     *
     * @param liquibases provider for any SpringLiquibase beans
     * @return the schema management provider
     */
    @Bean
    public LiquibaseSchemaManagementProvider liquibaseDefaultDdlModeProvider(
            ObjectProvider<SpringLiquibase> liquibases) {
        return new LiquibaseSchemaManagementProvider(liquibases);
    }

    /**
     * Creates the {@link SpringLiquibase} bean from {@code spring.liquibase.*}
     * properties, unless the application already defines one.
     */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnMissingBean(SpringLiquibase.class)
    @EnableConfigurationProperties({ DataSourceProperties.class, LiquibaseProperties.class })
    @Import(LiquibaseEntityManagerFactoryDependsOnPostProcessor.class)
    public static class LiquibaseConfiguration {

        private final LiquibaseProperties properties;

        public LiquibaseConfiguration(LiquibaseProperties properties) {
            this.properties = properties;
        }

        /**
         * Builds the {@link SpringLiquibase} bean and copies every supported
         * {@code spring.liquibase.*} property onto it.
         *
         * @param dataSourceProperties the application's DataSource properties
         * @param dataSource provider for the application DataSource (if unique)
         * @param liquibaseDataSource provider for a DataSource annotated with
         *     {@code @LiquibaseDataSource}, taking precedence if present
         * @return the configured SpringLiquibase instance
         */
        @Bean
        public SpringLiquibase liquibase(DataSourceProperties dataSourceProperties,
                ObjectProvider<DataSource> dataSource,
                @LiquibaseDataSource ObjectProvider<DataSource> liquibaseDataSource) {
            SpringLiquibase liquibase = createSpringLiquibase(liquibaseDataSource.getIfAvailable(),
                    dataSource.getIfUnique(), dataSourceProperties);
            liquibase.setChangeLog(this.properties.getChangeLog());
            liquibase.setContexts(this.properties.getContexts());
            liquibase.setDefaultSchema(this.properties.getDefaultSchema());
            liquibase.setLiquibaseSchema(this.properties.getLiquibaseSchema());
            liquibase.setLiquibaseTablespace(this.properties.getLiquibaseTablespace());
            liquibase.setDatabaseChangeLogTable(this.properties.getDatabaseChangeLogTable());
            liquibase.setDatabaseChangeLogLockTable(this.properties.getDatabaseChangeLogLockTable());
            liquibase.setDropFirst(this.properties.isDropFirst());
            liquibase.setShouldRun(this.properties.isEnabled());
            liquibase.setLabels(this.properties.getLabels());
            liquibase.setChangeLogParameters(this.properties.getParameters());
            liquibase.setRollbackFile(this.properties.getRollbackFile());
            liquibase.setTestRollbackOnUpdate(this.properties.isTestRollbackOnUpdate());
            return liquibase;
        }

        // DataSource selection: an existing DataSource (dedicated Liquibase one, or the
        // application's) is used as-is; otherwise a new DataSource is built from the
        // spring.liquibase url/user/password and closed after the update runs
        // (DataSourceClosingSpringLiquibase).
        private SpringLiquibase createSpringLiquibase(DataSource liquibaseDatasource, DataSource dataSource,
                DataSourceProperties dataSourceProperties) {
            DataSource liquibaseDataSource = getDataSource(liquibaseDatasource, dataSource);
            if (liquibaseDataSource != null) {
                SpringLiquibase liquibase = new SpringLiquibase();
                liquibase.setDataSource(liquibaseDataSource);
                return liquibase;
            }
            SpringLiquibase liquibase = new DataSourceClosingSpringLiquibase();
            liquibase.setDataSource(createNewDataSource(dataSourceProperties));
            return liquibase;
        }

        // Returns the DataSource Liquibase should reuse, or null when a dedicated one
        // must be created. The application DataSource is only reused when no
        // Liquibase-specific url/user has been configured.
        private DataSource getDataSource(DataSource liquibaseDataSource, DataSource dataSource) {
            if (liquibaseDataSource != null) {
                return liquibaseDataSource;
            }
            if (this.properties.getUrl() == null && this.properties.getUser() == null) {
                return dataSource;
            }
            return null;
        }

        // Builds a dedicated DataSource, falling back to the application's
        // url/username/password for any value not set under spring.liquibase.
        private DataSource createNewDataSource(DataSourceProperties dataSourceProperties) {
            String url = getProperty(this.properties::getUrl, dataSourceProperties::determineUrl);
            String user = getProperty(this.properties::getUser, dataSourceProperties::determineUsername);
            String password = getProperty(this.properties::getPassword, dataSourceProperties::determinePassword);
            return DataSourceBuilder.create().url(url).username(user).password(password).build();
        }

        // Returns the Liquibase-specific value when set, otherwise the default.
        private String getProperty(Supplier<String> property, Supplier<String> defaultValue) {
            String value = property.get();
            return (value != null) ? value : defaultValue.get();
        }

    }

    /**
     * Post processor to ensure that {@link EntityManagerFactory} beans depend on the
     * liquibase bean.
     */
    @ConditionalOnClass(LocalContainerEntityManagerFactoryBean.class)
    @ConditionalOnBean(AbstractEntityManagerFactoryBean.class)
    static class LiquibaseEntityManagerFactoryDependsOnPostProcessor
            extends EntityManagerFactoryDependsOnPostProcessor {

        LiquibaseEntityManagerFactoryDependsOnPostProcessor() {
            super(SpringLiquibase.class);
        }

    }

    /**
     * Additional configuration to ensure that {@link JdbcOperations} beans depend on the
     * liquibase bean.
     */
    @ConditionalOnClass(JdbcOperations.class)
    @ConditionalOnBean(JdbcOperations.class)
    static class LiquibaseJdbcOperationsDependsOnPostProcessor extends JdbcOperationsDependsOnPostProcessor {

        LiquibaseJdbcOperationsDependsOnPostProcessor() {
            super(SpringLiquibase.class);
        }

    }

    /**
     * Post processor to ensure that {@link NamedParameterJdbcOperations} beans depend on
     * the liquibase bean.
     */
    @ConditionalOnClass(NamedParameterJdbcOperations.class)
    @ConditionalOnBean(NamedParameterJdbcOperations.class)
    static class LiquibaseNamedParameterJdbcOperationsDependsOnPostProcessor
            extends NamedParameterJdbcOperationsDependsOnPostProcessor {

        LiquibaseNamedParameterJdbcOperationsDependsOnPostProcessor() {
            super(SpringLiquibase.class);
        }

    }

    /**
     * Gate for the whole auto-configuration: matches when either a DataSource bean
     * exists or an explicit spring.liquibase.url has been configured.
     */
    static final class LiquibaseDataSourceCondition extends AnyNestedCondition {

        LiquibaseDataSourceCondition() {
            super(ConfigurationPhase.REGISTER_BEAN);
        }

        @ConditionalOnBean(DataSource.class)
        private static final class DataSourceBeanCondition {

        }

        @ConditionalOnProperty(prefix = "spring.liquibase", name = "url", matchIfMissing = false)
        private static final class LiquibaseUrlCondition {

        }

    }

}
| apache-2.0 |
RamonQuirino/tatuape | EJB/src/main/java/Service/DAO/Impl/AdministradorDAOImpl.java | 494 | package Service.DAO.Impl;
import Model.AdministradorModel;
import Service.DAO.AdministradorDAO;
import javax.ejb.Stateless;
import java.io.Serializable;
/**
* Created by ramon on 13/03/16.
*/
@Stateless
public class AdministradorDAOImpl extends BaseDAO implements AdministradorDAO, Serializable {

    /**
     * Loads the administrator entity identified by the given primary key.
     *
     * @param id primary key of the administrator
     * @return the matching {@code AdministradorModel}, or {@code null} if none exists
     */
    public AdministradorModel getAdministradorById(Integer id) {
        return super.find(AdministradorModel.class, id);
    }
}
| apache-2.0 |
remibergsma/cosmic | cosmic-core/plugins/api/rate-limit/src/main/java/com/cloud/ratelimit/ApiRateLimitServiceImpl.java | 6345 | package com.cloud.ratelimit;
import com.cloud.acl.APIChecker;
import com.cloud.api.command.admin.ratelimit.ResetApiLimitCmd;
import com.cloud.api.command.user.ratelimit.GetApiLimitCmd;
import com.cloud.api.response.ApiLimitResponse;
import com.cloud.configuration.Config;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.RequestLimitException;
import com.cloud.framework.config.dao.ConfigurationDao;
import com.cloud.user.Account;
import com.cloud.user.AccountService;
import com.cloud.user.User;
import com.cloud.utils.component.AdapterBase;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class ApiRateLimitServiceImpl extends AdapterBase implements APIChecker, ApiRateLimitService {
    private static final Logger s_logger = LoggerFactory.getLogger(ApiRateLimitServiceImpl.class);
    @Inject
    AccountService _accountService;
    @Inject
    ConfigurationDao _configDao;
    /**
     * True if api rate limiting is enabled
     */
    private boolean enabled = false;
    /**
     * Fixed time duration where api rate limit is set, in seconds
     */
    private int timeToLive = 1;
    /**
     * Max number of api requests during timeToLive duration.
     */
    private int maxAllowed = 30;
    // Backing store for per-account request counters; built lazily in configure().
    private LimitStore _store = null;

    /**
     * Reads the global rate-limit settings (enabled flag, interval, max requests,
     * cache size) and builds the EhCache-backed counter store. Cache entries expire
     * after {@code timeToLive} seconds, which implements the fixed-window limit.
     */
    @Override
    public boolean configure(final String name, final Map<String, Object> params) throws ConfigurationException {
        super.configure(name, params);
        if (_store == null) {
            // get global configured duration and max values
            final String isEnabled = _configDao.getValue(Config.ApiLimitEnabled.key());
            if (isEnabled != null) {
                enabled = Boolean.parseBoolean(isEnabled);
            }
            final String duration = _configDao.getValue(Config.ApiLimitInterval.key());
            if (duration != null) {
                timeToLive = Integer.parseInt(duration);
            }
            final String maxReqs = _configDao.getValue(Config.ApiLimitMax.key());
            if (maxReqs != null) {
                maxAllowed = Integer.parseInt(maxReqs);
            }
            // create limit store
            final EhcacheLimitStore cacheStore = new EhcacheLimitStore();
            int maxElements = 10000;
            final String cachesize = _configDao.getValue(Config.ApiLimitCacheSize.key());
            if (cachesize != null) {
                maxElements = Integer.parseInt(cachesize);
            }
            final CacheManager cm = CacheManager.create();
            // timeToLive doubles as both TTL and idle timeout for the cache entries.
            final Cache cache = new Cache("api-limit-cache", maxElements, false, false, timeToLive, timeToLive);
            cm.addCache(cache);
            s_logger.info("Limit Cache created with timeToLive=" + timeToLive + ", maxAllowed=" + maxAllowed + ", maxElements=" + maxElements);
            cacheStore.setCache(cache);
            _store = cacheStore;
        }
        return true;
    }

    /**
     * Reports the caller's current rate-limit status: how many calls were issued
     * in the current window, how many remain, and when the window expires.
     * A missing entry is (re)created with a zero count.
     */
    @Override
    public ApiLimitResponse searchApiLimit(final Account caller) {
        final ApiLimitResponse response = new ApiLimitResponse();
        response.setAccountId(caller.getUuid());
        response.setAccountName(caller.getAccountName());
        StoreEntry entry = _store.get(caller.getId());
        if (entry == null) {
            /* Populate the entry, thus unlocking any underlying mutex */
            entry = _store.create(caller.getId(), timeToLive);
            response.setApiIssued(0);
            response.setApiAllowed(maxAllowed);
            response.setExpireAfter(timeToLive);
        } else {
            response.setApiIssued(entry.getCounter());
            response.setApiAllowed(maxAllowed - entry.getCounter());
            response.setExpireAfter(entry.getExpireDuration());
        }
        return response;
    }

    /**
     * Resets the counter for one account, or — when {@code accountId} is null —
     * for every account in the store.
     */
    @Override
    public boolean resetApiLimit(final Long accountId) {
        if (accountId != null) {
            _store.create(accountId, timeToLive);
        } else {
            _store.resetCounters();
        }
        return true;
    }

    @Override
    public void setTimeToLive(final int timeToLive) {
        this.timeToLive = timeToLive;
    }

    @Override
    public void setMaxAllowed(final int max) {
        maxAllowed = max;
    }

    @Override
    public void setEnabled(final boolean enabled) {
        this.enabled = enabled;
    }

    /**
     * Throttle check invoked per API call. Root admins and disabled throttling
     * always pass. Otherwise the account's window counter is incremented and a
     * {@link RequestLimitException} is thrown once it exceeds {@code maxAllowed}.
     *
     * @throws PermissionDeniedException declared by the APIChecker contract
     * @throws RequestLimitException when the account exceeded its window quota
     */
    @Override
    public boolean checkAccess(final User user, final String apiCommandName) throws PermissionDeniedException {
        // check if api rate limiting is enabled or not
        if (!enabled) {
            return true;
        }
        final Long accountId = user.getAccountId();
        final Account account = _accountService.getAccount(accountId);
        if (_accountService.isRootAdmin(account.getId())) {
            // no API throttling on root admin
            return true;
        }
        StoreEntry entry = _store.get(accountId);
        if (entry == null) {
            /* Populate the entry, thus unlocking any underlying mutex */
            entry = _store.create(accountId, timeToLive);
        }
        /* Increment the client count and see whether we have hit the maximum allowed clients yet. */
        final int current = entry.incrementAndGet();
        if (current <= maxAllowed) {
            s_logger.trace("account (" + account.getAccountId() + "," + account.getAccountName() + ") has current count = " + current);
            return true;
        } else {
            final long expireAfter = entry.getExpireDuration();
            // for this exception, we can just show the same message to user and admin users.
            final String msg = "The given user has reached his/her account api limit, please retry after " + expireAfter + " ms.";
            s_logger.warn(msg);
            throw new RequestLimitException(msg);
        }
    }

    /**
     * Registers the admin/user API commands exposed by this plugin.
     */
    @Override
    public List<Class<?>> getCommands() {
        final List<Class<?>> cmdList = new ArrayList<>();
        cmdList.add(ResetApiLimitCmd.class);
        cmdList.add(GetApiLimitCmd.class);
        return cmdList;
    }
}
| apache-2.0 |
freeVM/freeVM | enhanced/archive/classlib/modules/rmi2/rmi-1.4.2/src/ar/org/fitc/test/rmi/tunneling/testrunner/RemoteCalculatorClientSunHttpCgi.java | 345 | package ar.org.fitc.test.rmi.tunneling.testrunner;
import ar.org.fitc.test.rmi.tunneling.integration.RemoteCalculatorClient;
/**
 * Test-runner entry point that launches the remote calculator client
 * preconfigured for HTTP-to-CGI tunneling on the Sun RMI implementation.
 */
public class RemoteCalculatorClientSunHttpCgi {

    public static void main(String[] args) {
        // Fixed arguments: tunnel mode, proxy host/port, server host, vendor.
        RemoteCalculatorClient.main(
                new String[] {"cgi", "90.0.0.1", "3128", "10.100.2.246", "sun"});
    }
}
| apache-2.0 |
HenningLanghorst/java8-functional | src/main/java/de/henninglanghorst/functional/sql/function/Consumer.java | 336 | package de.henninglanghorst.functional.sql.function;
import java.sql.SQLException;
/**
 * Functional interface for an operation that accepts a single argument of
 * type {@code T}, returns no result, and may throw an {@link SQLException}.
 * <p/>
 * Mirrors {@code java.util.function.Consumer} but allows checked SQL
 * exceptions so it can be used in JDBC helper code.
 *
 * @param <T> the type of the input to the operation
 */
@FunctionalInterface
public interface Consumer<T> {

    /**
     * Performs this operation on the given argument.
     *
     * @param t the input argument
     * @throws SQLException if a database access error occurs
     */
    void accept(T t) throws SQLException;
}
| apache-2.0 |
elminsterjimmy/java | Commons/src/main/java/com/elminster/common/exception/CloseException.java | 237 | package com.elminster.common.exception;
public class CloseException extends RuntimeException {
/** */
private static final long serialVersionUID = -5175850852094662459L;
public CloseException(Throwable t) {
super(t);
}
}
| apache-2.0 |
citlab/Intercloud | xmpp-occi/src/main/java/de/tu_berlin/cit/intercloud/occi/monitoring/CollectorLink.java | 2450 | /**
* Copyright 2010-2015 Complex and Distributed IT Systems, TU Berlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tu_berlin.cit.intercloud.occi.monitoring;
import de.tu_berlin.cit.intercloud.occi.core.annotations.Attribute;
import de.tu_berlin.cit.intercloud.occi.core.annotations.Link;
import de.tu_berlin.cit.intercloud.occi.core.annotations.Attribute.AttributeType;
import de.tu_berlin.cit.intercloud.occi.core.annotations.LinkCategory;
/**
 * OCCI link category for attaching a monitoring collector to a resource.
 * The link's relation targets the meter kind (see the {@code @Link}
 * annotation below), and the link carries the measurement unit plus the
 * collector's freshness state.
 *
 * @author Alexander Stanik &lt;alexander.stanik@tu-berlin.de&gt;
 * @author Daniel Thilo Schroeder &lt;daniel.schroeder@mailbox.tu-berlin.de&gt;
 */
@Link(schema = MonitoringSchemas.MonitoringSchema, term = CollectorLink.CollectorTerm,
        relation = MonitoringSchemas.MonitoringSchema + MeterKind.MeterTerm)
public class CollectorLink extends LinkCategory {

    public final static String CollectorTitle = "Collector Link";

    public final static String CollectorTerm = "collector";

    // Constructs the link with the default human-readable title.
    public CollectorLink() {
        super(CollectorTitle);
    }

    // Constructs the link with a caller-supplied title.
    public CollectorLink(String title) {
        super(title);
    }

    // Freshness of the collected data: upToDate or obsolete.
    public enum State {
        upToDate,
        obsolete
    }

    /**
     * The unit of the measurement.
     */
    @Attribute(name = "occi.collector.unit",
            type = AttributeType.STRING,
            mutable = false,
            required = false,
            description = "The unit of the measurement.")
    public String unit = null;

    /**
     * Current state of the instance: Enum{upToDate, obsolete}
     */
    @Attribute(name = "occi.collector.state",
            type = AttributeType.ENUM,
            mutable = false,
            required = true,
            description = "Current state of the instance: Enum{upToDate, obsolete}")
    public State state = null;

    /**
     * Human-readable explanation of the current instance state
     */
    @Attribute(name = "occi.collector.message",
            type = AttributeType.STRING,
            mutable = false,
            required = false,
            description = "Human-readable explanation of the current instance state")
    public String message = null;
}
pellcorp/fop | src/java/org/apache/fop/render/java2d/Base14FontCollection.java | 10256 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: Base14FontCollection.java 1296496 2012-03-02 22:19:46Z gadams $ */
package org.apache.fop.render.java2d;
import org.apache.fop.fonts.Font;
import org.apache.fop.fonts.FontCollection;
import org.apache.fop.fonts.FontInfo;
/**
 * A base 14 font collection for graphics 2D. Registers the twelve
 * SansSerif/Serif/MonoSpaced style variants plus Symbol and ZapfDingbats
 * under keys F1..F14, then maps the standard CSS and PostScript family
 * names (including PassiveTex compatibility aliases) onto those keys.
 */
public class Base14FontCollection implements FontCollection {

    /** required when creating new instances of SystemFontMetricsMapper */
    private final Java2DFontMetrics java2DFontMetrics;

    /**
     * Main constructor
     * @param java2DFontMetrics required when creating new instances of SystemFontMetricsMapper
     */
    public Base14FontCollection(Java2DFontMetrics java2DFontMetrics) {
        this.java2DFontMetrics = java2DFontMetrics;
    }

    /** {@inheritDoc} */
    public int setup(int start, FontInfo fontInfo) {
        /*
         * available java fonts are:
         * Serif - bold, normal, italic, bold-italic
         * SansSerif - bold, normal, italic, bold-italic
         * MonoSpaced - bold, normal, italic, bold-italic
         */
        registerMetrics(fontInfo);
        registerFamilies(fontInfo);
        return 15;
    }

    /** Creates a metrics mapper for the given AWT font family and style. */
    private FontMetricsMapper mapper(String family, int style) {
        return new SystemFontMetricsMapper(family, style, java2DFontMetrics);
    }

    /**
     * Registers the metric objects for one AWT family under four font keys,
     * in the order normal, italic, bold, bold-italic.
     */
    private void registerStyleMetrics(FontInfo fontInfo, String family,
            String normalKey, String italicKey, String boldKey, String boldItalicKey) {
        fontInfo.addMetrics(normalKey, mapper(family, java.awt.Font.PLAIN));
        fontInfo.addMetrics(italicKey, mapper(family, java.awt.Font.ITALIC));
        fontInfo.addMetrics(boldKey, mapper(family, java.awt.Font.BOLD));
        fontInfo.addMetrics(boldItalicKey, mapper(family, java.awt.Font.BOLD + java.awt.Font.ITALIC));
    }

    /**
     * Registers all metric objects: F1-F4 SansSerif, F5-F8 Serif,
     * F9-F12 MonoSpaced, F13/F14 Symbol/ZapfDingbats.
     */
    private void registerMetrics(FontInfo fontInfo) {
        registerStyleMetrics(fontInfo, "SansSerif", "F1", "F2", "F3", "F4");
        registerStyleMetrics(fontInfo, "Serif", "F5", "F6", "F7", "F8");
        registerStyleMetrics(fontInfo, "MonoSpaced", "F9", "F10", "F11", "F12");
        // "Symbol" doesn't seem to work here, but "Serif" does the job just fine. *shrug*
        // F13 (Symbol) and F14 (ZapfDingbats) deliberately share one metric object.
        FontMetricsMapper symbolMetric = mapper("Serif", java.awt.Font.PLAIN);
        fontInfo.addMetrics("F13", symbolMetric);
        fontInfo.addMetrics("F14", symbolMetric);
    }

    /**
     * Maps one font-family name onto four registered metric keys, covering
     * normal/italic/oblique style and normal/bold weight combinations.
     */
    private static void addFamily(FontInfo fontInfo, String family,
            String normalKey, String italicKey, String boldKey, String boldItalicKey) {
        fontInfo.addFontProperties(normalKey, family, Font.STYLE_NORMAL, Font.WEIGHT_NORMAL);
        fontInfo.addFontProperties(italicKey, family, Font.STYLE_OBLIQUE, Font.WEIGHT_NORMAL);
        fontInfo.addFontProperties(italicKey, family, Font.STYLE_ITALIC, Font.WEIGHT_NORMAL);
        fontInfo.addFontProperties(boldKey, family, Font.STYLE_NORMAL, Font.WEIGHT_BOLD);
        fontInfo.addFontProperties(boldItalicKey, family, Font.STYLE_OBLIQUE, Font.WEIGHT_BOLD);
        fontInfo.addFontProperties(boldItalicKey, family, Font.STYLE_ITALIC, Font.WEIGHT_BOLD);
    }

    /** Maps all supported family names onto the registered metric keys. */
    private static void registerFamilies(FontInfo fontInfo) {
        /* any is treated as serif */
        addFamily(fontInfo, "any", "F5", "F6", "F7", "F8");
        addFamily(fontInfo, "sans-serif", "F1", "F2", "F3", "F4");
        addFamily(fontInfo, "serif", "F5", "F6", "F7", "F8");
        addFamily(fontInfo, "monospace", "F9", "F10", "F11", "F12");
        addFamily(fontInfo, "Helvetica", "F1", "F2", "F3", "F4");
        addFamily(fontInfo, "Times", "F5", "F6", "F7", "F8");
        addFamily(fontInfo, "Courier", "F9", "F10", "F11", "F12");
        fontInfo.addFontProperties("F13", "Symbol", Font.STYLE_NORMAL, Font.WEIGHT_NORMAL);
        fontInfo.addFontProperties("F14", "ZapfDingbats", Font.STYLE_NORMAL, Font.WEIGHT_NORMAL);
        /* for compatibility with PassiveTex */
        addFamily(fontInfo, "Times-Roman", "F5", "F6", "F7", "F8");
        addFamily(fontInfo, "Times Roman", "F5", "F6", "F7", "F8");
        fontInfo.addFontProperties("F9", "Computer-Modern-Typewriter",
                "normal", Font.WEIGHT_NORMAL);
    }
}
| apache-2.0 |
gtkrug/authy_shib | src/main/java/net/gfipm/idp/AuthyTwoFactorLoginServlet.java | 14913 | /*
* Licensed to the University Corporation for Advanced Internet Development,
* Inc. (UCAID) under one or more contributor license agreements. See the
* NOTICE file distributed with this work for additional information regarding
* copyright ownership. The UCAID licenses this file to You under the Apache
* License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Copyright 2014 Georgia Tech Research Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.gfipm.idp;
import com.authy.*;
import com.authy.api.*;
import java.io.IOException;
import java.security.Principal;
import java.util.Set;
import java.util.Iterator;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.LoginException;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.opensaml.saml2.core.AuthnContext;
import org.opensaml.xml.util.DatatypeHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.internet2.middleware.shibboleth.idp.authn.AuthenticationEngine;
import edu.internet2.middleware.shibboleth.idp.authn.AuthenticationException;
import edu.internet2.middleware.shibboleth.idp.authn.LoginHandler;
import edu.internet2.middleware.shibboleth.idp.authn.UsernamePrincipal;
import edu.internet2.middleware.shibboleth.idp.authn.provider.UsernamePasswordCredential;
/**
* This Servlet authenticates a user via JAAS, and then authenticates with
* Authy for a second factor. Based on UsernamePasswordLoginServlet and DuoSecurity.
* The user's credential is always added to the returned {@link Subject} as
* a {@link UsernamePasswordCredential} within the subject's private credentials.
*
* By default, this Servlet assumes that the authentication method {@value AuthnContext#PPT_AUTHN_CTX} to be returned to
* the authentication engine. This can be override by setting the servlet configuration parameter
* {@value LoginHandler#AUTHENTICATION_METHOD_KEY}.
*/
public class AuthyTwoFactorLoginServlet extends HttpServlet {
/** Serial version UID. */
private static final long serialVersionUID = 20140328L;
/** Class logger. */
private final Logger log = LoggerFactory.getLogger(AuthyTwoFactorLoginServlet.class);
/** The authentication method returned to the authentication engine. */
private String authenticationMethod;
/** Name of JAAS configuration used to authenticate users. */
private String jaasConfigName = "ShibUserPassAuth";
/** init-param which can be passed to the servlet to override the default JAAS config. */
private final String jaasInitParam = "jaasConfigName";
/** Login page name. */
private String loginPage = "login.jsp";
/** init-param which can be passed to the servlet to override the default login page. */
private final String loginPageInitParam = "loginPage";
/** Authy authentication page name. */
private String authyPage = "authy.jsp";
/** init-param which can be passed to the servlet to override the default Authy authentication page. */
private final String authyPageInitParam = "authyPage";
/** Parameter name to indicate login failure. */
private final String failureParam = "loginFailed";
/** HTTP request parameter containing the user name. */
private final String usernameAttribute = "j_username";
/** HTTP request parameter containing the user's password. */
private final String passwordAttribute = "j_password";
/** HTTP request parameter containing the user's authy token. */
private final String authyResponseAttribute = "authy_token";
/** Authy API Client */
private AuthyApiClient authyClient;
/** the key in a HttpSession where user subjects are stored. */
public static final String USER_SUBJECT_KEY = "authy.usersubject";
/** keys in a HttpSevletRequest where Authy attributes are stored. */
public static final String API_KEY = "authy.apikey";
public static final String API_HOST = "authy.apihost";
public static final String API_NAME = "authy.name";
public static final String AUTHY_USER = "authy.user";
/** {@inheritDoc} */
public void init(ServletConfig config) throws ServletException {
super.init(config);
if (getInitParameter(jaasInitParam) != null) {
jaasConfigName = getInitParameter(jaasInitParam);
}
if (getInitParameter(loginPageInitParam) != null) {
loginPage = getInitParameter(loginPageInitParam);
}
if (!loginPage.startsWith("/")) {
loginPage = "/" + loginPage;
}
if (getInitParameter(authyPageInitParam) != null) {
authyPage = getInitParameter(authyPageInitParam);
}
if (!authyPage.startsWith("/")) {
authyPage = "/" + authyPage;
}
String method =
DatatypeHelper.safeTrimOrNullString(config.getInitParameter(LoginHandler.AUTHENTICATION_METHOD_KEY));
if (method != null) {
authenticationMethod = method;
} else {
authenticationMethod = AuthnContext.PPT_AUTHN_CTX;
}
}
/** {@inheritDoc} */
protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException,
        IOException {
    String username = request.getParameter(usernameAttribute);
    String password = request.getParameter(passwordAttribute);
    String token = request.getParameter(authyResponseAttribute);

    if (token != null) {
        // Second-factor step: a one-time Authy token was submitted.
        log.debug("Found authy token {}, attempting to verify...", token);

        // The Authy user id must have been stored in the session by the first-factor step.
        String authyId = (String) request.getSession().getAttribute(AUTHY_USER);
        if (authyId == null) {
            // authyId not found
            log.error("No authyId found in the session");
            redirectToLoginPage(request, response);
            return;
        }

        // FIXME(security): the Authy API key is hard-coded; move it to configuration
        // and keep it out of source control.
        AuthyApiClient client = new AuthyApiClient("fde43f39f773e1a686830f4fa26d0b4a");
        Tokens tokens = client.getTokens();

        // Verify the token against the Authy id bound to THIS session. (Previously a
        // hard-coded id was passed here, silently ignoring the session value that was
        // fetched and null-checked above.)
        int numericAuthyId;
        try {
            numericAuthyId = Integer.parseInt(authyId);
        } catch (NumberFormatException e) {
            log.error("Session authyId '{}' is not a valid Authy user id", authyId);
            redirectToLoginPage(request, response);
            return;
        }
        Token verification = tokens.verify(numericAuthyId, token);
        if (verification.isOk()) {
            log.debug("Temporarily auto-accepting any 2nd factor token received: {}", token);
        } else {
            throw new ServletException("Authy token verification error");
        }

        // Both factors passed: hand the authenticated subject back to the engine.
        Subject userSubject = (Subject) request.getSession().getAttribute(USER_SUBJECT_KEY);
        request.setAttribute(LoginHandler.SUBJECT_KEY, userSubject);
        request.setAttribute(LoginHandler.AUTHENTICATION_METHOD_KEY, authenticationMethod);
        request.getSession().removeAttribute(USER_SUBJECT_KEY);
        AuthenticationEngine.returnToAuthenticationEngine(request, response);
        return;
    } else if (username == null || password == null) {
        // First visit (or incomplete form): show the username/password page.
        log.debug("No username/password found, redirecting to login");
        redirectToLoginPage(request, response);
        return;
    } else {
        // First-factor step: verify the username/password against JAAS.
        try {
            authenticateUser(request, username, password);
            // If this method returns without throwing an exception, the username/password are okay...
            log.debug("Username/password verified, redirecting to Authy 2nd factor");
            // TODO: look up the user's Authy id from a user store (registering the user
            // with Authy on first use) instead of this hard-coded test id.
            String authyId = "1816856";
            // User user = users.createUser("new_user@email.com", "405-342-5699", "57");
            // if(user.isOk())
            //     authyId = user.getId();  // store user.getId() in your database
            request.getSession().setAttribute(AUTHY_USER, authyId);
            redirectToAuthyPage(request, response);
            return;
        } catch (LoginException e) {
            request.setAttribute(failureParam, "true");
            request.setAttribute(LoginHandler.AUTHENTICATION_EXCEPTION_KEY, new AuthenticationException(e));
            redirectToLoginPage(request, response);
        }
    }
}
/**
 * Forwards the user to a page, exposing an "actionUrl" request attribute that
 * points back at this servlet so the target page can post its form here.
 *
 * @param path path to page
 * @param request current request
 * @param response current response
 */
protected void redirectToPage(String path, HttpServletRequest request, HttpServletResponse response) {
    StringBuilder actionUrlBuilder = new StringBuilder();
    if (!"".equals(request.getContextPath())) {
        actionUrlBuilder.append(request.getContextPath());
    }
    actionUrlBuilder.append(request.getServletPath());
    request.setAttribute("actionUrl", actionUrlBuilder.toString());
    try {
        // Log before forwarding: after forward() the response is committed and any
        // failure has already happened.
        log.debug("Redirecting to page {}", path);
        request.getRequestDispatcher(path).forward(request, response);
    } catch (IOException | ServletException ex) {
        // Collapsed two identical catch blocks into a multi-catch.
        log.error("Unable to redirect to page.", ex);
    }
}
/**
 * Sends the user to the login (username/password) page configured for this servlet.
 *
 * @param request current request
 * @param response current response
 */
protected void redirectToLoginPage(HttpServletRequest request, HttpServletResponse response) {
    redirectToPage(loginPage, request, response);
}
/**
 * Sends the user to the Authy second-factor (token entry) page configured for this servlet.
 *
 * @param request current request
 * @param response current response
 */
protected void redirectToAuthyPage(HttpServletRequest request, HttpServletResponse response) {
    redirectToPage(authyPage, request, response);
}
/**
 * Authenticate a username and password against JAAS. If authentication succeeds the subject is placed in the session.
 *
 * @param request current authentication request
 * @param username the principal name of the user to be authenticated
 * @param password the password of the user to be authenticated
 *
 * @throws LoginException thrown if there is a problem authenticating the user
 */
protected void authenticateUser(HttpServletRequest request, String username, String password) throws LoginException {
    try {
        log.debug("Attempting to authenticate user {}", username);
        // Drive the JAAS login with a handler that answers name/password callbacks.
        SimpleCallbackHandler cbh = new SimpleCallbackHandler(username, password);
        javax.security.auth.login.LoginContext jaasLoginCtx = new javax.security.auth.login.LoginContext(
                jaasConfigName, cbh);
        jaasLoginCtx.login();
        log.debug("Successfully authenticated user {}", username);
        // Augment the principals/credentials produced by the login modules with the
        // username principal and a username/password credential, then build a new
        // Subject from them for the second-factor step.  Note: the sets obtained from
        // the login subject are live views, so the add() calls below mutate it too.
        Subject loginSubject = jaasLoginCtx.getSubject();
        Set<Principal> principals = loginSubject.getPrincipals();
        principals.add(new UsernamePrincipal(username));
        Set<Object> publicCredentials = loginSubject.getPublicCredentials();
        Set<Object> privateCredentials = loginSubject.getPrivateCredentials();
        privateCredentials.add(new UsernamePasswordCredential(username, password));
        // 'false' = the new Subject is not read-only.
        Subject userSubject = new Subject(false, principals, publicCredentials, privateCredentials);
        request.getSession().setAttribute(USER_SUBJECT_KEY, userSubject);
    } catch (LoginException e) {
        log.debug("User authentication for " + username + " failed", e);
        throw e;
    } catch (Throwable e) {
        // Translate any unexpected failure into a LoginException so callers only
        // need to handle one exception type.
        log.debug("User authentication for " + username + " failed", e);
        throw new LoginException("unknown authentication error");
    }
}
/**
 * A callback handler that provides static name and password data to a JAAS login process.
 *
 * This handler only supports {@link NameCallback} and {@link PasswordCallback}.
 */
protected class SimpleCallbackHandler implements CallbackHandler {

    /** Name of the user. */
    private String uname;

    /** User's password. */
    private String pass;

    /**
     * Constructor.
     *
     * @param username The username
     * @param password The password
     */
    public SimpleCallbackHandler(String username, String password) {
        uname = username;
        pass = password;
    }

    /**
     * Handle a callback.
     *
     * @param callbacks The list of callbacks to process.
     *
     * @throws UnsupportedCallbackException If callbacks has a callback other than {@link NameCallback} or
     *             {@link PasswordCallback}.
     */
    public void handle(final Callback[] callbacks) throws UnsupportedCallbackException {
        if (callbacks == null || callbacks.length == 0) {
            return;
        }
        for (Callback cb : callbacks) {
            if (cb instanceof NameCallback) {
                NameCallback ncb = (NameCallback) cb;
                ncb.setName(uname);
            } else if (cb instanceof PasswordCallback) {
                PasswordCallback pcb = (PasswordCallback) cb;
                pcb.setPassword(pass.toCharArray());
            } else {
                // Honor the documented contract (and the CallbackHandler interface
                // convention): reject callback types we cannot answer instead of
                // silently ignoring them, so misconfigured login modules fail loudly.
                throw new UnsupportedCallbackException(cb,
                        "Unsupported callback type: " + cb.getClass().getName());
            }
        }
    }
}
}
| apache-2.0 |
SadieYuCN/StikkyHeader | core/src/main/java/it/carlom/stikkyheader/core/animator/AnimatorBuilder.java | 12591 | package it.carlom.stikkyheader.core.animator;
import android.graphics.Point;
import android.graphics.Rect;
import android.view.View;
import android.view.animation.Interpolator;
import com.nineoldandroids.view.ViewHelper;
import java.util.HashSet;
import java.util.Set;
/**
 * Collects per-view animations (scale, translation, fade, parallax) that are
 * driven by a scroll ratio via {@link #animateOnScroll(float, float)}.
 * Each (view, animation type) pair may be registered at most once.
 */
public class AnimatorBuilder {

    /** Default velocity used for the parallax effect. */
    public static final float DEFAULT_VELOCITY_ANIMATOR = 0.5f;

    /** Registered animations; bundle equality is (view, animation type). */
    private Set<AnimatorBundle> mSetAnimatorBundles;

    public AnimatorBuilder() {
        mSetAnimatorBundles = new HashSet<>(2);
    }

    /** Last scroll ratio applied, used to skip redundant view updates. */
    private float mLastTranslationApplied = Float.NaN;

    public static AnimatorBuilder create() {
        return new AnimatorBuilder();
    }

    public AnimatorBuilder applyScale(View viewToScale, Rect finalRect) {
        return applyScale(viewToScale, finalRect, null);
    }

    /**
     * Scales the view so that its current bounds animate towards {@code finalRect}.
     */
    public AnimatorBuilder applyScale(View viewToScale, Rect finalRect, Interpolator interpolator) {
        if (viewToScale == null) {
            throw new IllegalArgumentException("You passed a null view");
        }
        Rect from = buildViewRect(viewToScale);
        float scaleX = calculateScaleX(from, finalRect);
        float scaleY = calculateScaleY(from, finalRect);
        return applyScale(viewToScale, scaleX, scaleY, interpolator);
    }

    public AnimatorBuilder applyScale(View viewToScale, float scaleX, float scaleY) {
        return applyScale(viewToScale, scaleX, scaleY, null);
    }

    /**
     * Registers a scale animation. When start and target scales are uniform a
     * single SCALEXY bundle is used, otherwise independent X and Y bundles.
     *
     * @throws IllegalArgumentException if the view is null or already has a scale animation
     */
    public AnimatorBuilder applyScale(View viewToScale, float scaleX, float scaleY, Interpolator interpolator) {
        if (viewToScale == null) {
            throw new IllegalArgumentException("You passed a null view");
        }
        // NOTE(review): only SCALEX/SCALEXY are checked here; a previously added lone
        // SCALEY bundle would not be detected -- confirm whether that is intended.
        boolean hasScaleAnimation = hasAnimation(viewToScale, AnimatorBundle.TypeAnimation.SCALEX, AnimatorBundle.TypeAnimation.SCALEXY);
        if (hasScaleAnimation) {
            throw new IllegalArgumentException("Scale animation already added");
        }
        float startScaleX = ViewHelper.getScaleX(viewToScale);
        float startScaleY = ViewHelper.getScaleY(viewToScale);
        if (scaleX == scaleY && startScaleX == startScaleY) {
            AnimatorBundle animatorScaleXY = AnimatorBundle.create(AnimatorBundle.TypeAnimation.SCALEXY, viewToScale, interpolator, startScaleX, scaleX);
            addAnimator(animatorScaleXY);
        } else {
            AnimatorBundle animatorScaleX = AnimatorBundle.create(AnimatorBundle.TypeAnimation.SCALEX, viewToScale, interpolator, startScaleX, scaleX);
            AnimatorBundle animatorScaleY = AnimatorBundle.create(AnimatorBundle.TypeAnimation.SCALEY, viewToScale, interpolator, startScaleY, scaleY);
            addAnimator(animatorScaleX, animatorScaleY);
        }
        adjustTranslation(viewToScale);
        return this;
    }

    public AnimatorBuilder applyTranslation(View viewToTranslate, Point finalPoint) {
        return applyTranslation(viewToTranslate, finalPoint, null);
    }

    /**
     * Translate the top-left point of the view to finalPoint
     */
    public AnimatorBuilder applyTranslation(View viewToTranslate, Point finalPoint, Interpolator interpolator) {
        if (viewToTranslate == null) {
            throw new IllegalArgumentException("You passed a null view");
        }
        final Point from = buildPointView(viewToTranslate);
        float translationX = calculateTranslationX(from, finalPoint);
        float translationY = calculateTranslationY(from, finalPoint);
        return applyTranslation(viewToTranslate, translationX, translationY, interpolator);
    }

    public AnimatorBuilder applyTranslation(View viewToTranslate, float translateX, float translateY) {
        return applyTranslation(viewToTranslate, translateX, translateY, null);
    }

    /**
     * Registers X and Y translation animations starting from the view's current
     * translation values.
     */
    public AnimatorBuilder applyTranslation(View viewToTranslate, float translateX, float translateY, Interpolator interpolator) {
        if (viewToTranslate == null) {
            throw new IllegalArgumentException("You passed a null view");
        }
        float startTranslationX = ViewHelper.getTranslationX(viewToTranslate);
        float startTranslationY = ViewHelper.getTranslationY(viewToTranslate);
        AnimatorBundle animatorTranslationX = AnimatorBundle.create(AnimatorBundle.TypeAnimation.TRANSLATIONX, viewToTranslate, interpolator, startTranslationX, translateX);
        AnimatorBundle animatorTranslationY = AnimatorBundle.create(AnimatorBundle.TypeAnimation.TRANSLATIONY, viewToTranslate, interpolator, startTranslationY, translateY);
        addAnimator(animatorTranslationX, animatorTranslationY);
        adjustTranslation(viewToTranslate);
        return this;
    }

    public AnimatorBuilder applyFade(View viewToFade, float fade) {
        return applyFade(viewToFade, fade, null);
    }

    /** Registers an alpha animation from the view's current alpha to {@code fade}. */
    public AnimatorBuilder applyFade(View viewToFade, float fade, Interpolator interpolator) {
        if (viewToFade == null) {
            throw new IllegalArgumentException("You passed a null view");
        }
        float startAlpha = ViewHelper.getAlpha(viewToFade);
        addAnimator(AnimatorBundle.create(AnimatorBundle.TypeAnimation.FADE, viewToFade, interpolator, startAlpha, fade));
        return this;
    }

    public AnimatorBuilder applyVerticalParallax(View viewToParallax) {
        return applyVerticalParallax(viewToParallax, DEFAULT_VELOCITY_ANIMATOR);
    }

    /**
     * @param viewToParallax the view to move with a parallax effect
     * @param velocityParallax the velocity to apply to the view in order to show the parallax effect. choose a velocity between 0 and 1 for better results
     * @return this builder, for chaining
     */
    public AnimatorBuilder applyVerticalParallax(View viewToParallax, float velocityParallax) {
        if (viewToParallax == null) {
            throw new IllegalArgumentException("You passed a null view");
        }
        addAnimator(AnimatorBundle.create(AnimatorBundle.TypeAnimation.PARALLAX, viewToParallax, null, 0f, -velocityParallax));
        return this;
    }

    /**
     * Adds all bundles atomically in the sense that a duplicate (view, type)
     * pair makes the whole call fail.
     */
    private void addAnimator(AnimatorBundle... animators) {
        boolean added = true;
        for (AnimatorBundle animator : animators) {
            added &= mSetAnimatorBundles.add(animator);
        }
        if (!added) {
            throw new IllegalArgumentException("Animation already added to this view");
        }
    }

    /**
     * Called after a new scale or translation animation has been added.
     * Compensates the translation delta of a view that is both scaled and
     * translated, so the two effects compose correctly (scaling happens around
     * the view centre, shifting its top-left corner by half the size delta).
     */
    private void adjustTranslation(View viewAnimated) {
        AnimatorBundle animatorScaleX = null;
        AnimatorBundle animatorScaleY = null;
        AnimatorBundle animatorTranslationX = null;
        AnimatorBundle animatorTranslationY = null;
        for (AnimatorBundle animator : mSetAnimatorBundles) {
            if (viewAnimated != animator.mView) {
                continue;
            }
            switch (animator.mTypeAnimation) {
                case SCALEX:
                    animatorScaleX = animator;
                    break;
                case SCALEY:
                    animatorScaleY = animator;
                    break;
                case SCALEXY:
                    animatorScaleX = animator;
                    animatorScaleY = animator;
                    break;
                case TRANSLATIONX:
                    animatorTranslationX = animator;
                    break;
                case TRANSLATIONY:
                    animatorTranslationY = animator;
                    break;
            }
        }
        if (animatorTranslationX != null && animatorScaleX != null) {
            animatorTranslationX.mDelta = animatorTranslationX.mDelta + (animatorTranslationX.mView.getWidth() * (animatorScaleX.mDelta / 2f));
        }
        if (animatorTranslationY != null && animatorScaleY != null) {
            // FIX: the vertical compensation must use the view HEIGHT; the original
            // used getWidth() here (copy/paste from the X branch above).
            animatorTranslationY.mDelta = animatorTranslationY.mDelta + (animatorTranslationY.mView.getHeight() * (animatorScaleY.mDelta / 2f));
        }
    }

    /**
     * Applies every registered animation for the given scroll position.
     *
     * @param boundedRatioTranslationY scroll progress ratio (typically 0..1), possibly interpolated per bundle
     * @param translationY raw scroll translation, used by TRANSLATIONY and PARALLAX
     */
    protected void animateOnScroll(float boundedRatioTranslationY, float translationY) {
        if (mLastTranslationApplied == boundedRatioTranslationY) {
            return;
        }
        mLastTranslationApplied = boundedRatioTranslationY;
        for (AnimatorBuilder.AnimatorBundle animatorBundle : mSetAnimatorBundles) {
            float interpolatedTranslation = animatorBundle.mInterpolator == null ? boundedRatioTranslationY : animatorBundle.mInterpolator.getInterpolation(boundedRatioTranslationY);
            float valueAnimation = animatorBundle.mFromValue + (animatorBundle.mDelta * interpolatedTranslation);
            switch (animatorBundle.mTypeAnimation) {
                case SCALEX:
                    ViewHelper.setScaleX(animatorBundle.mView, valueAnimation);
                    break;
                case SCALEY:
                    ViewHelper.setScaleY(animatorBundle.mView, valueAnimation);
                    break;
                case SCALEXY:
                    ViewHelper.setScaleX(animatorBundle.mView, valueAnimation);
                    ViewHelper.setScaleY(animatorBundle.mView, valueAnimation);
                    break;
                case FADE:
                    //TODO performance issues?
                    ViewHelper.setAlpha(animatorBundle.mView, valueAnimation);
                    break;
                case TRANSLATIONX:
                    ViewHelper.setTranslationX(animatorBundle.mView, valueAnimation);
                    break;
                case TRANSLATIONY:
                    ViewHelper.setTranslationY(animatorBundle.mView, valueAnimation - translationY);
                    break;
                case PARALLAX:
                    ViewHelper.setTranslationY(animatorBundle.mView, animatorBundle.mDelta * translationY);
                    break;
            }
        }
    }

    public boolean hasAnimatorBundles() {
        return mSetAnimatorBundles.size() > 0;
    }

    public static Rect buildViewRect(View view) {
        //TODO get coordinates related to the header
        return new Rect(view.getLeft(), view.getTop(), view.getRight(), view.getBottom());
    }

    public static Point buildPointView(View view) {
        return new Point(view.getLeft(), view.getTop());
    }

    public static float calculateScaleX(Rect from, Rect to) {
        return (float) to.width() / (float) from.width();
    }

    public static float calculateScaleY(Rect from, Rect to) {
        return (float) to.height() / (float) from.height();
    }

    public static float calculateTranslationX(Point from, Point to) {
        return to.x - from.x;
    }

    public static float calculateTranslationY(Point from, Point to) {
        return to.y - from.y;
    }

    /** Returns whether the view already has one of the given animation types registered. */
    private boolean hasAnimation(final View view, AnimatorBundle.TypeAnimation... typeAnimations) {
        for (AnimatorBundle animator : mSetAnimatorBundles) {
            if (animator.mView == view) {
                for (AnimatorBundle.TypeAnimation typeAnimation : typeAnimations) {
                    if (animator.mTypeAnimation == typeAnimation) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * One registered animation on one view: start value, total delta, type and
     * optional interpolator. Identity is (view, type) -- see equals/hashCode.
     */
    public static class AnimatorBundle {

        public enum TypeAnimation {
            SCALEX, SCALEY, SCALEXY, FADE, TRANSLATIONX, TRANSLATIONY, PARALLAX
        }

        private float mFromValue;
        private float mDelta;
        private TypeAnimation mTypeAnimation;
        private View mView;
        private Interpolator mInterpolator;

        AnimatorBundle(TypeAnimation typeAnimation) {
            mTypeAnimation = typeAnimation;
        }

        public static AnimatorBundle create(AnimatorBundle.TypeAnimation typeAnimation, View view, Interpolator interpolator, float fromValue, float toValue) {
            AnimatorBundle animatorBundle = new AnimatorBundle(typeAnimation);
            animatorBundle.mView = view;
            animatorBundle.mFromValue = fromValue;
            animatorBundle.mDelta = toValue - fromValue;
            animatorBundle.mInterpolator = interpolator;
            return animatorBundle;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            AnimatorBundle that = (AnimatorBundle) o;
            return mView == that.mView && mTypeAnimation == that.mTypeAnimation;
        }

        @Override
        public int hashCode() {
            int result = mTypeAnimation.hashCode();
            result = 31 * result + mView.hashCode();
            return result;
        }
    }
}
| apache-2.0 |
paulhoule/centipede | centipede-parser/src/main/java/com/ontology2/centipede/errors/UsageException.java | 575 | package com.ontology2.centipede.errors;
/**
 * Signals that the program was invoked with invalid or unusable
 * command-line arguments. Unchecked so it can propagate up to the
 * entry point, which typically prints usage help.
 */
public class UsageException extends RuntimeException {

    /** Creates a usage error with no detail message. */
    public UsageException() {
        super();
    }

    /**
     * Creates a usage error with a detail message.
     *
     * @param message description of the usage problem
     */
    public UsageException(String message) {
        super(message);
    }

    /**
     * Creates a usage error with a detail message and an underlying cause.
     *
     * @param message description of the usage problem
     * @param cause   the exception that triggered this error
     */
    public UsageException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates a usage error wrapping an underlying cause.
     *
     * @param cause the exception that triggered this error
     */
    public UsageException(Throwable cause) {
        super(cause);
    }

    /**
     * Full-control constructor mirroring
     * {@link RuntimeException#RuntimeException(String, Throwable, boolean, boolean)}.
     *
     * @param message            description of the usage problem
     * @param cause              the exception that triggered this error
     * @param enableSuppression  whether suppression is enabled
     * @param writableStackTrace whether the stack trace should be writable
     */
    public UsageException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
| apache-2.0 |
youlookwhat/CloudReader | app/src/main/java/com/example/jingbin/cloudreader/bean/wanandroid/NaviJsonBean.java | 1585 | package com.example.jingbin.cloudreader.bean.wanandroid;
import java.util.List;
/**
 * @author jingbin
 * @data 2018/10/8
 * @description Navigation data (JSON response wrapper for the navigation endpoint).
 */
public class NaviJsonBean {
    // Error code returned by the server.
    private int errorCode;
    // Error message returned by the server.
    private String errorMsg;
    // Navigation categories, each holding a list of articles.
    private List<DataBean> data;
    public int getErrorCode() {
        return errorCode;
    }
    public void setErrorCode(int errorCode) {
        this.errorCode = errorCode;
    }
    public String getErrorMsg() {
        return errorMsg;
    }
    public void setErrorMsg(String errorMsg) {
        this.errorMsg = errorMsg;
    }
    public List<DataBean> getData() {
        return data;
    }
    public void setData(List<DataBean> data) {
        this.data = data;
    }
    /** One navigation category and its articles. */
    public static class DataBean {
        // UI state: whether this category is currently selected.
        private boolean selected;
        // Category id.
        private int cid;
        // Category display name.
        private String name;
        // Articles belonging to this category.
        private List<ArticlesBean> articles;
        public boolean isSelected() {
            return selected;
        }
        public void setSelected(boolean selected) {
            this.selected = selected;
        }
        public int getCid() {
            return cid;
        }
        public void setCid(int cid) {
            this.cid = cid;
        }
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
        public List<ArticlesBean> getArticles() {
            return articles;
        }
        public void setArticles(List<ArticlesBean> articles) {
            this.articles = articles;
        }
    }
}
| apache-2.0 |
h819/spring-boot | h819-example/src/main/java/org/examples/spring/controller/DownloadFileController.java | 5683 | package org.examples.spring.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.util.FileCopyUtils;
import org.springframework.util.ResourceUtils;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
/**
 * File download examples.
 * -
 * Note: IE's Thunder (Xunlei) auto-download plugin must be disabled, otherwise downloads misbehave.
 * -
 * Chrome issue: with the HttpServletResponse approach the download window must not be closed
 * immediately; let the page sleep a few seconds first (h819.js has a helper for this), by which
 * time Chrome has finished the download.
 * -
 * Description : download a file via response / HttpEntity / classpath resource
 * User: h819
 * Date: 2017/3/16
 * Time: 9:56
 * To change this template use File | Settings | File Templates.
 */
@Controller
@RequestMapping("/download")
public class DownloadFileController {
    // Set the content type according to actual needs.
    // private final String APPLICATION_NAME = MediaType.APPLICATION_PDF_VALUE;
    private final String APPLICATION_NAME = MediaType.APPLICATION_OCTET_STREAM_VALUE;
    // private final MediaType mediaType = MediaType.APPLICATION_PDF;
    private final MediaType mediaType = MediaType.APPLICATION_OCTET_STREAM; // binary stream, the usual choice for downloads
    @Autowired
    private ServletContext servletContext; // used to resolve paths inside the web application
    /**
     * Download a file via HttpServletResponse.
     * Some browsers display the file directly, others download it.
     * NOTE(review): the filename is not quoted/URL-encoded, so non-ASCII names may
     * break in some browsers -- confirm against RFC 6266 if that matters here.
     *
     * @param fileName name of the file to download, located under the web app root
     * @param response current response
     * @throws IOException on read/write failure
     */
    @GetMapping(value = "/a", produces = APPLICATION_NAME)
    @ResponseBody
    public void downloadA(@RequestParam("filename") String fileName, HttpServletResponse response) throws IOException {
        File file = getFromWebAppPathFile(fileName);
        InputStream in = new FileInputStream(file);
        response.setContentType(APPLICATION_NAME);
        response.setContentLength((int) file.length());
        response.setHeader("Content-Disposition", "attachment; filename=" + file.getName()); // there must be a space before "filename"
        FileCopyUtils.copy(in, response.getOutputStream());
    }
    /**
     * Download a file via HttpEntity.
     * Some browsers display the file directly, others download it.
     * NOTE(review): this variant uses "inline" while the others use "attachment" --
     * confirm the difference is intentional.
     *
     * @param fileName name of the file to download, located under the web app root
     * @return the file content with download headers
     * @throws IOException on read failure
     */
    @GetMapping(value = "/b", produces = APPLICATION_NAME)
    @ResponseBody
    public HttpEntity<byte[]> downloadB(@RequestParam("filename") String fileName) throws IOException {
        File file = getFromWebAppPathFile(fileName);
        byte[] document = FileCopyUtils.copyToByteArray(file);
        HttpHeaders header = new HttpHeaders();
        header.setContentType(mediaType);
        header.set("Content-Disposition", "inline; filename=" + file.getName()); // there must be a space before "filename"
        header.setContentLength(document.length);
        return new HttpEntity<>(document, header);
    }
    /**
     * Download a file that lives on the classpath
     * (in the sources, or inside the jar).
     *
     * @param fileName name of the classpath resource to download
     * @param response current response
     * @throws IOException on read/write failure, including file-not-found
     */
    @GetMapping(value = "/c", produces = APPLICATION_NAME) // pdf
    @ResponseBody
    public void downloadClassPathFile(@RequestParam("filename") String fileName, HttpServletResponse response) throws IOException {
        File file = getFromClassPath(fileName);
        response.setContentType(APPLICATION_NAME); // excel : MimeTypeUtils.APPLICATION_OCTET_STREAM_VALUE
        response.setContentLength((int) file.length());
        response.setHeader("Content-Disposition", "attachment; filename=" + file.getName()); // there must be a space before "filename"
        response.setHeader("Content-Length", String.valueOf(file.length()));
        FileCopyUtils.copy(new FileInputStream(file), response.getOutputStream());
    }
    /**
     * Resolves a file located under the web application root directory
     * (the path is interpreted relative to the web app root).
     *
     * @param fileName file name relative to the web app root
     * @return the resolved file
     * @throws FileNotFoundException if the file does not exist
     */
    private File getFromWebAppPathFile(String fileName) throws FileNotFoundException {
        // Resolve a file inside the application; other directories work the same way.
        String filePath = servletContext.getRealPath("/") + fileName;
        File file = new File(filePath);
        if (!file.exists()) {
            throw new FileNotFoundException("file with path: " + fileName + " was not found.");
        }
        return file;
    }
    /**
     * Resolves a file from the classpath.
     *
     * @param fileName resource name on the classpath
     * @return the resolved file
     * @throws FileNotFoundException if the resource does not exist
     */
    public File getFromClassPath(String fileName) throws FileNotFoundException {
        return ResourceUtils.getFile("classpath:" + fileName);
    }
    /**
     * Anti-leeching measure: inspect the Referer header.
     * NOTE(review): Referer is trivially forged, so this check alone is not secure.
     */
    private void check(String referer) {
        //Check the renderer
        if (referer != null && !referer.isEmpty()) {
            //do nothing
            //or send error
            //
        }
    }
}
| apache-2.0 |
osmmosques/osm-mosques | antique-webapp-borken/src/main/java/com/gurkensalat/osm/mosques/HelloWebXml.java | 411 | package com.gurkensalat.osm.mosques;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
/**
 * Servlet-container initializer used when the app is deployed as a WAR:
 * points Spring Boot at {@link Application} as the configuration source.
 */
public class HelloWebXml extends SpringBootServletInitializer {
    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        return application.sources(Application.class);
    }
}
| apache-2.0 |
ISO20022ArchitectForum/sample-code-public | DLT/Corda/ISO20022Generated/src/iso20022/SchemaType.java | 1632 | /**
*/
package iso20022;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Schema Type</b></em>'.
* <!-- end-user-doc -->
*
* <!-- begin-model-doc -->
* Meta class for represensting XML Schema DataTypes
* <!-- end-model-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link iso20022.SchemaType#getKind <em>Kind</em>}</li>
* </ul>
*
* @see iso20022.Iso20022Package#getSchemaType()
* @model annotation="urn:iso:std:iso:20022:2013:ecore:extension basis='IMPLEMENTATION_ENHANCEMENT' description='Meta class for represensting XML Schema DataTypes'"
* @generated
*/
public interface SchemaType extends DataType {
/**
* Returns the value of the '<em><b>Kind</b></em>' attribute.
* The literals are from the enumeration {@link iso20022.SchemaTypeKind}.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Kind</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Kind</em>' attribute.
* @see iso20022.SchemaTypeKind
* @see #setKind(SchemaTypeKind)
* @see iso20022.Iso20022Package#getSchemaType_Kind()
* @model required="true" ordered="false"
* @generated
*/
SchemaTypeKind getKind();
/**
* Sets the value of the '{@link iso20022.SchemaType#getKind <em>Kind</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Kind</em>' attribute.
* @see iso20022.SchemaTypeKind
* @see #getKind()
* @generated
*/
void setKind(SchemaTypeKind value);
} // SchemaType
| apache-2.0 |
aslanbekirov/crate | sql/src/main/java/io/crate/jobs/BulkShardProcessorContext.java | 2220 | /*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.jobs;
import io.crate.executor.transport.ShardRequest;
import org.elasticsearch.action.bulk.BulkShardProcessor;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Execution sub-context wrapping a {@link BulkShardProcessor}: forwards the
 * context lifecycle (start/kill/close) to the processor and lets callers add
 * shard request items to it.
 */
public class BulkShardProcessorContext extends AbstractExecutionSubContext {
    private final BulkShardProcessor<? extends ShardRequest> bulkShardProcessor;
    public BulkShardProcessorContext(int id, BulkShardProcessor<? extends ShardRequest> bulkShardProcessor) {
        super(id);
        this.bulkShardProcessor = bulkShardProcessor;
    }
    @Override
    protected void innerStart() {
        // NOTE(review): close() on start looks intentional -- presumably it flushes
        // the accumulated bulk requests -- confirm against BulkShardProcessor.close().
        bulkShardProcessor.close();
    }
    @Override
    protected void innerKill(@Nonnull Throwable t) {
        bulkShardProcessor.kill(t);
    }
    @Override
    protected void innerClose(@Nullable Throwable t) {
        // Only abnormal termination (t != null) is propagated to the processor.
        if (t != null) {
            bulkShardProcessor.kill(t);
        }
    }
    /**
     * Adds one item to the wrapped bulk processor.
     *
     * @param indexName target index
     * @param item the shard request item to enqueue
     * @param routing optional routing value, may be null
     * @return the result of {@code BulkShardProcessor.add} -- whether the item was accepted
     */
    public boolean add(String indexName,
                       ShardRequest.Item item,
                       @Nullable String routing) {
        return bulkShardProcessor.add(indexName, item, routing);
    }
    @Override
    public String name() {
        return "bulk-update-by-id";
    }
}
| apache-2.0 |
arnost-starosta/midpoint | gui/admin-gui/src/main/java/com/evolveum/midpoint/web/page/admin/reports/component/SingleValueChoosePanel.java | 1382 | package com.evolveum.midpoint.web.page.admin.reports.component;
import java.util.List;
import java.util.function.Function;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.util.ListModel;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
/**
 * A chooser panel restricted to a single selection: the first chosen object is
 * transformed with the supplied function and written into a single-valued model.
 * Builds on {@link ConvertingMultiValueChoosePanel} (constructed with an empty
 * list model and the boolean flag set to false -- presumably disabling
 * multi-select; confirm against the parent class).
 */
public class SingleValueChoosePanel<U, T extends ObjectType> extends ConvertingMultiValueChoosePanel<U,T> {
    private static final long serialVersionUID = 1L;
    // Target model that receives the (single) transformed selection, or null when cleared.
    private IModel<U> singleTargetModel;
    public SingleValueChoosePanel(String id, List<Class<? extends T>> types,
            Function<T, U> transformFunction, IModel<U> targetModel) {
        super(id, types, transformFunction, new ListModel<>(), false);
        singleTargetModel = targetModel;
    }
    @Override
    protected void choosePerformedHook(AjaxRequestTarget target, List<T> selected) {
        super.choosePerformedHook(target, selected);
        if(selected != null) {
            // Take only the first selected element; transform() maps T -> U.
            U transformedSelectedObject = selected.stream()
                    .findFirst()
                    .map(this::transform)
                    .orElse(null);
            // NOTE(review): logger is borrowed from AuditLogViewerPanel -- consider a local logger.
            AuditLogViewerPanel.LOGGER.debug("Setting model object to {}", transformedSelectedObject);
            singleTargetModel.setObject(transformedSelectedObject);
        }
    }
    @Override
    protected void removePerformedHook(AjaxRequestTarget target, T value) {
        super.removePerformedHook(target, value);
        // Removing the (only) value clears the target model.
        singleTargetModel.setObject(null);
    }
}
| apache-2.0 |
shashank-neo/orca | src/main/java/com/falcon/orca/commands/ManagerCommand.java | 1116 | /**
* Copyright 2016 Shwet Shashank
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations
* under the License.
*/
package com.falcon.orca.commands;
import com.falcon.orca.enums.ManagerCommandType;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
import java.util.HashMap;
/**
 * Command addressed to a manager actor; carries the {@link ManagerCommandType}
 * describing the requested action. Extends {@link ActorCommand}, initialising
 * it with an empty options map.
 * Created by shwet.s under project orca. Created on 06/04/16.
 */
@Getter
@Setter
public class ManagerCommand extends ActorCommand implements Serializable {
    // The kind of manager action this command represents.
    private ManagerCommandType type;
    public ManagerCommand() {
        super(new HashMap<>());
    }
}
| apache-2.0 |
sunguangran/navi | bootstrap/src/main/java/com/youku/java/navi/boot/ANaviMain.java | 4391 | package com.youku.java.navi.boot;
import ch.qos.logback.classic.BasicConfigurator;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
@Slf4j
public abstract class ANaviMain {

    static {
        try {
            // Mirror NAVI_HOME and PORT from the environment into system properties
            // so the rest of the framework can read them uniformly.
            if (StringUtils.isEmpty(System.getProperty("NAVI_HOME")) && StringUtils.isNotEmpty(System.getenv("NAVI_HOME"))) {
                System.setProperty("NAVI_HOME", System.getenv("NAVI_HOME"));
            }
            if (StringUtils.isEmpty(System.getProperty("PORT")) && StringUtils.isNotEmpty(System.getenv(NaviDefine.PORT))) {
                System.setProperty("PORT", System.getenv(NaviDefine.PORT));
            }
            // Initialize logback: use the config file under NAVI_HOME when available,
            // otherwise fall back to the basic console configuration.
            LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
            lc.reset();
            if (NaviDefine.NAVI_HOME != null) {
                JoranConfigurator configurator = new JoranConfigurator();
                configurator.setContext(lc);
                configurator.doConfigure(NaviDefine.NAVI_LOGBACK_PATH);
            } else {
                BasicConfigurator.configure(lc);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Parses a "key=value" style server configuration file.
     * Empty lines and lines starting with '#' are ignored; only the first '='
     * split into two parts is honored.
     *
     * @param confFile path of the configuration file
     * @return the parsed properties (empty on read failure)
     */
    protected Properties parseConfig(String confFile) {
        log.info("start parsing config file.");
        Properties props = new Properties();
        // try-with-resources guarantees the reader is closed even when a read fails
        // (the original code leaked the reader on exception).
        try (BufferedReader br = new BufferedReader(new FileReader(new File(confFile)))) {
            String str;
            while ((str = br.readLine()) != null) {
                if (str.startsWith("#") || str.length() == 0) {
                    continue;
                }
                String[] pairs = str.split("=");
                if (pairs.length > 1) {
                    props.put(pairs[0].trim(), pairs[1].trim());
                }
            }
        } catch (IOException e) {
            log.error("{}", e.getMessage());
        }
        return props;
    }

    /**
     * Returns the fully qualified name of the server bootstrap class.
     */
    public abstract String getStartClass(Properties serverConfig);

    /**
     * Returns the path of the server configuration file.
     */
    public abstract String getConfPath();

    /**
     * Builds the startup configuration. In development mode (NAVI_HOME unset)
     * a default in-memory configuration is used; otherwise the configuration
     * file is parsed from {@link #getConfPath()}.
     */
    public Properties parseServerConfig(String[] args) {
        // Development mode falls back to the built-in defaults.
        if (NaviDefine.NAVI_HOME == null) {
            log.warn("NAVI_HOME not defined, will use default config");
            Properties serverCfg = new Properties();
            serverCfg.setProperty(NaviDefine.PORT, NaviDefine.DEFAULT_PORT);
            serverCfg.setProperty(NaviDefine.SERVER, NaviDefine.DEFAULT_SERVER);
            serverCfg.setProperty(NaviDefine.CHUNK_AGGR_SIZE, NaviDefine.DEFAULT_CHUNK_SIZE);
            return serverCfg;
        } else {
            return parseConfig(getConfPath());
        }
    }

    /**
     * Instantiates and starts the server described by the configuration.
     * In DEV mode the current context classloader is used; otherwise a dedicated
     * {@link NaviServerClassloader} is installed first. On successful setup a
     * JVM shutdown hook stops the server on exit; on failure the process exits
     * with the setup status code.
     */
    protected void doMain(Properties serverConfig) throws Exception {
        String mode = serverConfig.getProperty(NaviDefine.MODE);
        int statCode;
        final INaviServer server;
        if (NaviDefine.WORK_MODE.DEV == NaviDefine.WORK_MODE.toEnum(mode)) {
            server = (INaviServer) Class.forName(getStartClass(serverConfig), true, Thread.currentThread().getContextClassLoader()).newInstance();
        } else {
            NaviServerClassloader loader = new NaviServerClassloader(Thread.currentThread().getContextClassLoader());
            Thread.currentThread().setContextClassLoader(loader);
            server = (INaviServer) Class.forName(getStartClass(serverConfig), true, loader).newInstance();
        }
        statCode = server.setupServer(serverConfig);
        if (statCode == INaviServer.SUCCESS) {
            Runtime.getRuntime().addShutdownHook(new Thread() {
                @Override
                public void run() {
                    log.info("navi server detected jvm shutdown, server will exit.");
                    server.stopServer();
                }
            });
        } else {
            System.exit(statCode);
        }
    }
}
| apache-2.0 |