text
stringlengths
7
1.01M
package org.tests.cache; import io.ebean.BaseTestCase; import io.ebean.CacheMode; import io.ebean.DB; import io.ebean.ExpressionList; import io.ebean.bean.BeanCollection; import io.ebean.cache.ServerCache; import io.ebean.test.LoggedSql; import org.junit.jupiter.api.Test; import org.tests.model.basic.Customer; import org.tests.model.basic.ResetBasicData; import org.tests.model.cache.EColAB; import java.util.List; import java.util.function.Consumer; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.*; public class TestQueryCache extends BaseTestCase { @Test public void clashHashCode() { new EColAB("01", "20").save(); new EColAB("02", "10").save(); List<EColAB> list1 = DB.find(EColAB.class) .setUseQueryCache(true) .where() .eq("columnA", "01") .eq("columnB", "20") .findList(); List<EColAB> list2 = DB.find(EColAB.class) .setUseQueryCache(true) .where() .eq("columnA", "02") .eq("columnB", "10") .findList(); assertThat(list1.get(0).getColumnA()).isEqualTo("01"); assertThat(list1.get(0).getColumnB()).isEqualTo("20"); assertThat(list2.get(0).getColumnA()).isEqualTo("02"); assertThat(list2.get(0).getColumnB()).isEqualTo("10"); } @Test public void findSingleAttribute() { DB.find(EColAB.class).delete(); new EColAB("03", "SingleAttribute").save(); new EColAB("03", "SingleAttribute").save(); List<String> colA_first = DB .find(EColAB.class) .setUseQueryCache(true) .setDistinct(true) .select("columnA") .where() .eq("columnB", "SingleAttribute") .findSingleAttributeList(); List<String> colA_Second = DB .find(EColAB.class) .setUseQueryCache(true) .setDistinct(true) .select("columnA") .where() .eq("columnB", "SingleAttribute") .findSingleAttributeList(); assertThat(colA_Second).isSameAs(colA_first); List<String> colA_NotDistinct = DB .find(EColAB.class) .setUseQueryCache(true) .select("columnA") .where() .eq("columnB", "SingleAttribute") .findSingleAttributeList(); assertThat(colA_Second).isNotSameAs(colA_NotDistinct); // ensure that findCount & findSingleAttribute use different // slots in cache. If not a "Cannot cast List to int" should happen. int count = DB .find(EColAB.class) .setUseQueryCache(true) .select("columnA") .where() .eq("columnB", "SingleAttribute") .findCount(); assertThat(count).isEqualTo(2); } @Test public void findCount() { new EColAB("04", "count").save(); new EColAB("05", "count").save(); LoggedSql.start(); int count0 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "count") .findCount(); int count1 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "count") .findCount(); List<String> sql = LoggedSql.stop(); assertThat(count0).isEqualTo(count1); assertThat(sql).hasSize(1); // and now, ensure that we hit the database LoggedSql.start(); int count2 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.OFF) .where() .eq("columnB", "count") .findCount(); assertThat(count2).isEqualTo(count1); sql = LoggedSql.stop(); assertThat(sql).hasSize(1); } @Test public void findCountDifferentQueries() { LoggedSql.start(); int count0 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "abc") .findCount(); int count1 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "def") .findCount(); List<String> sql = LoggedSql.stop(); assertThat(count0).isEqualTo(count1); assertThat(sql).hasSize(2); // different queries } @Test public void findCountFirstOnThenRecache() { LoggedSql.start(); int count0 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "uvw") .findCount(); int count1 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.PUT) .where() .eq("columnB", "uvw") .findCount(); List<String> sql = LoggedSql.stop(); assertThat(count0).isEqualTo(count1); assertThat(sql).hasSize(2); // try recache as second query - it must fetch it } @Test public void findCountFirstRecacheThenOn() { LoggedSql.start(); int count0 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.PUT) .where() .eq("columnB", "xyz") .findCount(); int count1 = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "xyz") .findCount(); List<String> sql = LoggedSql.stop(); assertThat(count0).isEqualTo(count1); assertThat(sql).hasSize(1); // try recache as first query - second "ON" query must fetch it. } @Test @SuppressWarnings("unchecked") public void testReadOnlyFind() { ResetBasicData.reset(); ServerCache customerCache = DB.cacheManager().queryCache(Customer.class); customerCache.clear(); List<Customer> list = DB.find(Customer.class).setUseQueryCache(true).setReadOnly(true).where() .ilike("name", "Rob").findList(); BeanCollection<Customer> bc = (BeanCollection<Customer>) list; assertTrue(bc.isReadOnly()); assertFalse(bc.isEmpty()); assertTrue(!list.isEmpty()); assertTrue(DB.beanState(list.get(0)).isReadOnly()); List<Customer> list2 = DB.find(Customer.class).setUseQueryCache(true).setReadOnly(true).where() .ilike("name", "Rob").findList(); List<Customer> list2B = DB.find(Customer.class).setUseQueryCache(true) // .setReadOnly(true) .where().ilike("name", "Rob").findList(); assertSame(list, list2); // readOnly defaults to true for query cache assertSame(list, list2B); List<Customer> list3 = DB.find(Customer.class).setUseQueryCache(true).setReadOnly(false).where() .ilike("name", "Rob").findList(); assertNotSame(list, list3); BeanCollection<Customer> bc3 = (BeanCollection<Customer>) list3; assertFalse(bc3.isReadOnly()); assertFalse(bc3.isEmpty()); assertTrue(list3.size() > 0); // TODO: At this stage setReadOnly(false) does create a shallow copy of the List/Set/Map, but does not // change the read only state in the entities. // assertFalse(DB.beanState(list3.get(0)).isReadOnly()); } @Test public void findIds() { DB.find(EColAB.class).delete(); new EColAB("03", "someId").save(); new EColAB("04", "someId").save(); new EColAB("05", "someId").save(); LoggedSql.start(); List<Integer> colA_first = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "someId") .findIds(); List<Integer> colA_second = DB.find(EColAB.class) .setUseQueryCache(CacheMode.ON) .where() .eq("columnB", "someId") .findIds(); List<String> sql = LoggedSql.stop(); assertThat(colA_first).isSameAs(colA_second); assertThat(colA_first).hasSize(3); assertThat(sql).hasSize(1); // and now, ensure that we hit the database LoggedSql.start(); colA_second = DB.find(EColAB.class) .setUseQueryCache(CacheMode.PUT) .where() .eq("columnB", "someId") .findIds(); sql = LoggedSql.stop(); assertThat(sql).hasSize(1); } @Test public void findCountDifferentQueriesBit() { DB.getDefault().pluginApi().cacheManager().clearAll(); differentFindCount(q->q.bitwiseAny("id",1), q->q.bitwiseAny("id",0)); differentFindCount(q->q.bitwiseAll("id",1), q->q.bitwiseAll("id",0)); // differentFindCount(q->q.bitwiseNot("id",1), q->q.bitwiseNot("id",0)); NOT 1 == AND 1 = 0 differentFindCount(q->q.bitwiseAnd("id",1, 0), q->q.bitwiseAnd("id",1, 1)); differentFindCount(q->q.bitwiseAnd("id",2, 0), q->q.bitwiseAnd("id",4, 0)); differentFindCount(q->q.bitwiseAnd("id",2, 1), q->q.bitwiseAnd("id",4, 1)); // Will produce hash collision differentFindCount(q->q.bitwiseAnd("id",10, 0), q->q.bitwiseAnd("id",0, 928210)); } void differentFindCount(Consumer<ExpressionList<EColAB>> q0, Consumer<ExpressionList<EColAB>> q1) { LoggedSql.start(); ExpressionList<EColAB> el0 = DB.find(EColAB.class).setUseQueryCache(CacheMode.ON).where(); q0.accept(el0); el0.findCount(); ExpressionList<EColAB> el1 = DB.find(EColAB.class).setUseQueryCache(CacheMode.ON).where(); q1.accept(el1); el1.findCount(); List<String> sql = LoggedSql.stop(); assertThat(sql).hasSize(2); // different queries } }
package service.main.entity.input_output.user; public class DataTokenFCM { private String token; public String getToken() { return token; } public void setToken(String token) { this.token = token; } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInsight.intention.impl; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.codeInsight.CodeInsightUtil; import com.intellij.codeInsight.CodeInsightUtilCore; import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementBuilder; import com.intellij.codeInsight.template.*; import com.intellij.lang.java.JavaLanguage; import com.intellij.lang.surroundWith.SurroundDescriptor; import com.intellij.lang.surroundWith.Surrounder; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.TextRange; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.InheritanceUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.refactoring.introduceVariable.IntroduceVariableBase; import com.intellij.refactoring.ui.TypeSelectorManagerImpl; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.ObjectUtils; import com.intellij.util.SmartList; import com.siyeh.ig.psiutils.CommentTracker; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.List; import java.util.stream.Stream; public class SurroundAutoCloseableAction extends PsiElementBaseIntentionAction { @Override public boolean isAvailable(@NotNull Project project, Editor editor, @NotNull PsiElement element) { return element.getLanguage().isKindOf(JavaLanguage.INSTANCE) && PsiUtil.getLanguageLevel(element).isAtLeast(LanguageLevel.JDK_1_7) && (findVariable(element) != null || findExpression(element) != null); } @Override public void invoke(@NotNull Project project, Editor editor, @NotNull PsiElement element) throws IncorrectOperationException { PsiLocalVariable variable = findVariable(element); if (variable != null) { processVariable(project, editor, variable); } else { PsiExpression expression = findExpression(element); if (expression != null) { processExpression(project, editor, expression); } } } private static PsiLocalVariable findVariable(PsiElement element) { PsiLocalVariable variable = PsiTreeUtil.getParentOfType(element, PsiLocalVariable.class); if (variable != null && variable.getParent() instanceof PsiDeclarationStatement && variable.getParent().getParent() instanceof PsiCodeBlock && rightType(variable.getType()) && validExpression(variable.getInitializer())) { return variable; } if (variable == null && element instanceof PsiWhiteSpace) { PsiElement sibling = element.getPrevSibling(); if (sibling instanceof PsiDeclarationStatement) { PsiElement lastVar = ArrayUtil.getLastElement(((PsiDeclarationStatement)sibling).getDeclaredElements()); if (lastVar instanceof PsiLocalVariable) { variable = (PsiLocalVariable)lastVar; if (rightType(variable.getType()) && validExpression(variable.getInitializer())) { return variable; } } } } return null; } private static PsiExpression findExpression(PsiElement element) { PsiExpression expression = PsiTreeUtil.getParentOfType(element, PsiExpression.class); if (expression != null && expression.getParent() instanceof PsiExpressionStatement && expression.getParent().getParent() instanceof PsiCodeBlock && validExpression(expression)) { return expression; } if (expression == null && element instanceof PsiWhiteSpace) { PsiElement sibling = element.getPrevSibling(); if (sibling instanceof PsiExpressionStatement) { expression = ((PsiExpressionStatement)sibling).getExpression(); if (validExpression(expression)) { return expression; } } } return null; } private static boolean rightType(PsiType type) { return InheritanceUtil.isInheritor(type, CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE); } private static boolean validExpression(PsiExpression expression) { return expression != null && rightType(expression.getType()) && PsiTreeUtil.findChildOfType(expression, PsiErrorElement.class) == null; } private static void processVariable(Project project, Editor editor, PsiLocalVariable variable) { PsiExpression initializer = ObjectUtils.assertNotNull(variable.getInitializer()); PsiElement declaration = variable.getParent(); PsiElement codeBlock = declaration.getParent(); LocalSearchScope scope = new LocalSearchScope(codeBlock); PsiElement last = null; for (PsiReference reference : ReferencesSearch.search(variable, scope).findAll()) { PsiElement usage = PsiTreeUtil.findPrevParent(codeBlock, reference.getElement()); if ((last == null || usage.getTextOffset() > last.getTextOffset())) { last = usage; } } CommentTracker tracker = new CommentTracker(); String text = "try (" + variable.getTypeElement().getText() + " " + variable.getName() + " = " + tracker.text(initializer) + ") {}"; PsiTryStatement armStatement = (PsiTryStatement)tracker.replaceAndRestoreComments(declaration, text); List<PsiElement> toFormat = null; if (last != null) { toFormat = moveStatements(last, armStatement); } CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(project); PsiElement formattedElement = codeStyleManager.reformat(armStatement); if (toFormat != null) { for (PsiElement psiElement : toFormat) { codeStyleManager.reformat(psiElement); } } if (last == null) { PsiCodeBlock tryBlock = ((PsiTryStatement)formattedElement).getTryBlock(); if (tryBlock != null) { PsiJavaToken brace = tryBlock.getLBrace(); if (brace != null) { editor.getCaretModel().moveToOffset(brace.getTextOffset() + 1); } } } } private static List<PsiElement> moveStatements(PsiElement last, PsiTryStatement statement) { PsiCodeBlock tryBlock = statement.getTryBlock(); assert tryBlock != null : statement.getText(); PsiElement parent = statement.getParent(); LocalSearchScope scope = new LocalSearchScope(parent); List<PsiElement> toFormat = new SmartList<>(); PsiElement stopAt = last.getNextSibling(); PsiElement i = statement.getNextSibling(); while (i != null && i != stopAt) { PsiElement child = i; i = PsiTreeUtil.skipWhitespacesAndCommentsForward(i); if (!(child instanceof PsiDeclarationStatement)) continue; int endOffset = last.getTextRange().getEndOffset(); //declared after last usage if (child.getTextOffset() > endOffset) break; PsiElement anchor = child; PsiElement[] declaredElements = ((PsiDeclarationStatement)child).getDeclaredElements(); for (PsiElement declared : declaredElements) { if (!(declared instanceof PsiLocalVariable)) continue; boolean contained = ReferencesSearch.search(declared, scope).forEach(ref -> ref.getElement().getTextOffset() <= endOffset); if (!contained) { PsiLocalVariable var = (PsiLocalVariable)declared; PsiElementFactory factory = JavaPsiFacade.getElementFactory(statement.getProject()); String name = var.getName(); assert name != null : child.getText(); PsiDeclarationStatement declarationStatement = factory.createVariableDeclarationStatement(name, var.getType(), null); PsiUtil.setModifierProperty((PsiLocalVariable)declarationStatement.getDeclaredElements()[0], PsiModifier.FINAL, var.hasModifierProperty(PsiModifier.FINAL)); toFormat.add(parent.addBefore(declarationStatement, statement)); CommentTracker commentTracker = new CommentTracker(); PsiExpression varInit = var.getInitializer(); if (varInit != null) { String varAssignText = name + " = " + commentTracker.text(varInit) + ";"; anchor = parent.addAfter(factory.createStatementFromText(varAssignText, parent), anchor); } commentTracker.deleteAndRestoreComments(declaredElements.length == 1 ? child : var); } } if (child == last && !child.isValid()) { last = anchor; } } PsiElement first = statement.getNextSibling(); tryBlock.addRangeBefore(first, last, tryBlock.getRBrace()); parent.deleteChildRange(first, last); return toFormat; } private static void processExpression(Project project, Editor editor, PsiExpression expression) { PsiType type = ObjectUtils.assertNotNull(expression.getType()); PsiElement statement = expression.getParent(); CommentTracker commentTracker = new CommentTracker(); String text = "try (" + type.getCanonicalText(true) + " r = " + commentTracker.text(expression) + ") {}"; PsiTryStatement tryStatement = (PsiTryStatement)commentTracker.replaceAndRestoreComments(statement, text); tryStatement = (PsiTryStatement)CodeStyleManager.getInstance(project).reformat(tryStatement); tryStatement = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(tryStatement); PsiResourceList resourceList = tryStatement.getResourceList(); if (resourceList != null) { PsiResourceVariable var = (PsiResourceVariable)resourceList.iterator().next(); PsiIdentifier id = var.getNameIdentifier(); PsiExpression initializer = var.getInitializer(); if (id != null && initializer != null) { type = initializer.getType(); String[] names = IntroduceVariableBase.getSuggestedName(type, initializer).names; PsiType[] types = Stream.of(new TypeSelectorManagerImpl(project, type, initializer, PsiExpression.EMPTY_ARRAY).getTypesForAll()) .filter(SurroundAutoCloseableAction::rightType) .toArray(PsiType[]::new); TemplateBuilder builder = TemplateBuilderFactory.getInstance().createTemplateBuilder(var); builder.replaceElement(id, new NamesExpression(names)); builder.replaceElement(var.getTypeElement(), new TypeExpression(project, types)); builder.run(editor, true); } } } @NotNull @Override public String getFamilyName() { return CodeInsightBundle.message("intention.surround.resource.with.ARM.block"); } @NotNull @Override public String getText() { return getFamilyName(); } private static class NamesExpression extends Expression { private final String[] myNames; public NamesExpression(String[] names) { myNames = names; } @Override public Result calculateResult(ExpressionContext context) { return calculateQuickResult(context); } @Override public Result calculateQuickResult(ExpressionContext context) { return new TextResult(myNames[0]); } @Override public LookupElement[] calculateLookupItems(ExpressionContext context) { return Stream.of(myNames).map(LookupElementBuilder::create).toArray(LookupElement[]::new); } } public static class Template implements SurroundDescriptor, Surrounder { private Surrounder[] mySurrounders = {this}; @NotNull @Override public PsiElement[] getElementsToSurround(PsiFile file, int startOffset, int endOffset) { PsiExpression expr = CodeInsightUtil.findExpressionInRange(file, startOffset, endOffset); if (expr == null) { expr = findExpression(file.findElementAt(endOffset)); } return expr != null && rightType(expr.getType()) ? new PsiElement[]{expr} : PsiElement.EMPTY_ARRAY; } @NotNull @Override public Surrounder[] getSurrounders() { return mySurrounders; } @Override public boolean isExclusive() { return false; } @Override public String getTemplateDescription() { return CodeInsightBundle.message("intention.surround.with.ARM.block.template"); } @Override public boolean isApplicable(@NotNull PsiElement[] elements) { return true; } @Nullable @Override public TextRange surroundElements(@NotNull Project project, @NotNull Editor editor, @NotNull PsiElement[] elements) { if (elements.length == 1 && elements[0] instanceof PsiExpression) { processExpression(project, editor, (PsiExpression)elements[0]); } return null; } } }
package game.objects; public enum GameObject { 가위, 바위, 보; private int[][] comparison = { // 가위 바위 보 /* 가위(0) */ { 0, -1, 1}, /* 바위(1) */ { 1, 0, -1}, /* 보 (2) */ { -1, 1, 0} }; // 상대의 (가위, 바위, 보)와 비교해서 결과 리턴 public int compare(GameObject ob) { return comparison[this.ordinal()][ob.ordinal()]; // enum 끼리 인덱스 번호로 비교 } }
package com.fasterxml.jackson.core.sym; import java.io.IOException; import java.util.HashSet; import com.fasterxml.jackson.core.*; import com.fasterxml.jackson.core.json.JsonFactory; /** * Tests that use symbol table functionality through parser. */ public class SymbolsViaParserTest extends com.fasterxml.jackson.core.BaseTest { // for [jackson-core#213] public void test17CharSymbols() throws Exception { _test17Chars(false); } // for [jackson-core#213] public void test17ByteSymbols() throws Exception { _test17Chars(true); } // for [jackson-core#216] public void testSymbolTableExpansionChars() throws Exception { _testSymbolTableExpansion(false); } // for [jackson-core#216] public void testSymbolTableExpansionBytes() throws Exception { _testSymbolTableExpansion(true); } /* /********************************************************** /* Secondary test methods /********************************************************** */ private void _test17Chars(boolean useBytes) throws IOException { String doc = _createDoc17(); JsonFactory f = new JsonFactory(); JsonParser p = useBytes ? f.createParser(ObjectReadContext.empty(), doc.getBytes("UTF-8")) : f.createParser(ObjectReadContext.empty(), doc); HashSet<String> syms = new HashSet<String>(); assertToken(JsonToken.START_OBJECT, p.nextToken()); for (int i = 0; i < 50; ++i) { assertToken(JsonToken.FIELD_NAME, p.nextToken()); syms.add(p.currentName()); assertToken(JsonToken.VALUE_TRUE, p.nextToken()); } assertToken(JsonToken.END_OBJECT, p.nextToken()); assertEquals(50, syms.size()); p.close(); } private String _createDoc17() { StringBuilder sb = new StringBuilder(1000); sb.append("{\n"); for (int i = 1; i <= 50; ++i) { if (i > 1) { sb.append(",\n"); } sb.append("\"lengthmatters") .append(1000 + i) .append("\": true"); } sb.append("\n}"); return sb.toString(); } public void _testSymbolTableExpansion(boolean useBytes) throws Exception { JsonFactory jsonFactory = new JsonFactory(); // Important: must create separate documents to gradually build up symbol table for (int i = 0; i < 200; i++) { String field = Integer.toString(i); final String doc = "{ \"" + field + "\" : \"test\" }"; JsonParser parser = useBytes ? jsonFactory.createParser(ObjectReadContext.empty(), doc.getBytes("UTF-8")) : jsonFactory.createParser(ObjectReadContext.empty(), doc); assertToken(JsonToken.START_OBJECT, parser.nextToken()); assertToken(JsonToken.FIELD_NAME, parser.nextToken()); assertEquals(field, parser.currentName()); assertToken(JsonToken.VALUE_STRING, parser.nextToken()); assertToken(JsonToken.END_OBJECT, parser.nextToken()); assertNull(parser.nextToken()); parser.close(); } } }
/** * This code was generated by * \ / _ _ _| _ _ * | (_)\/(_)(_|\/| |(/_ v1.0.0 * / / */ package com.twilio.rest.proxy.v1.service.session; import com.twilio.base.Creator; import com.twilio.exception.ApiConnectionException; import com.twilio.exception.ApiException; import com.twilio.exception.RestException; import com.twilio.http.HttpMethod; import com.twilio.http.Request; import com.twilio.http.Response; import com.twilio.http.TwilioRestClient; import com.twilio.rest.Domains; /** * PLEASE NOTE that this class contains beta products that are subject to * change. Use them with caution. */ public class ParticipantCreator extends Creator<Participant> { private final String pathServiceSid; private final String pathSessionSid; private final String identifier; private String friendlyName; private String proxyIdentifier; private String proxyIdentifierSid; /** * Construct a new ParticipantCreator. * * @param pathServiceSid The SID of the parent Service resource * @param pathSessionSid The SID of the parent Session resource * @param identifier The phone number of the Participant */ public ParticipantCreator(final String pathServiceSid, final String pathSessionSid, final String identifier) { this.pathServiceSid = pathServiceSid; this.pathSessionSid = pathSessionSid; this.identifier = identifier; } /** * The string that you assigned to describe the participant. This value must be * 255 characters or fewer. **This value should not have PII.**. * * @param friendlyName The string that you assigned to describe the participant * @return this */ public ParticipantCreator setFriendlyName(final String friendlyName) { this.friendlyName = friendlyName; return this; } /** * The proxy phone number to use for the Participant. If not specified, Proxy * will select a number from the pool.. * * @param proxyIdentifier The proxy phone number to use for the Participant * @return this */ public ParticipantCreator setProxyIdentifier(final String proxyIdentifier) { this.proxyIdentifier = proxyIdentifier; return this; } /** * The SID of the Proxy Identifier to assign to the Participant.. * * @param proxyIdentifierSid The Proxy Identifier Sid * @return this */ public ParticipantCreator setProxyIdentifierSid(final String proxyIdentifierSid) { this.proxyIdentifierSid = proxyIdentifierSid; return this; } /** * Make the request to the Twilio API to perform the create. * * @param client TwilioRestClient with which to make the request * @return Created Participant */ @Override @SuppressWarnings("checkstyle:linelength") public Participant create(final TwilioRestClient client) { Request request = new Request( HttpMethod.POST, Domains.PROXY.toString(), "/v1/Services/" + this.pathServiceSid + "/Sessions/" + this.pathSessionSid + "/Participants", client.getRegion() ); addPostParams(request); Response response = client.request(request); if (response == null) { throw new ApiConnectionException("Participant creation failed: Unable to connect to server"); } else if (!TwilioRestClient.SUCCESS.apply(response.getStatusCode())) { RestException restException = RestException.fromJson(response.getStream(), client.getObjectMapper()); if (restException == null) { throw new ApiException("Server Error, no content"); } throw new ApiException( restException.getMessage(), restException.getCode(), restException.getMoreInfo(), restException.getStatus(), null ); } return Participant.fromJson(response.getStream(), client.getObjectMapper()); } /** * Add the requested post parameters to the Request. * * @param request Request to add post params to */ private void addPostParams(final Request request) { if (identifier != null) { request.addPostParam("Identifier", identifier); } if (friendlyName != null) { request.addPostParam("FriendlyName", friendlyName); } if (proxyIdentifier != null) { request.addPostParam("ProxyIdentifier", proxyIdentifier); } if (proxyIdentifierSid != null) { request.addPostParam("ProxyIdentifierSid", proxyIdentifierSid); } } }
import java.util.ArrayList; import java.util.List; public class _0089GrayCode { public List<Integer> grayCode(int n) { List<Integer> list = new ArrayList<Integer>(); list.add(0); for (int i = 1; i <= n; i++) { List<Integer> newList = new ArrayList<Integer>(); int bit = 1 << (i - 1); for (int num : list) newList.add(0, bit + num); list.addAll(newList); } return list; } }
package net.atcat.nanzhi.chairs.jsonPack; import javax.annotation.Nullable; import java.util.List; import java.util.function.Consumer; public class PackListObject extends JSONObj { private List<String> packlist ; public PackListObject( List<String> pl ) { this.packlist = pl ; } ; // 遍历 public void forEach ( Consumer<String> callback ) { packlist.forEach( callback ) ; } ; // 取得列表 public List<String> getList ( ) { return this.packlist; } ; // 从路径取得实例 @Nullable public static PackListObject getObjectFromJSONFile(String path ) { return getObjectFromJSONFile( path, PackListObject.class ) ; } ; }
package org.robolectric.shadows; import android.content.pm.PackageInfo; import android.graphics.Bitmap; import android.os.Build; import android.os.Bundle; import android.view.ViewGroup.LayoutParams; import android.webkit.ValueCallback; import android.webkit.WebBackForwardList; import android.webkit.WebChromeClient; import android.webkit.WebHistoryItem; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import java.lang.reflect.Field; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.robolectric.annotation.HiddenApi; import org.robolectric.annotation.Implementation; import org.robolectric.annotation.Implements; import org.robolectric.annotation.RealObject; import org.robolectric.annotation.Resetter; import org.robolectric.fakes.RoboWebSettings; import org.robolectric.util.ReflectionHelpers; @SuppressWarnings({"UnusedDeclaration"}) @Implements(value = WebView.class) public class ShadowWebView extends ShadowViewGroup { @RealObject private WebView realWebView; private static final String HISTORY_KEY = "ShadowWebView.History"; private static PackageInfo packageInfo = null; private String lastUrl; private Map<String, String> lastAdditionalHttpHeaders; private HashMap<String, Object> javascriptInterfaces = new HashMap<>(); private WebSettings webSettings = new RoboWebSettings(); private WebViewClient webViewClient = null; private boolean clearCacheCalled = false; private boolean clearCacheIncludeDiskFiles = false; private boolean clearFormDataCalled = false; private boolean clearHistoryCalled = false; private boolean clearViewCalled = false; private boolean destroyCalled = false; private boolean onPauseCalled = false; private boolean onResumeCalled = false; private WebChromeClient webChromeClient; private boolean canGoBack; private int goBackInvocations = 0; private LoadData lastLoadData; private LoadDataWithBaseURL lastLoadDataWithBaseURL; private String originalUrl; private ArrayList<String> history = new ArrayList<>(); private String lastEvaluatedJavascript; // TODO: Delete this when setCanGoBack is deleted. This is only used to determine which "path" we // use when canGoBack or goBack is called. private boolean canGoBackIsSet; @HiddenApi @Implementation public void ensureProviderCreated() { final ClassLoader classLoader = getClass().getClassLoader(); Class<?> webViewProviderClass = getClassNamed("android.webkit.WebViewProvider"); Field mProvider; try { mProvider = WebView.class.getDeclaredField("mProvider"); mProvider.setAccessible(true); if (mProvider.get(realView) == null) { Object provider = Proxy.newProxyInstance( classLoader, new Class[] {webViewProviderClass}, new InvocationHandler() { @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (method.getName().equals("getViewDelegate") || method.getName().equals("getScrollDelegate")) { return Proxy.newProxyInstance( classLoader, new Class[] { getClassNamed("android.webkit.WebViewProvider$ViewDelegate"), getClassNamed("android.webkit.WebViewProvider$ScrollDelegate") }, new InvocationHandler() { @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { return nullish(method); } }); } return nullish(method); } }); mProvider.set(realView, provider); } } catch (NoSuchFieldException | IllegalAccessException e) { throw new RuntimeException(e); } } @Implementation protected void setLayoutParams(LayoutParams params) { ReflectionHelpers.setField(realWebView, "mLayoutParams", params); } private Object nullish(Method method) { Class<?> returnType = method.getReturnType(); if (returnType.equals(long.class) || returnType.equals(double.class) || returnType.equals(int.class) || returnType.equals(float.class) || returnType.equals(short.class) || returnType.equals(byte.class)) return 0; if (returnType.equals(char.class)) return '\0'; if (returnType.equals(boolean.class)) return false; return null; } private Class<?> getClassNamed(String className) { try { return getClass().getClassLoader().loadClass(className); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } @Implementation protected void loadUrl(String url) { loadUrl(url, null); } @Implementation protected void loadUrl(String url, Map<String, String> additionalHttpHeaders) { history.add(0, url); originalUrl = url; lastUrl = url; if (additionalHttpHeaders != null) { this.lastAdditionalHttpHeaders = Collections.unmodifiableMap(additionalHttpHeaders); } else { this.lastAdditionalHttpHeaders = null; } } @Implementation protected void loadDataWithBaseURL( String baseUrl, String data, String mimeType, String encoding, String historyUrl) { if (historyUrl != null) { originalUrl = historyUrl; history.add(0, historyUrl); } lastLoadDataWithBaseURL = new LoadDataWithBaseURL(baseUrl, data, mimeType, encoding, historyUrl); } @Implementation protected void loadData(String data, String mimeType, String encoding) { lastLoadData = new LoadData(data, mimeType, encoding); } /** @return the last loaded url */ public String getLastLoadedUrl() { return lastUrl; } @Implementation protected String getOriginalUrl() { return originalUrl; } @Implementation protected String getUrl() { return originalUrl; } /** @return the additional Http headers that in the same request with last loaded url */ public Map<String, String> getLastAdditionalHttpHeaders() { return lastAdditionalHttpHeaders; } @Implementation protected WebSettings getSettings() { return webSettings; } @Implementation protected void setWebViewClient(WebViewClient client) { webViewClient = client; } @Implementation protected void setWebChromeClient(WebChromeClient client) { webChromeClient = client; } public WebViewClient getWebViewClient() { return webViewClient; } @Implementation protected void addJavascriptInterface(Object obj, String interfaceName) { javascriptInterfaces.put(interfaceName, obj); } public Object getJavascriptInterface(String interfaceName) { return javascriptInterfaces.get(interfaceName); } @Implementation protected void clearCache(boolean includeDiskFiles) { clearCacheCalled = true; clearCacheIncludeDiskFiles = includeDiskFiles; } public boolean wasClearCacheCalled() { return clearCacheCalled; } public boolean didClearCacheIncludeDiskFiles() { return clearCacheIncludeDiskFiles; } @Implementation protected void clearFormData() { clearFormDataCalled = true; } public boolean wasClearFormDataCalled() { return clearFormDataCalled; } @Implementation protected void clearHistory() { clearHistoryCalled = true; history.clear(); } public boolean wasClearHistoryCalled() { return clearHistoryCalled; } @Implementation protected void clearView() { clearViewCalled = true; } public boolean wasClearViewCalled() { return clearViewCalled; } @Implementation protected void onPause() { onPauseCalled = true; } public boolean wasOnPauseCalled() { return onPauseCalled; } @Implementation protected void onResume() { onResumeCalled = true; } public boolean wasOnResumeCalled() { return onResumeCalled; } @Implementation protected void destroy() { destroyCalled = true; } public boolean wasDestroyCalled() { return destroyCalled; } /** @return webChromeClient */ public WebChromeClient getWebChromeClient() { return webChromeClient; } @Implementation protected boolean canGoBack() { // TODO: Remove the canGoBack check when setCanGoBack is deleted. if (canGoBackIsSet) { return canGoBack; } return history.size() > 1; } @Implementation protected void goBack() { if (canGoBack()) { goBackInvocations++; // TODO: Delete this when setCanGoBack is deleted, since this creates two different behavior // paths. if (canGoBackIsSet) { return; } history.remove(0); if (!history.isEmpty()) { originalUrl = history.get(0); } } } @Implementation protected WebBackForwardList copyBackForwardList() { return new BackForwardList(history); } @Implementation protected static String findAddress(String addr) { return null; } /** * Overrides the system implementation for getting the WebView package. * * <p>Returns null by default, but this can be changed with {@code #setCurrentWebviewPackage()}. */ @Implementation(minSdk = Build.VERSION_CODES.O) protected static PackageInfo getCurrentWebViewPackage() { return packageInfo; } /** Sets the value to return from {@code #getCurrentWebviewPackage()}. */ public static void setCurrentWebViewPackage(PackageInfo webViewPackageInfo) { packageInfo = webViewPackageInfo; } @Implementation(minSdk = Build.VERSION_CODES.KITKAT) protected void evaluateJavascript(String script, ValueCallback<String> callback) { this.lastEvaluatedJavascript = script; } public String getLastEvaluatedJavascript() { return lastEvaluatedJavascript; } /** * Sets the value to return from {@code android.webkit.WebView#canGoBack()} * * @param canGoBack Value to return from {@code android.webkit.WebView#canGoBack()} * @deprecated Do not depend on this method as it will be removed in a future update. The * preferered method is to populate a fake web history to use for going back. */ @Deprecated public void setCanGoBack(boolean canGoBack) { canGoBackIsSet = true; this.canGoBack = canGoBack; } /** * @return goBackInvocations the number of times {@code android.webkit.WebView#goBack()} was * invoked */ public int getGoBackInvocations() { return goBackInvocations; } public LoadData getLastLoadData() { return lastLoadData; } public LoadDataWithBaseURL getLastLoadDataWithBaseURL() { return lastLoadDataWithBaseURL; } @Implementation protected WebBackForwardList saveState(Bundle outState) { if (history.size() > 0) { outState.putStringArrayList(HISTORY_KEY, history); } return new BackForwardList(history); } @Implementation protected WebBackForwardList restoreState(Bundle inState) { history = inState.getStringArrayList(HISTORY_KEY); if (history != null && history.size() > 0) { originalUrl = history.get(0); lastUrl = history.get(0); return new BackForwardList(history); } return null; } @Resetter public static void reset() { packageInfo = null; } public static void setWebContentsDebuggingEnabled(boolean enabled) {} public static class LoadDataWithBaseURL { public final String baseUrl; public final String data; public final String mimeType; public final String encoding; public final String historyUrl; public LoadDataWithBaseURL( String baseUrl, String data, String mimeType, String encoding, String historyUrl) { this.baseUrl = baseUrl; this.data = data; this.mimeType = mimeType; this.encoding = encoding; this.historyUrl = historyUrl; } } public static class LoadData { public final String data; public final String mimeType; public final String encoding; public LoadData(String data, String mimeType, String encoding) { this.data = data; this.mimeType = mimeType; this.encoding = encoding; } } private static class BackForwardList extends WebBackForwardList { private final ArrayList<String> history; public BackForwardList(ArrayList<String> history) { this.history = (ArrayList<String>) history.clone(); // WebView expects the most recently visited item to be at the end of the list. Collections.reverse(this.history); } @Override public int getCurrentIndex() { return history.size() - 1; } @Override public int getSize() { return history.size(); } @Override public HistoryItem getCurrentItem() { if (history.isEmpty()) { return null; } return new HistoryItem(history.get(getCurrentIndex())); } @Override public HistoryItem getItemAtIndex(int index) { return new HistoryItem(history.get(index)); } @Override protected WebBackForwardList clone() { return new BackForwardList(history); } } private static class HistoryItem extends WebHistoryItem { private final String url; public HistoryItem(String url) { this.url = url; } @Override public int getId() { return url.hashCode(); } @Override public Bitmap getFavicon() { return null; } @Override public String getOriginalUrl() { return url; } @Override public String getTitle() { return url; } @Override public String getUrl() { return url; } @Override protected HistoryItem clone() { return new HistoryItem(url); } } }
package client; /* * Copyright (c) 2009-2020. Authors: see NOTICE file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import be.cytomine.client.CytomineException; import be.cytomine.client.collections.AnnotationCollection; import be.cytomine.client.collections.Collection; import be.cytomine.client.models.*; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; public class AnnotationTest { private static final Logger log = LogManager.getLogger(AnnotationTest.class); @BeforeAll static void init() throws CytomineException { Utils.connect(); } @Test void testCreateAnnotation() throws CytomineException { log.info("test create annotation"); ImageInstance image = Utils.getImageInstance(); Annotation a = new Annotation("POLYGON ((1983 2168, 2107 2160, 2047 2074, 1983 2168))", image).save(); assertEquals(image.getId(), a.get("image"), "image not the same used for the annotation creation"); a = new Annotation().fetch(a.getId()); assertEquals(image.getId(), a.get("image"), "fetched image not the same used for the annotation creation"); String locationBis = "POLYGON ((1983 2169, 2107 2160, 2047 2074, 1983 2169))"; assertNotEquals(locationBis, a.get("location"), "Not the location used for the annotation update"); a.set("location", locationBis); a.update(); assertEquals(locationBis, a.get("location"), "Not the location used for the annotation update"); a.delete(); try { new Annotation().fetch(a.getId()); assert false; } catch (CytomineException e) { assertEquals(e.getHttpCode(), 404); } } @Test void testCreateAnnotationWithTerms() throws CytomineException { log.info("test create annotation with terms"); ImageInstance image = Utils.getImageInstance(); Term term1 = Utils.getTerm(); Term term2 = Utils.getNewTerm(); List<Long> idTerms = new ArrayList<>(); idTerms.add(term1.getId()); idTerms.add(term2.getId()); Annotation a = new Annotation("POLYGON ((1983 2168, 2107 2160, 2047 2074, 1983 2168))", image.getId(), idTerms).save(); assertEquals(idTerms.toString().replace(" ",""), a.getStr("term"), "terms not the same used for the annotation creation"); } @Test void testCreateAnnotationIncorrect() throws CytomineException { log.info("test create incorrect annotation"); try { new Annotation().save(); assert false; } catch (CytomineException e) { assertEquals(400, e.getHttpCode()); } } @Test void testListAllAnnotations() throws CytomineException { log.info("test list all annotations"); try { Collection<Annotation> c = Collection.fetch(Annotation.class); assert false; } catch (CytomineException e) { assertEquals(e.getHttpCode(), 400); } try { AnnotationCollection ac = new AnnotationCollection(0,0); ac.fetch(); assert false; } catch (CytomineException e) { assertEquals(e.getHttpCode(), 400); } } @Test void testListAnnotationsByProject() throws CytomineException { log.info("test list annotations in a project"); Project project = Utils.getProject(); AnnotationCollection ac = new AnnotationCollection(0,0); ac.addFilter("project", project.getId().toString()); ac.fetch(); int size = ac.size(); log.info(ac.size()); Utils.getNewAnnotation(); ac.fetch(); assertEquals(size+1, ac.size()); log.info(ac.size()); size = ac.size(); ac = AnnotationCollection.fetchByProject(project); assertEquals(size, ac.size()); log.info(ac.size()); } @Test void testListAnnotationsByUser() throws CytomineException { log.info("test list annotations by an user"); User user = User.getCurrent(); Project project = Utils.getProject(); AnnotationCollection ac = new AnnotationCollection(0,0); ac.addFilter("user", user.getId().toString()); ac.addFilter("project", project.getId().toString()); ac.fetch(); int size = ac.size(); log.info(ac.size()); Utils.getNewAnnotation(); ac.fetch(); assertEquals(size+1, ac.size()); log.info(ac.size()); size = ac.size(); ac = AnnotationCollection.fetchByUserAndProject(user, project); assertEquals(size, ac.size()); log.info(ac.size()); } @Test void testListAnnotationsByOntology() throws CytomineException { log.info("test list annotations where term are into an ontology"); Ontology ontology = Utils.getOntology(); Project project = Utils.getProject(); AnnotationCollection ac = new AnnotationCollection(0,0); ac.addFilter("ontology", ontology.getId().toString()); ac.addFilter("project", project.getId().toString()); ac.fetch(); int size = ac.size(); log.info(ac.size()); Utils.getNewAnnotation(); ac.fetch(); assertEquals(size+1, ac.size()); log.info(ac.size()); size = ac.size(); Map<String, Object> params = new HashMap<>(); params.put("ontology", ontology.getId()); params.put("project", project.getId()); ac = AnnotationCollection.fetchWithParameters(params); assertEquals(size, ac.size()); log.info(ac.size()); } @Test void testListAnnotationsByImage() throws CytomineException { log.info("test list annotations in a image"); ImageInstance image = Utils.getImageInstance(); AnnotationCollection ac = new AnnotationCollection(0,0); ac.addFilter("image", image.getId().toString()); ac.fetch(); int size = ac.size(); log.info(ac.size()); Utils.getNewAnnotation(); ac.fetch(); assertEquals(size+1, ac.size()); log.info(ac.size()); size = ac.size(); ac = AnnotationCollection.fetchByImageInstance(image); assertEquals(size, ac.size()); log.info(ac.size()); } @Test void testListAnnotationsByTermAndProject() throws CytomineException { log.info("test list annotations where term are into an ontology"); Project project = Utils.getProject(); AnnotationCollection ac = new AnnotationCollection(0,0); ac.addFilter("term", Utils.getTerm().getId().toString()); ac.addFilter("project", project.getId().toString()); ac.fetch(); int size = ac.size(); log.info(ac.size()); ImageInstance image = Utils.getImageInstance(); Term term = Utils.getTerm(); List<Long> idTerms = new ArrayList<>(); idTerms.add(term.getId()); Annotation a = new Annotation("POLYGON ((1983 2168, 2107 2160, 2047 2074, 1983 2168))", image.getId(), idTerms).save(); ac.fetch(); assertEquals(size+1, ac.size()); log.info(ac.size()); size = ac.size(); ac = AnnotationCollection.fetchByTermAndProject(term, project); assertEquals(size, ac.size()); log.info(ac.size()); } @Test void testListAnnotationsByTermAndImage() throws CytomineException { log.info("test list annotations where term are into an ontology"); ImageInstance image = Utils.getImageInstance(); AnnotationCollection ac = new AnnotationCollection(0,0); ac.addFilter("term", Utils.getTerm().getId().toString()); ac.addFilter("image", image.getId().toString()); ac.fetch(); int size = ac.size(); log.info(ac.size()); Term term = Utils.getTerm(); List<Long> idTerms = new ArrayList<>(); idTerms.add(term.getId()); Annotation a = new Annotation("POLYGON ((1983 2168, 2107 2160, 2047 2074, 1983 2168))", image.getId(), idTerms).save(); ac.fetch(); assertEquals(size+1, ac.size()); log.info(ac.size()); size = ac.size(); ac = AnnotationCollection.fetchByTermAndImageInstance(term, image); assertEquals(size, ac.size()); log.info(ac.size()); } @Test void testSimplify() throws CytomineException { log.info("test simplify an annotation"); Annotation a = Utils.getAnnotation(); a.simplify(0L,100L); } }
import java.util.*; public class Test { List<Integer> f = new ArrayList<Integer>(){}; }
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.rest.filter; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.servlet.ServletContext; import javax.ws.rs.WebApplicationException; import org.apache.logging.log4j.Logger; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; import org.dspace.rest.common.Item; import org.dspace.rest.common.ItemFilter; /** * The set of Item Filter Use Cases to apply to a collection of items. * * @author Terry Brady, Georgetown University */ public class ItemFilterSet { protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); static Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemFilterSet.class); private List<ItemFilter> itemFilters; private ItemFilter allFiltersFilter; /** * Construct a set of Item Filters identified by a list string. * * @param filterList Comma separated list of filter names to include. * Use {@link org.dspace.rest.common.ItemFilter#ALL} to retrieve all filters. * @param reportItems If true, return item details. If false, return only counts of items. */ public ItemFilterSet(String filterList, boolean reportItems) { log.debug(String.format("Create ItemFilterSet: %s", filterList)); itemFilters = ItemFilter.getItemFilters(filterList, reportItems); allFiltersFilter = ItemFilter.getAllFiltersFilter(itemFilters); } /** * Get the special filter that represents the intersection of all items in the Item Filter Set. * * @return the special Item Filter that contains items that satisfied every other Item Filter in the Item Filter Set */ public ItemFilter getAllFiltersFilter() { return allFiltersFilter; } /** * Evaluate an item against the use cases in the Item Filter Set. * * If an item satisfies all items in the Item Filter Set, it should also ve added to the special all items filter. * * @param context Active DSpace Context * @param item DSpace Object to evaluate * @param restItem REST representation of the DSpace Object being evaluated */ public void testItem(Context context, org.dspace.content.Item item, Item restItem) { boolean bAllTrue = true; for (ItemFilter itemFilter : itemFilters) { if (itemFilter.hasItemTest()) { bAllTrue &= itemFilter.testItem(context, item, restItem); } } if (bAllTrue && allFiltersFilter != null) { allFiltersFilter.addItem(restItem); } } /** * Get all of the Item Filters initialized into the Item Filter Set * * @return a list of Item Filters initialized into the Item Filter Set */ public List<ItemFilter> getItemFilters() { return itemFilters; } /** * Evaluate a set of Items against the Item Filters in the Item Filter Set * Current DSpace Context * * @param context Current DSpace Context * @param servletContext Context of the servlet container. * @param childItems Collection of Items to Evaluate * @param save If true, save the details of each item that is evaluated * @param expand List of item details to include in the results * @return The number of items evaluated * @throws WebApplicationException Runtime exception for applications. * @throws SQLException An exception that provides information on a database access error or other * errors. */ public int processSaveItems(Context context, ServletContext servletContext, Iterator<org.dspace.content.Item> childItems, boolean save, String expand) throws WebApplicationException, SQLException { return processSaveItems(context, servletContext, childItems, new ArrayList<Item>(), save, expand); } /** * Evaluate a set of Items against the Item Filters in the Item Filter Set * * @param context Current DSpace Context * @param servletContext Context of the servlet container. * @param childItems Collection of Items to Evaluate * @param items List of items to contain saved results * @param save If true, save the details of each item that is evaluated * @param expand List of item details to include in the results * @return The number of items evaluated * @throws WebApplicationException Runtime exception for applications. * @throws SQLException An exception that provides information on a database access error or other * errors. */ public int processSaveItems(Context context, ServletContext servletContext, Iterator<org.dspace.content.Item> childItems, List<Item> items, boolean save, String expand) throws WebApplicationException, SQLException { int count = 0; while (childItems.hasNext()) { count++; org.dspace.content.Item item = childItems.next(); log.debug(item.getHandle() + " evaluate."); if (authorizeService.authorizeActionBoolean(context, item, org.dspace.core.Constants.READ)) { Item restItem = new Item(item, servletContext, expand, context); if (save) { items.add(restItem); } testItem(context, item, restItem); } else { log.debug(item.getHandle() + " not authorized - not included in result set."); } } return count; } }
/* * Copyright 2013-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.loan; import com.example.loan.model.FraudCheckStatus; import com.example.loan.model.FraudServiceRequest; import com.example.loan.model.FraudServiceResponse; import com.example.loan.model.LoanApplication; import com.example.loan.model.LoanApplicationResult; import com.example.loan.model.LoanApplicationStatus; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.ResponseEntity; import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; import org.springframework.stereotype.Service; import org.springframework.web.client.RestTemplate; @Service public class LoanApplicationService { private static final String FRAUD_SERVICE_JSON_VERSION_1 = "application/vnd.fraud.v1+json"; private final RestTemplate restTemplate; private final ServiceConfiguration serviceConfiguration; public LoanApplicationService(ServiceConfiguration serviceConfiguration) { this.restTemplate = new RestTemplate(); // tag::custom_request_factory[] this.restTemplate.setRequestFactory(new HttpComponentsClientHttpRequestFactory()); // end::custom_request_factory[] this.serviceConfiguration = serviceConfiguration; } public LoanApplicationResult loanApplication(LoanApplication loanApplication) { FraudServiceRequest request = new FraudServiceRequest(loanApplication); FraudServiceResponse response = sendRequestToFraudDetectionService(request); return buildResponseFromFraudResult(response); } private FraudServiceResponse sendRequestToFraudDetectionService( FraudServiceRequest request) { HttpHeaders httpHeaders = new HttpHeaders(); httpHeaders.add(HttpHeaders.CONTENT_TYPE, FRAUD_SERVICE_JSON_VERSION_1); // tag::client_call_server[] ResponseEntity<FraudServiceResponse> response = restTemplate.exchange( "http://localhost:" + getPort() + "/fraudcheck", HttpMethod.PUT, new HttpEntity<>(request, httpHeaders), FraudServiceResponse.class); // end::client_call_server[] return response.getBody(); } private LoanApplicationResult buildResponseFromFraudResult( FraudServiceResponse response) { LoanApplicationStatus applicationStatus = null; if (FraudCheckStatus.OK == response.getFraudCheckStatus()) { applicationStatus = LoanApplicationStatus.LOAN_APPLIED; } else if (FraudCheckStatus.FRAUD == response.getFraudCheckStatus()) { applicationStatus = LoanApplicationStatus.LOAN_APPLICATION_REJECTED; } return new LoanApplicationResult(applicationStatus, response.getRejectionReason()); } public int getPort() { return this.serviceConfiguration.getPort(); } public void setPort(int port) { this.serviceConfiguration.setPort(port); } } @ConfigurationProperties("service") class ServiceConfiguration { private int port = 8080; public int getPort() { return port; } public void setPort(int port) { this.port = port; } }
package com.devplatform.translatorservice.clients.queryparam; public class SlackGetUsersInfoQueryParam extends SlackGenericQueryParam{ private String user; public SlackGetUsersInfoQueryParam(String token, String user){ this.token = token; this.user = user; } }
package arrays; public class ContainerWithMostWater { private static class Solution { private int BFApproach(int[] height) { int maxArea = Integer.MIN_VALUE; for (int i = 0; i < height.length; i++) { for (int j = 1; j < height.length; j++) { int area = (j - i) * Math.min(height[i], height[j]); maxArea = Math.max(maxArea, area); } } return maxArea; } public int maxArea(int[] height) { return BFApproach(height); } public int maxAreaOptimized(int[] height) { int left = 0; int right = height.length - 1; int maxArea = 0; while (left < right) { int area = (right - left) * Math.min(height[left], height[right]); if (height[left] < height[right]) { left++; }else { right--; } maxArea = Math.max(area, maxArea); } return maxArea; } } public static void main(String[] args) { Solution solution = new Solution(); System.out.println("====> max area:: " + solution.maxArea(new int[]{1, 8, 6, 2, 5, 4, 8, 3, 7})); System.out.println("====> max area:: " + solution.maxAreaOptimized(new int[]{1, 8, 6, 2, 5, 4, 8, 3, 7})); } }
package hs.jfx.eventstream.core.impl; import hs.jfx.eventstream.api.EventStream; import hs.jfx.eventstream.api.ObservableStream; import hs.jfx.eventstream.api.Subscriber; public abstract class FilterNullStreams { public static <T> EventStream<T> event(ObservableStream<T> source) { return new BaseEventStream<>(subscriber(source)); } private static <T> Subscriber<T> subscriber(ObservableStream<T> source) { return emitter -> source.subscribe(v -> { if(v != null) { emitter.emit(v); } }); } }
import org.apache.camel.builder.RouteBuilder; public class ADifferentRouteBuilder extends RouteBuilder { public void configure() { from("timer:timerName?delay=1000"); } }
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.reactive.config; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.springframework.cache.Cache; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.http.CacheControl; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.web.reactive.resource.ResourceWebHandler; /** * Assist with creating and configuring a static resources handler. * * @author Rossen Stoyanchev * @since 5.0 */ public class ResourceHandlerRegistration { private final ResourceLoader resourceLoader; private final String[] pathPatterns; private final List<String> locationValues = new ArrayList<>(); @Nullable private CacheControl cacheControl; @Nullable private ResourceChainRegistration resourceChainRegistration; /** * Create a {@link ResourceHandlerRegistration} instance. * @param resourceLoader a resource loader for turning a String location * into a {@link Resource} * @param pathPatterns one or more resource URL path patterns */ public ResourceHandlerRegistration(ResourceLoader resourceLoader, String... pathPatterns) { Assert.notNull(resourceLoader, "ResourceLoader is required"); Assert.notEmpty(pathPatterns, "At least one path pattern is required for resource handling"); this.resourceLoader = resourceLoader; this.pathPatterns = pathPatterns; } /** * Add one or more resource locations from which to serve static content. * Each location must point to a valid directory. Multiple locations may * be specified as a comma-separated list, and the locations will be checked * for a given resource in the order specified. * * <p>For example, {{@code "/"}, * {@code "classpath:/META-INF/public-web-resources/"}} allows resources to * be served both from the web application root and from any JAR on the * classpath that contains a {@code /META-INF/public-web-resources/} directory, * with resources in the web application root taking precedence. * @return the same {@link ResourceHandlerRegistration} instance, for * chained method invocation */ public ResourceHandlerRegistration addResourceLocations(String... resourceLocations) { this.locationValues.addAll(Arrays.asList(resourceLocations)); return this; } /** * Specify the {@link CacheControl} which should be used * by the resource handler. * @param cacheControl the CacheControl configuration to use * @return the same {@link ResourceHandlerRegistration} instance, for * chained method invocation */ public ResourceHandlerRegistration setCacheControl(CacheControl cacheControl) { this.cacheControl = cacheControl; return this; } /** * Configure a chain of resource resolvers and transformers to use. This * can be useful, for example, to apply a version strategy to resource URLs. * <p>If this method is not invoked, by default only a simple * {@code PathResourceResolver} is used in order to match URL paths to * resources under the configured locations. * @param cacheResources whether to cache the result of resource resolution; * setting this to "true" is recommended for production (and "false" for * development, especially when applying a version strategy) * @return the same {@link ResourceHandlerRegistration} instance, for * chained method invocation */ public ResourceChainRegistration resourceChain(boolean cacheResources) { this.resourceChainRegistration = new ResourceChainRegistration(cacheResources); return this.resourceChainRegistration; } /** * Configure a chain of resource resolvers and transformers to use. This * can be useful, for example, to apply a version strategy to resource URLs. * <p>If this method is not invoked, by default only a simple * {@code PathResourceResolver} is used in order to match URL paths to * resources under the configured locations. * @param cacheResources whether to cache the result of resource resolution; * setting this to "true" is recommended for production (and "false" for * development, especially when applying a version strategy * @param cache the cache to use for storing resolved and transformed resources; * by default a {@link org.springframework.cache.concurrent.ConcurrentMapCache} * is used. Since Resources aren't serializable and can be dependent on the * application host, one should not use a distributed cache but rather an * in-memory cache. * @return the same {@link ResourceHandlerRegistration} instance, for chained method invocation */ public ResourceChainRegistration resourceChain(boolean cacheResources, Cache cache) { this.resourceChainRegistration = new ResourceChainRegistration(cacheResources, cache); return this.resourceChainRegistration; } /** * Returns the URL path patterns for the resource handler. */ protected String[] getPathPatterns() { return this.pathPatterns; } /** * Returns a {@link ResourceWebHandler} instance. */ protected ResourceWebHandler getRequestHandler() { ResourceWebHandler handler = new ResourceWebHandler(); handler.setLocationValues(this.locationValues); handler.setResourceLoader(this.resourceLoader); if (this.resourceChainRegistration != null) { handler.setResourceResolvers(this.resourceChainRegistration.getResourceResolvers()); handler.setResourceTransformers(this.resourceChainRegistration.getResourceTransformers()); } if (this.cacheControl != null) { handler.setCacheControl(this.cacheControl); } return handler; } }
package liquibase.sdk.database; import liquibase.CatalogAndSchema; import liquibase.Liquibase; import liquibase.change.Change; import liquibase.changelog.ChangeSet; import liquibase.changelog.DatabaseChangeLog; import liquibase.changelog.RanChangeSet; import liquibase.database.Database; import liquibase.database.DatabaseConnection; import liquibase.database.InternalDatabase; import liquibase.database.ObjectQuotingStrategy; import liquibase.exception.*; import liquibase.lockservice.DatabaseChangeLogLock; import liquibase.sql.visitor.SqlVisitor; import liquibase.statement.DatabaseFunction; import liquibase.statement.SqlStatement; import liquibase.structure.DatabaseObject; import liquibase.structure.core.Schema; import java.io.IOException; import java.io.Writer; import java.math.BigInteger; import java.sql.Connection; import java.sql.Time; import java.sql.Timestamp; import java.util.Collection; import java.util.Date; import java.util.List; public class MockDatabase implements Database, InternalDatabase { private static final int FETCH_SIZE = 1000; private boolean outputDefaultSchema; private boolean outputDefaultCatalog; private boolean supportsCatalogs = true; private boolean supportsSchemas = true; private boolean supportsSequences = true; private String defaultCatalogName; private String defaultSchemaName; private boolean caseSensitive; @Override public int getPriority() { return PRIORITY_DEFAULT; } public Schema getSchema() { return null; } public String getName() { return "Mock Database"; } @Override public CatalogAndSchema getDefaultSchema() { return new CatalogAndSchema("default", "default"); } @Override public Integer getDefaultPort() { return null; } public DatabaseObject[] getContainingObjects() { return null; } public boolean equals(final DatabaseObject otherObject, final Database accordingTo) { return otherObject.getName().equalsIgnoreCase(this.getName()); } @Override public void setCanCacheLiquibaseTableInfo(final boolean canCacheLiquibaseTableInfo) { // } @Override public boolean requiresUsername() { return false; } @Override public boolean requiresPassword() { return false; } @Override public boolean isCorrectDatabaseImplementation(final DatabaseConnection conn) throws DatabaseException { return false; } @Override public String getDefaultDriver(final String url) { return null; } @Override public DatabaseConnection getConnection() { return null; } @Override public void setConnection(final DatabaseConnection conn) { } public void setConnection(final Connection conn) { } @Override public boolean getAutoCommitMode() { return false; } @Override public boolean isAutoCommit() throws DatabaseException { return false; } @Override public void setAutoCommit(final boolean b) throws DatabaseException { } @Override public boolean isCaseSensitive() { return caseSensitive; } public void setCaseSensitive(boolean caseSensitive) { this.caseSensitive = caseSensitive; } @Override public boolean supportsDDLInTransaction() { return false; } @Override public String getDatabaseProductName() { return null; } @Override public String getDatabaseProductVersion() throws DatabaseException { return null; } @Override public int getDatabaseMajorVersion() throws DatabaseException { return 0; } @Override public int getDatabaseMinorVersion() throws DatabaseException { return 0; } @Override public String getShortName() { return "mock"; } public String getDriverName() throws DatabaseException { return null; } public String getConnectionURL() throws DatabaseException { return null; } public String getConnectionUsername() throws DatabaseException { return null; } @Override public String getDefaultCatalogName() { return defaultCatalogName; } @Override public void setDefaultCatalogName(final String catalogName) throws DatabaseException { this.defaultCatalogName = catalogName; } @Override public String getDefaultSchemaName() { return defaultSchemaName; } @Override public void setDefaultSchemaName(final String schemaName) throws DatabaseException { this.defaultSchemaName = schemaName; } @Override public boolean supportsInitiallyDeferrableColumns() { return false; } @Override public boolean supportsSequences() { return supportsSequences; } public void setSupportsSequences(boolean supportsSequences) { this.supportsSequences = supportsSequences; } @Override public boolean supportsDropTableCascadeConstraints() { return false; } @Override public boolean supportsAutoIncrement() { return true; } @Override public String getDateLiteral(final String isoDate) { return isoDate; } @Override public String getDateLiteral(final java.sql.Date date) { return date.toString(); } @Override public String getTimeLiteral(final Time time) { return time.toString(); } @Override public String getDateTimeLiteral(final Timestamp timeStamp) { return timeStamp.toString(); } @Override public String getCurrentDateTimeFunction() { return "DATETIME()"; } @Override public void setCurrentDateTimeFunction(final String function) { } @Override public String getLineComment() { return null; } @Override public String getAutoIncrementClause(final BigInteger startWith, final BigInteger incrementBy) { return (("AUTO_INCREMENT_CLAUSE" + startWith) != null) ? (" " + startWith) : ((("" + incrementBy) != null) ? (" " + incrementBy) : ""); } public SqlStatement getCommitSQL() { return null; } /** * @see liquibase.database.Database#getDatabaseChangeLogTableName() */ @Override public String getDatabaseChangeLogTableName() { return "DATABASECHANGELOG"; } /** * Does nothing * * @see liquibase.database.Database#setDatabaseChangeLogTableName(java.lang.String) */ @Override public void setDatabaseChangeLogTableName(final String tableName) { } /** * @see liquibase.database.Database#getDatabaseChangeLogLockTableName() */ @Override public String getDatabaseChangeLogLockTableName() { return "DATABASECHANGELOGLOCK"; } /** * Does nothing * * @see liquibase.database.Database#setDatabaseChangeLogLockTableName(java.lang.String) */ @Override public void setDatabaseChangeLogLockTableName(final String tableName) { } @Override public String getConcatSql(final String... values) { return null; } public boolean acquireLock(final Liquibase liquibase) throws LockException { return false; } public void releaseLock() throws LockException { } public DatabaseChangeLogLock[] listLocks() throws LockException { return new DatabaseChangeLogLock[0]; } @Override public void dropDatabaseObjects(final CatalogAndSchema schema) throws DatabaseException { } @Override public void tag(final String tagString) throws DatabaseException { } @Override public boolean doesTagExist(final String tag) throws DatabaseException { return false; } @Override public boolean isSystemObject(final DatabaseObject example) { return false; } @Override public boolean isLiquibaseObject(final DatabaseObject object) { return false; } @Override public boolean supportsTablespaces() { return false; } @Override public String getViewDefinition(final CatalogAndSchema schema, final String name) throws DatabaseException { return null; } public String getDatabaseProductName(final DatabaseConnection conn) throws DatabaseException { return "Mock Database"; } @Override public String getDateLiteral(final Date defaultDateValue) { return defaultDateValue.toString(); } @Override public String escapeTableName(final String catalogName, final String schemaName, final String tableName) { if (schemaName == null) { return tableName; } else { return schemaName+"."+tableName; } } @Override public String escapeIndexName(final String catalogName, final String schemaName, final String indexName) { return escapeTableName(catalogName, schemaName, indexName); } @Override public String escapeColumnName(final String catalogName, final String schemaName, final String tableName, final String columnName) { return columnName; } @Override public String escapeColumnName(String catalogName, String schemaName, String tableName, String columnName, boolean quoteNamesThatMayBeFunctions) { return columnName; } @Override public String escapeColumnNameList(final String columnNames) { return columnNames; } @Override public String escapeSequenceName(final String catalogName, final String schemaName, final String sequenceName) { if (sequenceName == null) { return sequenceName; } else { return schemaName+"."+sequenceName; } } public String convertRequestedSchemaToSchema(final String requestedSchema) throws DatabaseException { return requestedSchema; } public String convertRequestedSchemaToCatalog(final String requestedSchema) throws DatabaseException { return null; } @Override public boolean supportsSchemas() { return supportsSchemas; } public void setSupportsSchemas(boolean supportsSchemas) { this.supportsSchemas = supportsSchemas; } @Override public boolean supportsCatalogs() { return supportsCatalogs; } public void setSupportsCatalogs(boolean supportsCatalogs) { this.supportsCatalogs = supportsCatalogs; } public boolean supportsCatalogInObjectName() { return true; } @Override public String generatePrimaryKeyName(final String tableName) { return "PK_"+tableName; } @Override public String escapeViewName(final String catalogName, final String schemaName, final String viewName) { return escapeTableName(catalogName, schemaName, viewName); } public boolean acquireLock() throws LockException { return false; } @Override public ChangeSet.RunStatus getRunStatus(final ChangeSet changeSet) throws DatabaseException, DatabaseHistoryException { return null; } @Override public RanChangeSet getRanChangeSet(final ChangeSet changeSet) throws DatabaseException, DatabaseHistoryException { return null; } @Override public void markChangeSetExecStatus(final ChangeSet changeSet, final ChangeSet.ExecType execType) throws DatabaseException { } @Override public List<RanChangeSet> getRanChangeSetList() throws DatabaseException { return null; } @Override public Date getRanDate(final ChangeSet changeSet) throws DatabaseException, DatabaseHistoryException { return null; } @Override public void removeRanStatus(final ChangeSet changeSet) throws DatabaseException { } @Override public void commit() { } @Override public void rollback() { } public SqlStatement getSelectChangeLogLockSQL() throws DatabaseException { return null; } @Override public String escapeStringForDatabase(final String string) { return string; } @Override public void close() throws DatabaseException { } @Override public boolean supportsRestrictForeignKeys() { return true; } @Override public String escapeConstraintName(final String constraintName) { return constraintName; } @Override public boolean isSafeToRunUpdate() throws DatabaseException { return true; } @Override public String escapeObjectName(final String objectName, final Class<? extends DatabaseObject> objectType) { return objectName; } @Override public String escapeObjectName(final String catalogName, final String schemaName, final String objectName, final Class<? extends DatabaseObject> objectType) { return catalogName +"."+schemaName+"."+objectName; } @Override public void executeStatements(final Change change, final DatabaseChangeLog changeLog, final List<SqlVisitor> sqlVisitors) throws LiquibaseException { } @Override public void execute(final SqlStatement[] statements, final List<SqlVisitor> sqlVisitors) throws LiquibaseException { } @Override public void saveStatements(final Change change, final List<SqlVisitor> sqlVisitors, final Writer writer) throws IOException, LiquibaseException { } @Override public void executeRollbackStatements(final Change change, final List<SqlVisitor> sqlVisitors) throws LiquibaseException { } @Override public void executeRollbackStatements(final SqlStatement[] statements, final List<SqlVisitor> sqlVisitors) throws LiquibaseException { } @Override public void saveRollbackStatement(final Change change, final List<SqlVisitor> sqlVisitors, final Writer writer) throws IOException, LiquibaseException { } @Override public String getLiquibaseCatalogName() { return null; } @Override public void setLiquibaseCatalogName(final String catalogName) { } @Override public String getLiquibaseSchemaName(){ return null; } @Override public void setLiquibaseSchemaName(final String schemaName) { } @Override public String getLiquibaseTablespaceName() { return null; } @Override public void setLiquibaseTablespaceName(final String tablespaceName) { } @Override public Date parseDate(final String dateAsString) throws DateParseException { return new Date(); } @Override public List<DatabaseFunction> getDateFunctions() { return null; } @Override public void resetInternalState() { } @Override public boolean supportsForeignKeyDisable() { return false; } @Override public boolean disableForeignKeyChecks() throws DatabaseException { return false; } @Override public void enableForeignKeyChecks() throws DatabaseException { } public void updateChecksum(final ChangeSet changeSet) throws DatabaseException { } @Override public boolean isReservedWord(final String string) { return false; } @Override public CatalogAndSchema correctSchema(final CatalogAndSchema schema) { return schema.standardize(this); } @Override /** * Returns name all lower case except for the last letter capital for easier detection of corrected names. */ public String correctObjectName(final String name, final Class<? extends DatabaseObject> objectType) { if (name == null) { return null; } String finalName = name.toLowerCase(); finalName = finalName.substring(0, finalName.length()-1)+finalName.substring(finalName.length()-1, finalName.length()).toUpperCase(); return finalName; } public String correctObjectName(final String name, final Class<? extends DatabaseObject> objectType, final boolean quoteCorrectedName) { return correctObjectName(name, objectType); } @Override public Integer getFetchSize() { return FETCH_SIZE; } @Override public boolean isFunction(final String string) { return string.endsWith("()"); } @Override public int getDataTypeMaxParameters(final String dataTypeName) { return 2; } public CatalogAndSchema getSchemaFromJdbcInfo(final String rawCatalogName, final String rawSchemaName) { return new CatalogAndSchema(rawCatalogName, rawSchemaName); } public String getJdbcCatalogName(final CatalogAndSchema schema) { return schema.getCatalogName(); } public String getJdbcSchemaName(final CatalogAndSchema schema) { return schema.getSchemaName(); } public String getJdbcCatalogName(final Schema schema) { return schema.getCatalogName(); } public String getJdbcSchemaName(final Schema schema) { return schema.getName(); } @Override public boolean dataTypeIsNotModifiable(final String typeName) { return true; } @Override public String generateDatabaseFunctionValue(final DatabaseFunction databaseFunction) { return null; } @Override public ObjectQuotingStrategy getObjectQuotingStrategy() { return ObjectQuotingStrategy.LEGACY; } @Override public void setObjectQuotingStrategy(final ObjectQuotingStrategy quotingStrategy) { } @Override public boolean supportsCatalogInObjectName(final Class<? extends DatabaseObject> type) { return true; } @Override public boolean createsIndexesForForeignKeys() { return false; } @Override public boolean getOutputDefaultSchema() { return outputDefaultSchema; } @Override public void setOutputDefaultSchema(final boolean outputDefaultSchema) { this.outputDefaultSchema = outputDefaultSchema; } @Override public boolean getOutputDefaultCatalog() { return outputDefaultCatalog; } @Override public void setOutputDefaultCatalog(final boolean outputDefaultCatalog) { this.outputDefaultCatalog = outputDefaultCatalog; } @Override public boolean isDefaultSchema(final String catalog, final String schema) { return false; } @Override public boolean isDefaultCatalog(final String catalog) { return false; } @Override public boolean supportsPrimaryKeyNames() { return true; } @Override public String getSystemSchema() { return "information_schema"; } @Override public void addReservedWords(Collection<String> words) { } @Override public String toString() { return "Mock database"; } @Override public String escapeDataTypeName(String dataTypeName) { return dataTypeName; } @Override public String unescapeDataTypeName(String dataTypeName) { return dataTypeName; } @Override public String unescapeDataTypeString(String dataTypeString) { return dataTypeString; } @Override public ValidationErrors validate() { return new ValidationErrors(); } @Override public boolean supportsNotNullConstraintNames() { return false; } @Override public boolean supportsBatchUpdates() throws DatabaseException { return false; } @Override public int getMaxFractionalDigitsForTimestamp() { return 9; } @Override public int getDefaultFractionalDigitsForTimestamp() { return 6; } @Override public boolean requiresExplicitNullForColumns() { return false; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver.wal; import static org.junit.Assert.assertFalse; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; /** * Tests that verifies that the log is forced to be rolled every "hbase.regionserver.logroll.period" */ public abstract class AbstractTestLogRollPeriod { private static final Log LOG = LogFactory.getLog(AbstractTestLogRollPeriod.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static long LOG_ROLL_PERIOD = 4000; @BeforeClass public static void setUpBeforeClass() throws Exception { // disable the ui TEST_UTIL.getConfiguration().setInt("hbase.regionsever.info.port", -1); TEST_UTIL.getConfiguration().setLong("hbase.regionserver.logroll.period", LOG_ROLL_PERIOD); TEST_UTIL.startMiniCluster(); } @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } /** * Tests that the LogRoller perform the roll even if there are no edits */ @Test public void testNoEdits() throws Exception { TableName tableName = TableName.valueOf("TestLogRollPeriodNoEdits"); TEST_UTIL.createTable(tableName, "cf"); try { Table table = TEST_UTIL.getConnection().getTable(tableName); try { HRegionServer server = TEST_UTIL.getRSForFirstRegionInTable(tableName); WAL log = server.getWAL(null); checkMinLogRolls(log, 5); } finally { table.close(); } } finally { TEST_UTIL.deleteTable(tableName); } } /** * Tests that the LogRoller perform the roll with some data in the log */ @Test(timeout=60000) public void testWithEdits() throws Exception { final TableName tableName = TableName.valueOf("TestLogRollPeriodWithEdits"); final String family = "cf"; TEST_UTIL.createTable(tableName, family); try { HRegionServer server = TEST_UTIL.getRSForFirstRegionInTable(tableName); WAL log = server.getWAL(null); final Table table = TEST_UTIL.getConnection().getTable(tableName); Thread writerThread = new Thread("writer") { @Override public void run() { try { long row = 0; while (!interrupted()) { Put p = new Put(Bytes.toBytes(String.format("row%d", row))); p.addColumn(Bytes.toBytes(family), Bytes.toBytes("col"), Bytes.toBytes(row)); table.put(p); row++; Thread.sleep(LOG_ROLL_PERIOD / 16); } } catch (Exception e) { LOG.warn(e); } } }; try { writerThread.start(); checkMinLogRolls(log, 5); } finally { writerThread.interrupt(); writerThread.join(); table.close(); } } finally { TEST_UTIL.deleteTable(tableName); } } private void checkMinLogRolls(final WAL log, final int minRolls) throws Exception { final List<Path> paths = new ArrayList<Path>(); log.registerWALActionsListener(new WALActionsListener.Base() { @Override public void postLogRoll(Path oldFile, Path newFile) { LOG.debug("postLogRoll: oldFile="+oldFile+" newFile="+newFile); paths.add(newFile); } }); // Sleep until we should get at least min-LogRoll events long wtime = System.currentTimeMillis(); Thread.sleep((minRolls + 1) * LOG_ROLL_PERIOD); // Do some extra sleep in case the machine is slow, // and the log-roll is not triggered exactly on LOG_ROLL_PERIOD. final int NUM_RETRIES = 1 + 8 * (minRolls - paths.size()); for (int retry = 0; paths.size() < minRolls && retry < NUM_RETRIES; ++retry) { Thread.sleep(LOG_ROLL_PERIOD / 4); } wtime = System.currentTimeMillis() - wtime; LOG.info(String.format("got %d rolls after %dms (%dms each) - expected at least %d rolls", paths.size(), wtime, wtime / paths.size(), minRolls)); assertFalse(paths.size() < minRolls); } }
package sciwhiz12.voxeltools.item; import net.minecraft.block.Block; import net.minecraft.block.Blocks; import net.minecraft.entity.player.PlayerEntity; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.item.ItemUseContext; import net.minecraft.tags.BlockTags; import net.minecraft.tags.ITag; import net.minecraft.util.ActionResultType; import net.minecraft.util.Direction; import net.minecraft.util.Hand; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import sciwhiz12.voxeltools.VoxelTools; import sciwhiz12.voxeltools.VxConfig; import sciwhiz12.voxeltools.VxTags; import sciwhiz12.voxeltools.util.PermissionUtil; public class ShovelItem extends Item implements ILeftClicker.OnBlock { public ShovelItem(Properties properties) { super(properties); } @Override public void onLeftClickBlock(PlayerEntity player, World world, Hand hand, BlockPos pos, Direction face) { if (player.isEffectiveAi() && PermissionUtil.checkForPermission(player)) { for (BlockPos targetPos : getDigRadius(pos)) { if (VxTags.GROUND.contains(world.getBlockState(targetPos).getBlock())) { world.setBlockAndUpdate(targetPos, Blocks.AIR.defaultBlockState()); } } } } @Override public boolean onBlockStartBreak(ItemStack itemstack, BlockPos pos, PlayerEntity player) { return true; } @Override public ActionResultType useOn(ItemUseContext context) { World world = context.getLevel(); PlayerEntity player = context.getPlayer(); if (!world.isClientSide && player != null && PermissionUtil .checkForPermission(player) && VxConfig.Server.shovelFlattenRadius != 0) { BlockPos pos = context.getClickedPos(); if (player.isCrouching()) { for (BlockPos targetPos : getFlattenRadius(pos)) { if (VxTags.GROUND.contains(world.getBlockState(targetPos).getBlock())) { world.setBlockAndUpdate(targetPos, Blocks.AIR.defaultBlockState()); } } return ActionResultType.SUCCESS; } } return ActionResultType.PASS; } private Iterable<BlockPos> getDigRadius(BlockPos origin) { int x = VxConfig.Server.shovelDigRadiusX; int y = VxConfig.Server.shovelDigRadiusY; int z = VxConfig.Server.shovelDigRadiusZ; BlockPos cornerOne = origin.offset(x, y, z); BlockPos cornerTwo = origin.offset(-x, -y, -z); return BlockPos.betweenClosed(cornerOne, cornerTwo); } private Iterable<BlockPos> getFlattenRadius(BlockPos origin) { int radius = VxConfig.Server.shovelFlattenRadius; int height = VxConfig.Server.shovelFlattenHeight; int offset = VxConfig.Server.shovelFlattenHeightOffset; BlockPos cornerOne = origin.offset(radius, offset, radius); BlockPos cornerTwo = origin.offset(-radius, offset + height, -radius); return BlockPos.betweenClosed(cornerOne, cornerTwo); } }
package gg.projecteden.nexus.features.minigames.managers; import com.google.common.base.Strings; import com.sk89q.worldguard.protection.regions.ProtectedRegion; import gg.projecteden.nexus.Nexus; import gg.projecteden.nexus.features.minigames.models.Arena; import gg.projecteden.nexus.framework.exceptions.postconfigured.InvalidInputException; import gg.projecteden.nexus.utils.WorldGuardUtils; import lombok.Getter; import lombok.SneakyThrows; import org.bukkit.Location; import org.bukkit.configuration.file.FileConfiguration; import org.bukkit.configuration.file.YamlConfiguration; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; public class ArenaManager { private static List<Arena> arenas = new ArrayList<>(); @Getter private static String folder = "plugins/Nexus/minigames/arenas/"; public static List<Arena> getAll() { return arenas; } public static List<Arena> getAll(String filter) { List<Arena> filtered = new ArrayList<>(); for (Arena arena : arenas) { if (filter != null) if (!arena.getName().toLowerCase().startsWith(filter.toLowerCase())) continue; filtered.add(arena); } return filtered; } public static List<String> getNames() { return getNames(null); } public static List<String> getNames(String filter) { return getAll(filter).stream().map(Arena::getName).collect(Collectors.toList()); } public static Arena getFromLocation(Location location) { return getFromLocation(location, null); } public static Arena getFromLocation(Location location, String type) { Set<ProtectedRegion> regionsAt = new WorldGuardUtils(location).getRegionsAt(location); for (ProtectedRegion region : regionsAt) { Arena fromRegion = getFromRegion(region.getId()); if (fromRegion != null && (type == null || fromRegion.ownsRegion(region, type))) return fromRegion; } return null; } public static Arena getFromRegion(String regionName) { try { String mechanicName, arenaName; if (!regionName.contains("_")) { mechanicName = regionName; arenaName = regionName; } else { mechanicName = regionName.split("_")[0]; arenaName = regionName.split("_")[1]; } Arena arena = get(arenaName); if (arena.getMechanic().getClass().getSimpleName().equalsIgnoreCase(mechanicName)) return arena; } catch (ArrayIndexOutOfBoundsException | InvalidInputException ignore) {} return null; } public static Arena find(String name) { if (!Strings.isNullOrEmpty(name)) { for (Arena arena : arenas) if (arena.getName().equalsIgnoreCase(name)) return arena; for (Arena arena : arenas) if (arena.getName().toLowerCase().startsWith(name.toLowerCase())) return arena; } throw new InvalidInputException("Arena not found"); } public static boolean exists(String name) { try { get(name); return true; } catch (InvalidInputException ex) { return false; } } public static Arena get(String name) { arenas.removeIf(Objects::isNull); for (Arena arena : arenas) if (arena.getName().equalsIgnoreCase(name)) return arena; throw new InvalidInputException("Arena not found"); } public static Arena get(int id) { arenas.removeIf(Objects::isNull); for (Arena arena : arenas) if (arena.getId() == id) return arena; throw new InvalidInputException("Arena not found"); } public static void add(Arena arena) { try { arenas.remove(get(arena.getId())); } catch (NullPointerException | InvalidInputException ignore) {} try { arenas.remove(get(arena.getName())); } catch (NullPointerException | InvalidInputException ignore) {} arenas.add(arena); } public static void remove(Arena arena) { arenas.remove(arena); } private static String getFile(String name) { return folder + name + ".yml"; } private static FileConfiguration getConfig(String name) { File file = new File(getFile(name)); if (!file.exists()) { try { if (!file.createNewFile()) Nexus.warn("File " + file.getName() + " already exists"); } catch (IOException ex) { Nexus.severe("An error occurred while trying to create a configuration file: " + ex.getMessage()); } } return YamlConfiguration.loadConfiguration(file); } @SneakyThrows public static void read() { File file = Paths.get(folder).toFile(); if (!file.exists()) file.createNewFile(); arenas.clear(); try (Stream<Path> paths = Files.walk(Paths.get(folder))) { paths.forEach(filePath -> { try { if (!Files.isRegularFile(filePath)) return; String name = filePath.getFileName().toString(); if (name.startsWith(".")) return; if (!name.endsWith(".yml")) return; read(name.replace(".yml", "")); } catch (Exception ex) { Nexus.severe("An error occurred while trying to read arena configuration file " + filePath.getFileName().toFile() + ": " + ex.getMessage()); } }); } catch (Exception ex) { Nexus.severe("An error occurred while trying to read arena configuration files: " + ex.getMessage()); } } public static void read(String name) { add((Arena) getConfig(name).get("arena")); } public static void write() { arenas.forEach(ArenaManager::write); } public static void write(Arena arena) { FileConfiguration arenaConfig = getConfig(arena.getName()); arenaConfig.set("arena", arena); try { arenaConfig.save(getFile(arena.getName())); if (!arenas.contains(arena)) add(arena); } catch (Exception ex) { Nexus.severe("An error occurred while trying to write arena configuration file " + arena.getName() + ": " + ex.getMessage()); } } public static void delete(String arena) { delete(get(arena)); } public static void delete(Arena arena) { File file = new File(getFile(arena.getName())); if (!file.exists()) { return; } file.delete(); remove(arena); } public static int getNextId() { int id = 1; for (Arena arena : getAll()) if (arena.getId() >= id) id = (arena.getId() + 1); return id; } @SneakyThrows public static <T> T convert(Arena arena, Class<?> clazz) { return (T) clazz.getDeclaredConstructor(Map.class).newInstance(arena.serialize()); } }
package com.ellison.springdemo.common.entity; public class ConsultConfigArea { public String areaCode; public String areaName; public String state; public String getAreaCode() { return areaCode; } public void setAreaCode(String areaCode) { this.areaCode = areaCode; } public String getAreaName() { return areaName; } public void setAreaName(String areaName) { this.areaName = areaName; } public String getState() { return state; } public void setState(String state) { this.state = state; } @Override public String toString() { return "ConsultConfigArea{" + "areaCode='" + areaCode + '\'' + ", areaName='" + areaName + '\'' + ", state='" + state + '\'' + '}'; } }
import java.io.*; import java.util.*; public class SelectionSort { public static void selectionSort(int[] arr){ for (int i = 0; i < arr.length - 1; i++) { int index = i; for (int j = i + 1; j < arr.length; j++){ if (arr[j] < arr[index]){ index = j;//searching for lowest index } } int smallerNumber = arr[index]; arr[index] = arr[i]; arr[i] = smallerNumber; } } public static void main(String a[]){ int[] arr1 = {9,14,3,2,43,11,58,22}; System.out.println("Before Selection Sort"); for(int i:arr1){ System.out.print(i+" "); } System.out.println(); selectionSort(arr1);//sorting array using selection sort System.out.println("After Selection Sort"); for(int i:arr1){ System.out.print(i+" "); } } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v2/services/ad_group_criterion_label_service.proto package com.google.ads.googleads.v2.services; public interface MutateAdGroupCriterionLabelsResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:google.ads.googleads.v2.services.MutateAdGroupCriterionLabelsResponse) com.google.protobuf.MessageOrBuilder { /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (e.g. auth errors), * we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 3;</code> * @return Whether the partialFailureError field is set. */ boolean hasPartialFailureError(); /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (e.g. auth errors), * we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 3;</code> * @return The partialFailureError. */ com.google.rpc.Status getPartialFailureError(); /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (e.g. auth errors), * we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 3;</code> */ com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder(); /** * <pre> * All results for the mutate. * </pre> * * <code>repeated .google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResult results = 2;</code> */ java.util.List<com.google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResult> getResultsList(); /** * <pre> * All results for the mutate. * </pre> * * <code>repeated .google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResult results = 2;</code> */ com.google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResult getResults(int index); /** * <pre> * All results for the mutate. * </pre> * * <code>repeated .google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResult results = 2;</code> */ int getResultsCount(); /** * <pre> * All results for the mutate. * </pre> * * <code>repeated .google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResult results = 2;</code> */ java.util.List<? extends com.google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResultOrBuilder> getResultsOrBuilderList(); /** * <pre> * All results for the mutate. * </pre> * * <code>repeated .google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResult results = 2;</code> */ com.google.ads.googleads.v2.services.MutateAdGroupCriterionLabelResultOrBuilder getResultsOrBuilder( int index); }
/* * Copyright 2014-2022 Web Firm Framework * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.webfirmframework.wffweb.tag.html.core; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** * @author WFF * @since 3.0.3 */ public enum IndexedTagName { // NB: indexing of tags must be done in a separate enum without including // any class of tag otherwise initialization of tagNameIndex may not be // correct. It could also lead to initialization error. INSTANCE; private final List<String> sortedTagNames; private final Map<String, Integer> indexedTagNames; private IndexedTagName() { final PreIndexedTagName[] values = PreIndexedTagName.values(); final int initialCapacity = values.length; sortedTagNames = new ArrayList<>(initialCapacity); indexedTagNames = new ConcurrentHashMap<>(initialCapacity); for (final PreIndexedTagName each : values) { sortedTagNames.add(each.tagName()); indexedTagNames.put(each.tagName(), each.index()); } } List<String> sortedTagNames() { return sortedTagNames; } Map<String, Integer> indexedTagNames() { return indexedTagNames; } /** * @param tagName * @return the index of tag name * @since 3.0.3 */ public Integer getIndexByTagName(final String tagName) { return indexedTagNames.get(tagName); } }
package MultithreadedProgramming.ch02.syn_Out_asyn; import java.util.ArrayList; import java.util.List; public class MyList { private List list = new ArrayList<String>(); synchronized public void add(String username) { System.out.println("ThreadName=" + Thread.currentThread().getName() + "执行了add方法!"); list.add(username); System.out.println("ThreadName=" + Thread.currentThread().getName() + "推出了add方法"); } synchronized public int getSize() { System.out.println("ThreadName=" + Thread.currentThread().getName() + "执行了getSize方法!"); int sizeValue = list.size(); System.out.println("ThreadName=" + Thread.currentThread().getName() + "退出了getSize方法"); return sizeValue; } }
/* * Copyright 2015 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.nui.events; import org.terasology.input.ButtonState; import org.terasology.input.Input; import org.terasology.input.device.KeyboardDevice; import org.terasology.input.device.MouseDevice; /** * See {@link NUIInputEvent} */ public class NUIKeyEvent extends NUIInputEvent { private Input key; private ButtonState state; public NUIKeyEvent(MouseDevice mouse, KeyboardDevice keyboard, Input key, ButtonState state) { super(mouse, keyboard); this.key = key; this.state = state; } public Input getKey() { return key; } public ButtonState getState() { return state; } public boolean isDown() { return state != ButtonState.UP; } }
package org.droidplanner.services.android.impl.core.MAVLink.connection; import android.content.Context; import android.os.Bundle; import android.util.Log; import org.droidplanner.services.android.impl.utils.NetworkUtils; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.util.concurrent.atomic.AtomicReference; /** * Provides support for mavlink connection via udp. */ public abstract class UdpConnection extends MavLinkConnection { private AtomicReference<DatagramSocket> socketRef = new AtomicReference<>(); private int serverPort; private int hostPort; private InetAddress hostAdd; private DatagramPacket sendPacket; private DatagramPacket receivePacket; protected UdpConnection(Context context) { super(context); } private void getUdpStream(Bundle extras) throws IOException { final DatagramSocket socket = new DatagramSocket(serverPort); socket.setBroadcast(true); socket.setReuseAddress(true); NetworkUtils.bindSocketToNetwork(extras, socket); socketRef.set(socket); } @Override public final void closeConnection() throws IOException { final DatagramSocket socket = socketRef.get(); if (socket != null) { socket.close(); } } @Override public final void openConnection(Bundle connectionExtras) throws IOException { getUdpStream(connectionExtras); onConnectionOpened(connectionExtras); } @Override public final void sendBuffer(byte[] buffer) throws IOException { final DatagramSocket socket = socketRef.get(); if (socket == null) { return; } try { if (hostAdd != null) { // We can't send to our sister until they // have connected to us if (sendPacket == null) { sendPacket = new DatagramPacket(buffer, buffer.length, hostAdd, hostPort); } else { sendPacket.setData(buffer, 0, buffer.length); sendPacket.setAddress(hostAdd); sendPacket.setPort(hostPort); } socket.send(sendPacket); } } catch (Exception e) { e.printStackTrace(); } } public void sendBuffer(InetAddress targetAddr, int targetPort, byte[] buffer) throws IOException { final DatagramSocket socket = socketRef.get(); if (socket == null || targetAddr == null || buffer == null) { return; } DatagramPacket packet = new DatagramPacket(buffer, buffer.length, targetAddr, targetPort); socket.send(packet); } @Override public final int readDataBlock(byte[] readData) throws IOException { final DatagramSocket socket = socketRef.get(); if (socket == null) { return 0; } if (receivePacket == null) { receivePacket = new DatagramPacket(readData, readData.length); } else { receivePacket.setData(readData); } socket.receive(receivePacket); hostAdd = receivePacket.getAddress(); hostPort = receivePacket.getPort(); return receivePacket.getLength(); } @Override public final void loadPreferences() { Log.i("lxw"," BBB22"); serverPort = loadServerPort(); } @Override public final int getConnectionType() { return MavLinkConnectionTypes.MAVLINK_CONNECTION_UDP; } protected abstract int loadServerPort(); }
package com.sen5.lib.api.event; /** * Created by zhurongkun on 2017/8/23. * 用户验证回调 */ public class EventVerify extends BaseEvent{ }
package android.net; import android.net.apf.ApfCapabilities; import android.os.Parcel; import android.os.Parcelable; public class ProvisioningConfigurationParcelable implements Parcelable { public static final Parcelable.Creator<ProvisioningConfigurationParcelable> CREATOR = new Parcelable.Creator<ProvisioningConfigurationParcelable>() { /* class android.net.ProvisioningConfigurationParcelable.C00051 */ @Override // android.os.Parcelable.Creator public ProvisioningConfigurationParcelable createFromParcel(Parcel parcel) { ProvisioningConfigurationParcelable provisioningConfigurationParcelable = new ProvisioningConfigurationParcelable(); provisioningConfigurationParcelable.readFromParcel(parcel); return provisioningConfigurationParcelable; } @Override // android.os.Parcelable.Creator public ProvisioningConfigurationParcelable[] newArray(int i) { return new ProvisioningConfigurationParcelable[i]; } }; public ApfCapabilities apfCapabilities; public String displayName; public boolean enableIPv4; public boolean enableIPv6; public InitialConfigurationParcelable initialConfig; public int ipv6AddrGenMode; public Network network; public int provisioningTimeoutMs; public int requestedPreDhcpActionMs; public StaticIpConfiguration staticIpConfig; public boolean usingIpReachabilityMonitor; public boolean usingMultinetworkPolicyTracker; public int describeContents() { return 0; } public final void writeToParcel(Parcel parcel, int i) { int dataPosition = parcel.dataPosition(); parcel.writeInt(0); parcel.writeInt(this.enableIPv4 ? 1 : 0); parcel.writeInt(this.enableIPv6 ? 1 : 0); parcel.writeInt(this.usingMultinetworkPolicyTracker ? 1 : 0); parcel.writeInt(this.usingIpReachabilityMonitor ? 1 : 0); parcel.writeInt(this.requestedPreDhcpActionMs); if (this.initialConfig != null) { parcel.writeInt(1); this.initialConfig.writeToParcel(parcel, 0); } else { parcel.writeInt(0); } if (this.staticIpConfig != null) { parcel.writeInt(1); this.staticIpConfig.writeToParcel(parcel, 0); } else { parcel.writeInt(0); } if (this.apfCapabilities != null) { parcel.writeInt(1); this.apfCapabilities.writeToParcel(parcel, 0); } else { parcel.writeInt(0); } parcel.writeInt(this.provisioningTimeoutMs); parcel.writeInt(this.ipv6AddrGenMode); if (this.network != null) { parcel.writeInt(1); this.network.writeToParcel(parcel, 0); } else { parcel.writeInt(0); } parcel.writeString(this.displayName); int dataPosition2 = parcel.dataPosition(); parcel.setDataPosition(dataPosition); parcel.writeInt(dataPosition2 - dataPosition); parcel.setDataPosition(dataPosition2); } public final void readFromParcel(Parcel parcel) { int dataPosition = parcel.dataPosition(); int readInt = parcel.readInt(); if (readInt >= 0) { try { boolean z = true; this.enableIPv4 = parcel.readInt() != 0; if (parcel.dataPosition() - dataPosition < readInt) { this.enableIPv6 = parcel.readInt() != 0; if (parcel.dataPosition() - dataPosition < readInt) { this.usingMultinetworkPolicyTracker = parcel.readInt() != 0; if (parcel.dataPosition() - dataPosition < readInt) { if (parcel.readInt() == 0) { z = false; } this.usingIpReachabilityMonitor = z; if (parcel.dataPosition() - dataPosition < readInt) { this.requestedPreDhcpActionMs = parcel.readInt(); if (parcel.dataPosition() - dataPosition < readInt) { if (parcel.readInt() != 0) { this.initialConfig = InitialConfigurationParcelable.CREATOR.createFromParcel(parcel); } else { this.initialConfig = null; } if (parcel.dataPosition() - dataPosition < readInt) { if (parcel.readInt() != 0) { this.staticIpConfig = (StaticIpConfiguration) StaticIpConfiguration.CREATOR.createFromParcel(parcel); } else { this.staticIpConfig = null; } if (parcel.dataPosition() - dataPosition < readInt) { if (parcel.readInt() != 0) { this.apfCapabilities = (ApfCapabilities) ApfCapabilities.CREATOR.createFromParcel(parcel); } else { this.apfCapabilities = null; } if (parcel.dataPosition() - dataPosition < readInt) { this.provisioningTimeoutMs = parcel.readInt(); if (parcel.dataPosition() - dataPosition < readInt) { this.ipv6AddrGenMode = parcel.readInt(); if (parcel.dataPosition() - dataPosition < readInt) { if (parcel.readInt() != 0) { this.network = (Network) Network.CREATOR.createFromParcel(parcel); } else { this.network = null; } if (parcel.dataPosition() - dataPosition < readInt) { this.displayName = parcel.readString(); if (parcel.dataPosition() - dataPosition < readInt) { parcel.setDataPosition(dataPosition + readInt); } } } } } } } } } } } } } finally { parcel.setDataPosition(dataPosition + readInt); } } } }
package tddexample; import java.time.LocalDateTime; public class Flight { private String reference; private LocalDateTime departureTime; private String origin; private String destination; public Flight(String reference, LocalDateTime departureTime, String origin, String destination) { this.reference = reference; this.departureTime = departureTime; this.origin = origin; this.destination = destination; } public String getReference() { return reference; } public void setReference(String reference) { this.reference = reference; } public LocalDateTime getDepartureTime() { return departureTime; } public void setDepartureTime(LocalDateTime departureTime) { this.departureTime = departureTime; } public String getOrigin() { return origin; } public void setOrigin(String origin) { this.origin = origin; } public String getDestination() { return destination; } public void setDestination(String destination) { this.destination = destination; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((departureTime == null) ? 0 : departureTime.hashCode()); result = prime * result + ((destination == null) ? 0 : destination.hashCode()); result = prime * result + ((origin == null) ? 0 : origin.hashCode()); result = prime * result + ((reference == null) ? 0 : reference.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Flight other = (Flight) obj; if (departureTime == null) { if (other.departureTime != null) return false; } else if (!departureTime.equals(other.departureTime)) return false; if (destination == null) { if (other.destination != null) return false; } else if (!destination.equals(other.destination)) return false; if (origin == null) { if (other.origin != null) return false; } else if (!origin.equals(other.origin)) return false; if (reference == null) { if (other.reference != null) return false; } else if (!reference.equals(other.reference)) return false; return true; } }
/** * TLS-Attacker - A Modular Penetration Testing Framework for TLS * * Copyright 2014-2021 Ruhr University Bochum, Paderborn University, Hackmanit GmbH * * Licensed under Apache License, Version 2.0 * http://www.apache.org/licenses/LICENSE-2.0.txt */ package de.rub.nds.tlsattacker.core.protocol.handler; import de.rub.nds.tlsattacker.core.constants.AlgorithmResolver; import de.rub.nds.tlsattacker.core.protocol.message.DHEServerKeyExchangeMessage; import de.rub.nds.tlsattacker.core.protocol.parser.DHEServerKeyExchangeParser; import de.rub.nds.tlsattacker.core.protocol.preparator.DHEServerKeyExchangePreparator; import de.rub.nds.tlsattacker.core.protocol.serializer.DHEServerKeyExchangeSerializer; import de.rub.nds.tlsattacker.core.state.TlsContext; import java.math.BigInteger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; public class DHEServerKeyExchangeHandler<T extends DHEServerKeyExchangeMessage> extends ServerKeyExchangeHandler<T> { private static final Logger LOGGER = LogManager.getLogger(); public DHEServerKeyExchangeHandler(TlsContext tlsContext) { super(tlsContext); } @Override public DHEServerKeyExchangeParser<T> getParser(byte[] message, int pointer) { return new DHEServerKeyExchangeParser<T>(pointer, message, tlsContext.getChooser().getLastRecordVersion(), AlgorithmResolver.getKeyExchangeAlgorithm(tlsContext.getChooser().getSelectedCipherSuite()), tlsContext.getConfig()); } @Override public DHEServerKeyExchangePreparator<T> getPreparator(T message) { return new DHEServerKeyExchangePreparator<T>(tlsContext.getChooser(), message); } @Override public DHEServerKeyExchangeSerializer<T> getSerializer(T message) { return new DHEServerKeyExchangeSerializer<T>(message, tlsContext.getChooser().getSelectedProtocolVersion()); } @Override public void adjustTLSContext(T message) { adjustDhGenerator(message); adjustDhModulus(message); adjustServerPublicKey(message); if (message.getComputations() != null && message.getComputations().getPrivateKey() != null) { adjustServerPrivateKey(message); } } private void adjustDhGenerator(T message) { tlsContext.setServerDhGenerator(new BigInteger(1, message.getGenerator().getValue())); LOGGER.debug("Dh Generator: " + tlsContext.getServerDhGenerator()); } private void adjustDhModulus(T message) { tlsContext.setServerDhModulus(new BigInteger(1, message.getModulus().getValue())); LOGGER.debug("Dh Modulus: " + tlsContext.getServerDhModulus()); } private void adjustServerPublicKey(T message) { tlsContext.setServerDhPublicKey(new BigInteger(1, message.getPublicKey().getValue())); LOGGER.debug("Server PublicKey: " + tlsContext.getServerDhPublicKey()); } private void adjustServerPrivateKey(T message) { tlsContext.setServerDhPrivateKey(message.getComputations().getPrivateKey().getValue()); LOGGER.debug("Server PrivateKey: " + tlsContext.getServerDhPrivateKey()); } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.eas.client.model.gui.selectors; import java.util.ArrayList; import java.util.List; import org.openide.loaders.DataObject; /** * * @author mg */ public class DefaultMtdSelectionValidator implements MtdSelectionValidator { protected List<String> allowedTypes = new ArrayList<>(); public DefaultMtdSelectionValidator(List<String> aAllowedTypes) { super(); allowedTypes = aAllowedTypes; } @Override public boolean isEntityValid(DataObject umdo) { if (allowedTypes != null) { if (umdo != null) { return allowedTypes.contains(umdo.getPrimaryFile().getExt()); } else { return false; } } else { return true; } } @Override public List<String> getAllowedTypes() { return allowedTypes; } }
package com.atguigu.gmall.pms.controller; import java.util.List; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import com.atguigu.gmall.pms.entity.CommentEntity; import com.atguigu.gmall.pms.service.CommentService; import com.atguigu.gmall.common.bean.PageResultVo; import com.atguigu.gmall.common.bean.ResponseVo; import com.atguigu.gmall.common.bean.PageParamVo; /** * 商品评价 * * @author tangtang * @email tangtang@atguigu.com * @date 2021-05-13 18:26:33 */ @Api(tags = "商品评价 管理") @RestController @RequestMapping("pms/comment") public class CommentController { @Autowired private CommentService commentService; /** * 列表 */ @GetMapping @ApiOperation("分页查询") public ResponseVo<PageResultVo> queryCommentByPage(PageParamVo paramVo){ PageResultVo pageResultVo = commentService.queryPage(paramVo); return ResponseVo.ok(pageResultVo); } /** * 信息 */ @GetMapping("{id}") @ApiOperation("详情查询") public ResponseVo<CommentEntity> queryCommentById(@PathVariable("id") Long id){ CommentEntity comment = commentService.getById(id); return ResponseVo.ok(comment); } /** * 保存 */ @PostMapping @ApiOperation("保存") public ResponseVo<Object> save(@RequestBody CommentEntity comment){ commentService.save(comment); return ResponseVo.ok(); } /** * 修改 */ @PostMapping("/update") @ApiOperation("修改") public ResponseVo update(@RequestBody CommentEntity comment){ commentService.updateById(comment); return ResponseVo.ok(); } /** * 删除 */ @PostMapping("/delete") @ApiOperation("删除") public ResponseVo delete(@RequestBody List<Long> ids){ commentService.removeByIds(ids); return ResponseVo.ok(); } }
package org.wikipedia.search; public class ImageSearchException extends Exception { public ImageSearchException(String message, Exception e) { super(message, e); } }
package com.iciyun.blockchain.aToB100; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.lang.reflect.InvocationTargetException; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.PrivateKey; import java.security.Security; import java.security.spec.InvalidKeySpecException; import java.util.Collection; import java.util.Iterator; import java.util.Properties; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import org.apache.commons.io.IOUtils; import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.bouncycastle.openssl.PEMParser; import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter; import org.hyperledger.fabric.sdk.BlockEvent; import org.hyperledger.fabric.sdk.ChaincodeID; import org.hyperledger.fabric.sdk.Channel; import org.hyperledger.fabric.sdk.Enrollment; import org.hyperledger.fabric.sdk.HFClient; import org.hyperledger.fabric.sdk.ProposalResponse; import org.hyperledger.fabric.sdk.QueryByChaincodeRequest; import org.hyperledger.fabric.sdk.TransactionProposalRequest; import org.hyperledger.fabric.sdk.User; import org.hyperledger.fabric.sdk.exception.CryptoException; import org.hyperledger.fabric.sdk.exception.InvalidArgumentException; import org.hyperledger.fabric.sdk.exception.ProposalException; import org.hyperledger.fabric.sdk.exception.TransactionException; import org.hyperledger.fabric.sdk.security.CryptoSuite; /** * Hello world! * */ public class App { final HFClient client = HFClient.createNewInstance(); Channel channel; TransactionProposalRequest proposalRequest; void setupCryptoMaterialsForClient() throws CryptoException, InvalidArgumentException, IllegalAccessException, InstantiationException, ClassNotFoundException, NoSuchMethodException, InvocationTargetException { // Set default crypto suite for HF client client.setCryptoSuite(CryptoSuite.Factory.getCryptoSuite()); client.setUserContext(new User() { @Override public String getName() { return "PeerAdmin"; } @Override public Set<String> getRoles() { return null; } @Override public String getAccount() { return null; } @Override public String getAffiliation() { return null; } @Override public Enrollment getEnrollment() { return new Enrollment() { @Override public PrivateKey getKey() { PrivateKey privateKey = null; try { File privateKeyFile = findFileSk( "/opt/gopath/src/github.com/hyperledger/fabric-samples/first-network/crypto-config/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp/keystore"); privateKey = getPrivateKeyFromBytes( IOUtils.toByteArray(new FileInputStream(privateKeyFile))); } catch (InvalidKeySpecException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (NoSuchProviderException e) { e.printStackTrace(); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } return privateKey; } @Override public String getCert() { String certificate = null; try { File certificateFile = new File( "/opt/gopath/src/github.com/hyperledger/fabric-samples/first-network/crypto-config/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp/signcerts/Admin@org1.example.com-cert.pem"); certificate = new String(IOUtils.toByteArray(new FileInputStream(certificateFile)), "UTF-8"); } catch (UnsupportedEncodingException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return certificate; } }; } @Override public String getMspId() { return "Org1MSP"; } }); } static File findFileSk(String directorys) { File directory = new File(directorys); File[] matches = directory.listFiles((file, name) -> name.endsWith("_sk")); if (null == matches) { throw new RuntimeException( "Matches returned null does %s directory exist?" + directory.getAbsoluteFile().getName()); } // if (matches.length != 1) { // throw new RuntimeException("Expected in %s only 1 sk file but found %d" // + directory.getAbsoluteFile().getName() + matches.length); // } System.out.println(matches[0]); System.out.println(matches[1]); return matches[1]; } static PrivateKey getPrivateKeyFromBytes(byte[] data) throws IOException, NoSuchProviderException, NoSuchAlgorithmException, InvalidKeySpecException { final Reader pemReader = new StringReader(new String(data)); final PrivateKeyInfo pemPair; try (PEMParser pemParser = new PEMParser(pemReader)) { pemPair = (PrivateKeyInfo) pemParser.readObject(); } Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); PrivateKey privateKey = new JcaPEMKeyConverter().setProvider(BouncyCastleProvider.PROVIDER_NAME) .getPrivateKey(pemPair); return privateKey; } void createChannel() throws InvalidArgumentException, TransactionException { channel = client.newChannel("mychannel"); Properties ordererProperties = new Properties(); ordererProperties.setProperty("pemFile", "/opt/gopath/src/github.com/hyperledger/fabric-samples/first-network/crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/server.crt"); ordererProperties.setProperty("trustServerCertificate", "true"); // testing environment only NOT FOR PRODUCTION! ordererProperties.setProperty("hostnameOverride", "orderer.example.com"); ordererProperties.setProperty("sslProvider", "openSSL"); ordererProperties.setProperty("negotiationType", "TLS"); ordererProperties.put("grpc.NettyChannelBuilderOption.keepAliveTime", new Object[] { 5L, TimeUnit.MINUTES }); ordererProperties.put("grpc.NettyChannelBuilderOption.keepAliveTimeout", new Object[] { 8L, TimeUnit.SECONDS }); channel.addOrderer( client.newOrderer("orderer.example.com", "grpcs://orderer.example.com:7050", ordererProperties)); Properties peerProperties = new Properties(); peerProperties.setProperty("pemFile", "/opt/gopath/src/github.com/hyperledger/fabric-samples/first-network/crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/server.crt"); peerProperties.setProperty("trustServerCertificate", "true"); // testing environment only NOT FOR PRODUCTION! peerProperties.setProperty("hostnameOverride", "peer0.org1.example.com"); peerProperties.setProperty("sslProvider", "openSSL"); peerProperties.setProperty("negotiationType", "TLS"); peerProperties.put("grpc.NettyChannelBuilderOption.maxInboundMessageSize", 9000000); channel.addPeer( client.newPeer("peer0.org1.example.com", "grpcs://peer0.org1.example.com:7051", peerProperties)); channel.initialize(); } void creteTransactionalProposal() { proposalRequest = client.newTransactionProposalRequest(); final ChaincodeID chaincodeID = ChaincodeID.newBuilder().setName("mycc").setVersion("1.0") .setPath("github.com/hyperledger/fabric/examples/chaincode/go/chaincode_example02").build(); proposalRequest.setChaincodeID(chaincodeID); // proposalRequest.setFcn("init"); // proposalRequest.setProposalWaitTime(TimeUnit.SECONDS.toMillis(10)); // proposalRequest.setArgs(new String[] { "ORG1", // "{\"assetKey\":\"a1\",\"assetName\":\"aname1\",\"assetType\":\"atype1\",\"slNo\":\"slno1\",\"orderDate\":\"19-05-2017\"}" }); proposalRequest.setFcn("invoke"); proposalRequest.setProposalWaitTime(TimeUnit.SECONDS.toMillis(10)); proposalRequest.setArgs(new String[] { "a", "b", "100" }); } void sendProposal() throws ProposalException, InvalidArgumentException, InterruptedException, ExecutionException { final Collection<ProposalResponse> responses = channel.sendTransactionProposal(proposalRequest); CompletableFuture<BlockEvent.TransactionEvent> txFuture = channel.sendTransaction(responses, client.getUserContext()); BlockEvent.TransactionEvent event = txFuture.get(); System.out.println(event.toString()); } public static void main(String args[]) throws Exception { System.out.println("starting..."); App t = new App(); t.setupCryptoMaterialsForClient(); t.createChannel(); // t.creteTransactionalProposal(); // t.sendProposal(); QueryByChaincodeRequest queryByChaincodeRequest = QueryByChaincodeRequest .newInstance(t.client.getUserContext()); final ChaincodeID chaincodeID = ChaincodeID.newBuilder().setName("mycc").setVersion("1.0") .setPath("github.com/hyperledger/fabric/examples/chaincode/go/chaincode_example02").build(); queryByChaincodeRequest.setChaincodeID(chaincodeID); queryByChaincodeRequest.setFcn("query"); queryByChaincodeRequest.setArgs(new String[] { "a" }); final Collection<ProposalResponse> responses = t.channel.queryByChaincode(queryByChaincodeRequest); Iterator<ProposalResponse> it = responses.iterator(); while (it.hasNext()) { ProposalResponse response = it.next(); System.out.println(response.getMessage()); System.out.println(new String(response.getChaincodeActionResponsePayload())); } } }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package springapp.test; import junit.framework.TestCase; import org.junit.Test; import org.springframework.web.servlet.ModelAndView; import springapp.web.HelloController; /** * * @author davidchang */ public class HelloControllerTest extends TestCase { @Test public void testHandleRequestView() throws Exception { HelloController controller = new HelloController(); ModelAndView modelAndView = controller.handleRequest(null, null); assertEquals("hello", modelAndView.getViewName()); assertNotNull(modelAndView.getModel()); String nowValue = (String) modelAndView.getModel().get("now"); assertNotNull(nowValue); } }
package io.logansquarex.core; import com.fasterxml.jackson.core.JsonFactory; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import io.logansquarex.JsonMapperLoaderImpl; import io.logansquarex.core.objectmappers.BooleanMapper; import io.logansquarex.core.objectmappers.DoubleMapper; import io.logansquarex.core.objectmappers.FloatMapper; import io.logansquarex.core.objectmappers.IntegerMapper; import io.logansquarex.core.objectmappers.ListMapper; import io.logansquarex.core.objectmappers.LongMapper; import io.logansquarex.core.objectmappers.MapMapper; import io.logansquarex.core.objectmappers.ObjectMapper; import io.logansquarex.core.objectmappers.StringMapper; import io.logansquarex.core.simple.SimpleListResponse; import io.logansquarex.core.simple.SimpleListResponseMapper; import io.logansquarex.core.typeconverters.DefaultCalendarConverter; import io.logansquarex.core.typeconverters.DefaultDateConverter; import io.logansquarex.core.typeconverters.TypeConverter; import io.logansquarex.core.util.SimpleArrayMap; /** * The point of all interaction with this library. * Unlike LoganSquare, the default will not throw exception. */ public class LoganSquareX { protected static final ListMapper LIST_MAPPER = new ListMapper(); protected static final MapMapper MAP_MAPPER = new MapMapper(); protected static final SimpleArrayMap<Class, JsonMapper> OBJECT_MAPPERS = new SimpleArrayMap<Class, JsonMapper>(JsonMapperLoaderImpl.DEFAULT_MAP_SIZE); protected static final SimpleArrayMap<Class, Class> CLASS_MAPPERS = new SimpleArrayMap<Class, Class>(JsonMapperLoaderImpl.DEFAULT_MAP_SIZE); static { try { JsonMapperLoaderImpl JSON_MAPPER_LOADER; JSON_MAPPER_LOADER = new JsonMapperLoaderImpl(); // JSON_MAPPER_LOADER.putAllJsonMappers(OBJECT_MAPPERS); JSON_MAPPER_LOADER.retainAllClassMapper(CLASS_MAPPERS); } catch (Exception e) { e.printStackTrace(); // throw new RuntimeException("JsonMapperLoaderImpl class not found"); } OBJECT_MAPPERS.put(String.class, new StringMapper()); OBJECT_MAPPERS.put(Integer.class, new IntegerMapper()); OBJECT_MAPPERS.put(Long.class, new LongMapper()); OBJECT_MAPPERS.put(Float.class, new FloatMapper()); OBJECT_MAPPERS.put(Double.class, new DoubleMapper()); OBJECT_MAPPERS.put(Boolean.class, new BooleanMapper()); OBJECT_MAPPERS.put(Object.class, new ObjectMapper()); OBJECT_MAPPERS.put(List.class, LIST_MAPPER); OBJECT_MAPPERS.put(ArrayList.class, LIST_MAPPER); OBJECT_MAPPERS.put(Map.class, MAP_MAPPER); OBJECT_MAPPERS.put(HashMap.class, MAP_MAPPER); OBJECT_MAPPERS.put(SimpleListResponse.class, new SimpleListResponseMapper()); } protected static final ConcurrentHashMap<ParameterizedType, JsonMapper> PARAMETERIZED_OBJECT_MAPPERS = new ConcurrentHashMap<ParameterizedType, JsonMapper>(); protected static final SimpleArrayMap<Class, TypeConverter> TYPE_CONVERTERS = new SimpleArrayMap<>(); static { registerTypeConverter(Date.class, new DefaultDateConverter()); registerTypeConverter(Calendar.class, new DefaultCalendarConverter()); } /** * The JsonFactory that should be used throughout the entire app. */ public static final JsonFactory JSON_FACTORY = new JsonFactory(); /** * Parse an object from an InputStream. * * @param is The InputStream, most likely from your networking library. * @param jsonObjectClass The @JsonObject class to parse the InputStream into */ public static <E> E parse(InputStream is, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).parse(is); } catch (IOException e) { return null; } } /** * Parse an object from a String. Note: parsing from an InputStream should be preferred over parsing from a String if possible. * * @param jsonString The JSON string being parsed. * @param jsonObjectClass The @JsonObject class to parse the InputStream into */ public static <E> E parse(String jsonString, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).parse(jsonString); } catch (IOException e) { return null; } } /** * Parse a parameterized object from an InputStream. * * @param is The InputStream, most likely from your networking library. * @param jsonObjectType The ParameterizedType describing the object. Ex: LoganSquare.parse(is, new ParameterizedType&lt;MyModel&lt;OtherModel&gt;&gt;() { }); */ public static <E> E parse(InputStream is, ParameterizedType<E> jsonObjectType) { try { return mapperFor(jsonObjectType).parse(is); } catch (IOException e) { return null; } } /** * Parse a parameterized object from a String. Note: parsing from an InputStream should be preferred over parsing from a String if possible. * * @param jsonString The JSON string being parsed. * @param jsonObjectType The ParameterizedType describing the object. Ex: LoganSquare.parse(is, new ParameterizedType&lt;MyModel&lt;OtherModel&gt;&gt;() { }); */ public static <E> E parse(String jsonString, ParameterizedType<E> jsonObjectType) { try { return mapperFor(jsonObjectType).parse(jsonString); } catch (IOException e) { return null; } } /** * Parse a list of objects from an InputStream. * * @param is The inputStream, most likely from your networking library. * @param jsonObjectClass The @JsonObject class to parse the InputStream into */ public static <E> List<E> parseList(InputStream is, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).parseList(is); } catch (IOException e) { return null; } } /** * Parse a list of objects from a String. Note: parsing from an InputStream should be preferred over parsing from a String if possible. * * @param jsonString The JSON string being parsed. * @param jsonObjectClass The @JsonObject class to parse the InputStream into */ public static <E> List<E> parseList(String jsonString, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).parseList(jsonString); } catch (IOException e) { return null; } } /** * Parse a map of objects from an InputStream. * * @param is The inputStream, most likely from your networking library. * @param jsonObjectClass The @JsonObject class to parse the InputStream into */ public static <E> Map<String, E> parseMap(InputStream is, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).parseMap(is); } catch (IOException e) { return null; } } /** * Parse a map of objects from a String. Note: parsing from an InputStream should be preferred over parsing from a String if possible. * * @param jsonString The JSON string being parsed. * @param jsonObjectClass The @JsonObject class to parse the InputStream into */ public static <E> Map<String, E> parseMap(String jsonString, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).parseMap(jsonString); } catch (IOException e) { return null; } } /** * Serialize an object to a JSON String. * * @param object The object to serialize. */ @SuppressWarnings("unchecked") public static <E> String serialize(E object) { try { return mapperFor((Class<E>) object.getClass()).serialize(object); } catch (IOException e) { return null; } } /** * Serialize an object to an OutputStream. * * @param object The object to serialize. * @param os The OutputStream being written to. */ @SuppressWarnings("unchecked") public static <E> void serialize(E object, OutputStream os) { try { mapperFor((Class<E>) object.getClass()).serialize(object, os); } catch (IOException e) { // ignored System.out.print(e.toString()); } } /** * Serialize a parameterized object to a JSON String. * * @param object The object to serialize. * @param parameterizedType The ParameterizedType describing the object. Ex: LoganSquare.serialize(object, new ParameterizedType&lt;MyModel&lt;OtherModel&gt;&gt;() { }); */ @SuppressWarnings("unchecked") public static <E> String serialize(E object, ParameterizedType<E> parameterizedType) { try { return mapperFor(parameterizedType).serialize(object); } catch (IOException e) { return null; } } /** * Serialize a parameterized object to an OutputStream. * * @param object The object to serialize. * @param parameterizedType The ParameterizedType describing the object. Ex: LoganSquare.serialize(object, new ParameterizedType&lt;MyModel&lt;OtherModel&gt;&gt;() { }, os); * @param os The OutputStream being written to. */ @SuppressWarnings("unchecked") public static <E> void serialize(E object, ParameterizedType<E> parameterizedType, OutputStream os) { try { mapperFor(parameterizedType).serialize(object, os); } catch (IOException e) { // ignored System.out.print(e.toString()); } } /** * Serialize a list of objects to a JSON String. * * @param list The list of objects to serialize. * @param jsonObjectClass The @JsonObject class of the list elements */ public static <E> String serialize(List<E> list, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).serialize(list); } catch (IOException e) { return null; } } /** * Serialize a list of objects to an OutputStream. * * @param list The list of objects to serialize. * @param os The OutputStream to which the list should be serialized * @param jsonObjectClass The @JsonObject class of the list elements */ public static <E> void serialize(List<E> list, OutputStream os, Class<E> jsonObjectClass) { try { mapperFor(jsonObjectClass).serialize(list, os); } catch (IOException e) { // ignored System.out.print(e.toString()); } } /** * Serialize a map of objects to a JSON String. * * @param map The map of objects to serialize. * @param jsonObjectClass The @JsonObject class of the list elements */ public static <E> String serialize(Map<String, E> map, Class<E> jsonObjectClass) { try { return mapperFor(jsonObjectClass).serialize(map); } catch (IOException e) { return null; } } /** * Serialize a map of objects to an OutputStream. * * @param map The map of objects to serialize. * @param os The OutputStream to which the list should be serialized * @param jsonObjectClass The @JsonObject class of the list elements */ public static <E> void serialize(Map<String, E> map, OutputStream os, Class<E> jsonObjectClass) { try { mapperFor(jsonObjectClass).serialize(map, os); } catch (IOException e) { // ignored System.out.print(e.toString()); } } @SuppressWarnings("unchecked") /*package*/ static <E> JsonMapper<E> getMapper(Class<E> cls) { JsonMapper<E> mapper = OBJECT_MAPPERS.get(cls); if (mapper == null) { Class<?> mapperClass= CLASS_MAPPERS.get(cls); // The only way the mapper wouldn't already be loaded into OBJECT_MAPPERS is if it was compiled separately, but let's handle it anyway try { if (mapperClass==null) { mapperClass = Class.forName(cls.getName() + Constants.MAPPER_CLASS_SUFFIX); } mapper = (JsonMapper<E>) mapperClass.newInstance(); OBJECT_MAPPERS.put(cls, mapper); } catch (Exception ignored) { } } return mapper; } @SuppressWarnings("unchecked") private static <E> JsonMapper<E> getMapper(ParameterizedType<E> type, SimpleArrayMap<ParameterizedType, JsonMapper> partialMappers) { if (type.typeParameters.size() == 0) { return getMapper((Class<E>) type.rawType); } if (partialMappers == null) { partialMappers = new SimpleArrayMap<ParameterizedType, JsonMapper>(); } if (partialMappers.containsKey(type)) { return partialMappers.get(type); } else if (PARAMETERIZED_OBJECT_MAPPERS.containsKey(type)) { return PARAMETERIZED_OBJECT_MAPPERS.get(type); } else { try { Class<?> mapperClass = Class.forName(type.rawType.getName() + Constants.MAPPER_CLASS_SUFFIX); Constructor constructor = mapperClass.getDeclaredConstructors()[0]; Object[] args = new Object[2 + type.typeParameters.size()]; args[0] = type; args[args.length - 1] = partialMappers; for (int i = 0; i < type.typeParameters.size(); i++) { args[i + 1] = type.typeParameters.get(i); } JsonMapper<E> mapper = (JsonMapper<E>) constructor.newInstance(args); PARAMETERIZED_OBJECT_MAPPERS.put(type, mapper); return mapper; } catch (Exception ignored) { return null; } } } /** * Returns whether or not LoganSquare can handle a given class. * * @param cls The class for which support is being checked. */ @SuppressWarnings("unchecked") public static boolean supports(Class cls) { return getMapper(cls) != null; } /** * Returns whether or not LoganSquare can handle a given ParameterizedType. * * @param type The ParameterizedType for which support is being checked. */ @SuppressWarnings("unchecked") public static boolean supports(ParameterizedType type) { return getMapper(type, null) != null; } /** * Returns a JsonMapper for a given class that has been annotated with @JsonObject. * * @param cls The class for which the JsonMapper should be fetched. */ public static <E> JsonMapper<E> mapperFor(Class<E> cls) throws NoSuchMapperException { JsonMapper<E> mapper = getMapper(cls); if (mapper == null) { throw new NoSuchMapperException(cls); } else { return mapper; } } /** * Returns a JsonMapper for a given class that has been annotated with @JsonObject. * * @param type The ParameterizedType for which the JsonMapper should be fetched. */ @SuppressWarnings("unchecked") public static <E> JsonMapper<E> mapperFor(ParameterizedType<E> type) throws NoSuchMapperException { return mapperFor(type, null); } public static <E> JsonMapper<E> mapperFor(ParameterizedType<E> type, SimpleArrayMap<ParameterizedType, JsonMapper> partialMappers) throws NoSuchMapperException { JsonMapper<E> mapper = getMapper(type, partialMappers); if (mapper == null) { throw new NoSuchMapperException(type.rawType); } else { return mapper; } } /** * Returns a TypeConverter for a given class. * * @param cls The class for which the TypeConverter should be fetched. */ @SuppressWarnings("unchecked") public static <E> TypeConverter<E> typeConverterFor(Class<E> cls) throws NoSuchTypeConverterException { TypeConverter<E> typeConverter = TYPE_CONVERTERS.get(cls); if (typeConverter == null) { throw new NoSuchTypeConverterException(cls); } return typeConverter; } /** * Register a new TypeConverter for parsing and serialization. * * @param cls The class for which the TypeConverter should be used. * @param converter The TypeConverter */ public static <E> void registerTypeConverter(Class<E> cls, TypeConverter<E> converter) { TYPE_CONVERTERS.put(cls, converter); } /** * list to response * * @param list * @param <E> * @return {"code":0,"data":[{"description":"xxxxx"},{"description":"xxxxx"}]} */ public static <E> String serializeListSimple(List<E> list) { try { SimpleListResponseMapper jsonObjectMapper = (SimpleListResponseMapper) mapperFor(SimpleListResponse.class); return jsonObjectMapper.serialize(list); } catch (IOException e) { return null; } } }
/** * AET * * Copyright (C) 2013 Cognifide Limited * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.cognifide.aet.communication.api.messages; import java.io.Serializable; /** * Basic message that holds its type. Used to send via jms. */ public interface BasicMessage extends Serializable { /** * @return message type. */ public MessageType getMessageType(); }
/* * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.data.relational.core.mapping.event; /** * Interface for {@link RelationalEvent}s which have an {@link Identifier} but might not have an entity. * * @author Jens Schauder */ public interface WithId<E> extends RelationalEvent<E> { /** * Events with an identifier will always return a {@link Identifier} one. */ Identifier getId(); }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.repository.kdr; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.repository.AbstractRepository; import org.pentaho.di.repository.RepositoryDirectory; public abstract class KettleDatabaseRepositoryBase extends AbstractRepository { // private static Class<?> PKG = Repository.class; // for i18n purposes, needed by Translator2!! public static final String TABLE_R_VERSION = "R_VERSION"; public static final String FIELD_VERSION_ID_VERSION = "ID_VERSION"; public static final String FIELD_VERSION_MAJOR_VERSION = "MAJOR_VERSION"; public static final String FIELD_VERSION_MINOR_VERSION = "MINOR_VERSION"; public static final String FIELD_VERSION_IS_UPGRADE = "IS_UPGRADE"; public static final String FIELD_VERSION_UPGRADE_DATE = "UPGRADE_DATE"; public static final String TABLE_R_REPOSITORY_LOG = "R_REPOSITORY_LOG"; public static final String FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG = "ID_REPOSITORY_LOG"; public static final String FIELD_REPOSITORY_LOG_REP_VERSION = "REP_VERSION"; public static final String FIELD_REPOSITORY_LOG_LOG_DATE = "LOG_DATE"; public static final String FIELD_REPOSITORY_LOG_LOG_USER = "LOG_USER"; public static final String FIELD_REPOSITORY_LOG_OPERATION_DESC = "OPERATION_DESC"; public static final String TABLE_R_DATABASE_TYPE = "R_DATABASE_TYPE"; public static final String FIELD_DATABASE_TYPE_ID_DATABASE_TYPE = "ID_DATABASE_TYPE"; public static final String FIELD_DATABASE_TYPE_CODE = "CODE"; public static final String FIELD_DATABASE_TYPE_DESCRIPTION = "DESCRIPTION"; public static final String TABLE_R_DATABASE_CONTYPE = "R_DATABASE_CONTYPE"; public static final String FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE = "ID_DATABASE_CONTYPE"; public static final String FIELD_DATABASE_CONTYPE_CODE = "CODE"; public static final String FIELD_DATABASE_CONTYPE_DESCRIPTION = "DESCRIPTION"; public static final String TABLE_R_DATABASE = "R_DATABASE"; public static final String FIELD_DATABASE_ID_DATABASE = "ID_DATABASE"; public static final String FIELD_DATABASE_NAME = "NAME"; public static final String FIELD_DATABASE_ID_DATABASE_TYPE = "ID_DATABASE_TYPE"; public static final String FIELD_DATABASE_ID_DATABASE_CONTYPE = "ID_DATABASE_CONTYPE"; public static final String FIELD_DATABASE_HOST_NAME = "HOST_NAME"; public static final String FIELD_DATABASE_DATABASE_NAME = "DATABASE_NAME"; public static final String FIELD_DATABASE_PORT = "PORT"; public static final String FIELD_DATABASE_USERNAME = "USERNAME"; public static final String FIELD_DATABASE_DATA_TBS = "DATA_TBS"; public static final String FIELD_DATABASE_PASSWORD = "PASSWORD"; public static final String FIELD_DATABASE_SERVERNAME = "SERVERNAME"; public static final String FIELD_DATABASE_INDEX_TBS = "INDEX_TBS"; public static final String TABLE_R_DATABASE_ATTRIBUTE = "R_DATABASE_ATTRIBUTE"; public static final String FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE = "ID_DATABASE_ATTRIBUTE"; public static final String FIELD_DATABASE_ATTRIBUTE_ID_DATABASE = "ID_DATABASE"; public static final String FIELD_DATABASE_ATTRIBUTE_CODE = "CODE"; public static final String FIELD_DATABASE_ATTRIBUTE_VALUE_STR = "VALUE_STR"; public static final String TABLE_R_NOTE = "R_NOTE"; public static final String FIELD_NOTE_ID_NOTE = "ID_NOTE"; public static final String FIELD_NOTE_VALUE_STR = "VALUE_STR"; public static final String FIELD_NOTE_GUI_LOCATION_X = "GUI_LOCATION_X"; public static final String FIELD_NOTE_GUI_LOCATION_Y = "GUI_LOCATION_Y"; public static final String FIELD_NOTE_GUI_LOCATION_WIDTH = "GUI_LOCATION_WIDTH"; public static final String FIELD_NOTE_GUI_LOCATION_HEIGHT = "GUI_LOCATION_HEIGHT"; public static final String FIELD_NOTE_FONT_NAME = "FONT_NAME"; public static final String FIELD_NOTE_FONT_SIZE = "FONT_SIZE"; public static final String FIELD_NOTE_FONT_BOLD = "FONT_BOLD"; public static final String FIELD_NOTE_FONT_ITALIC = "FONT_ITALIC"; public static final String FIELD_NOTE_COLOR_RED = "FONT_COLOR_RED"; public static final String FIELD_NOTE_COLOR_GREEN = "FONT_COLOR_GREEN"; public static final String FIELD_NOTE_COLOR_BLUE = "FONT_COLOR_BLUE"; public static final String FIELD_NOTE_BACK_GROUND_COLOR_RED = "FONT_BACK_GROUND_COLOR_RED"; public static final String FIELD_NOTE_BACK_GROUND_COLOR_GREEN = "FONT_BACK_GROUND_COLOR_GREEN"; public static final String FIELD_NOTE_BACK_GROUND_COLOR_BLUE = "FONT_BACK_GROUND_COLOR_BLUE"; public static final String FIELD_NOTE_BORDER_COLOR_RED = "FONT_BORDER_COLOR_RED"; public static final String FIELD_NOTE_BORDER_COLOR_GREEN = "FONT_BORDER_COLOR_GREEN"; public static final String FIELD_NOTE_BORDER_COLOR_BLUE = "FONT_BORDER_COLOR_BLUE"; public static final String FIELD_NOTE_DRAW_SHADOW = "DRAW_SHADOW"; public static final String TABLE_R_TRANSFORMATION = "R_TRANSFORMATION"; public static final String FIELD_TRANSFORMATION_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANSFORMATION_ID_DIRECTORY = "ID_DIRECTORY"; public static final String FIELD_TRANSFORMATION_NAME = "NAME"; public static final String FIELD_TRANSFORMATION_DESCRIPTION = "DESCRIPTION"; public static final String FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION = "EXTENDED_DESCRIPTION"; public static final String FIELD_TRANSFORMATION_TRANS_VERSION = "TRANS_VERSION"; public static final String FIELD_TRANSFORMATION_TRANS_STATUS = "TRANS_STATUS"; public static final String FIELD_TRANSFORMATION_ID_STEP_READ = "ID_STEP_READ"; public static final String FIELD_TRANSFORMATION_ID_STEP_WRITE = "ID_STEP_WRITE"; public static final String FIELD_TRANSFORMATION_ID_STEP_INPUT = "ID_STEP_INPUT"; public static final String FIELD_TRANSFORMATION_ID_STEP_OUTPUT = "ID_STEP_OUTPUT"; public static final String FIELD_TRANSFORMATION_ID_STEP_UPDATE = "ID_STEP_UPDATE"; public static final String FIELD_TRANSFORMATION_ID_DATABASE_LOG = "ID_DATABASE_LOG"; public static final String FIELD_TRANSFORMATION_TABLE_NAME_LOG = "TABLE_NAME_LOG"; public static final String FIELD_TRANSFORMATION_USE_BATCHID = "USE_BATCHID"; public static final String FIELD_TRANSFORMATION_USE_LOGFIELD = "USE_LOGFIELD"; public static final String FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE = "ID_DATABASE_MAXDATE"; public static final String FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE = "TABLE_NAME_MAXDATE"; public static final String FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE = "FIELD_NAME_MAXDATE"; public static final String FIELD_TRANSFORMATION_OFFSET_MAXDATE = "OFFSET_MAXDATE"; public static final String FIELD_TRANSFORMATION_DIFF_MAXDATE = "DIFF_MAXDATE"; public static final String FIELD_TRANSFORMATION_CREATED_USER = "CREATED_USER"; public static final String FIELD_TRANSFORMATION_CREATED_DATE = "CREATED_DATE"; public static final String FIELD_TRANSFORMATION_MODIFIED_USER = "MODIFIED_USER"; public static final String FIELD_TRANSFORMATION_MODIFIED_DATE = "MODIFIED_DATE"; public static final String FIELD_TRANSFORMATION_SIZE_ROWSET = "SIZE_ROWSET"; public static final String TABLE_R_DIRECTORY = "R_DIRECTORY"; public static final String FIELD_DIRECTORY_ID_DIRECTORY = "ID_DIRECTORY"; public static final String FIELD_DIRECTORY_ID_DIRECTORY_PARENT = "ID_DIRECTORY_PARENT"; public static final String FIELD_DIRECTORY_DIRECTORY_NAME = "DIRECTORY_NAME"; public static final String TABLE_R_TRANS_ATTRIBUTE = "R_TRANS_ATTRIBUTE"; public static final String FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE = "ID_TRANS_ATTRIBUTE"; public static final String FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_ATTRIBUTE_NR = "NR"; public static final String FIELD_TRANS_ATTRIBUTE_CODE = "CODE"; public static final String FIELD_TRANS_ATTRIBUTE_VALUE_NUM = "VALUE_NUM"; public static final String FIELD_TRANS_ATTRIBUTE_VALUE_STR = "VALUE_STR"; public static final String TABLE_R_JOB_ATTRIBUTE = "R_JOB_ATTRIBUTE"; public static final String FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE = "ID_JOB_ATTRIBUTE"; public static final String FIELD_JOB_ATTRIBUTE_ID_JOB = "ID_JOB"; public static final String FIELD_JOB_ATTRIBUTE_NR = "NR"; public static final String FIELD_JOB_ATTRIBUTE_CODE = "CODE"; public static final String FIELD_JOB_ATTRIBUTE_VALUE_NUM = "VALUE_NUM"; public static final String FIELD_JOB_ATTRIBUTE_VALUE_STR = "VALUE_STR"; public static final String TABLE_R_DEPENDENCY = "R_DEPENDENCY"; public static final String FIELD_DEPENDENCY_ID_DEPENDENCY = "ID_DEPENDENCY"; public static final String FIELD_DEPENDENCY_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_DEPENDENCY_ID_DATABASE = "ID_DATABASE"; public static final String FIELD_DEPENDENCY_TABLE_NAME = "TABLE_NAME"; public static final String FIELD_DEPENDENCY_FIELD_NAME = "FIELD_NAME"; public static final String TABLE_R_TRANS_STEP_CONDITION = "R_TRANS_STEP_CONDITION"; public static final String FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_STEP_CONDITION_ID_STEP = "ID_STEP"; public static final String FIELD_TRANS_STEP_CONDITION_ID_CONDITION = "ID_CONDITION"; public static final String TABLE_R_CONDITION = "R_CONDITION"; public static final String FIELD_CONDITION_ID_CONDITION = "ID_CONDITION"; public static final String FIELD_CONDITION_ID_CONDITION_PARENT = "ID_CONDITION_PARENT"; public static final String FIELD_CONDITION_NEGATED = "NEGATED"; public static final String FIELD_CONDITION_OPERATOR = "OPERATOR"; public static final String FIELD_CONDITION_LEFT_NAME = "LEFT_NAME"; public static final String FIELD_CONDITION_CONDITION_FUNCTION = "CONDITION_FUNCTION"; public static final String FIELD_CONDITION_RIGHT_NAME = "RIGHT_NAME"; public static final String FIELD_CONDITION_ID_VALUE_RIGHT = "ID_VALUE_RIGHT"; public static final String TABLE_R_VALUE = "R_VALUE"; public static final String FIELD_VALUE_ID_VALUE = "ID_VALUE"; public static final String FIELD_VALUE_NAME = "NAME"; public static final String FIELD_VALUE_VALUE_TYPE = "VALUE_TYPE"; public static final String FIELD_VALUE_VALUE_STR = "VALUE_STR"; public static final String FIELD_VALUE_IS_NULL = "IS_NULL"; public static final String TABLE_R_TRANS_HOP = "R_TRANS_HOP"; public static final String FIELD_TRANS_HOP_ID_TRANS_HOP = "ID_TRANS_HOP"; public static final String FIELD_TRANS_HOP_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_HOP_ID_STEP_FROM = "ID_STEP_FROM"; public static final String FIELD_TRANS_HOP_ID_STEP_TO = "ID_STEP_TO"; public static final String FIELD_TRANS_HOP_ENABLED = "ENABLED"; public static final String TABLE_R_STEP_TYPE = "R_STEP_TYPE"; public static final String FIELD_STEP_TYPE_ID_STEP_TYPE = "ID_STEP_TYPE"; public static final String FIELD_STEP_TYPE_CODE = "CODE"; public static final String FIELD_STEP_TYPE_DESCRIPTION = "DESCRIPTION"; public static final String FIELD_STEP_TYPE_HELPTEXT = "HELPTEXT"; public static final String TABLE_R_STEP = "R_STEP"; public static final String FIELD_STEP_ID_STEP = "ID_STEP"; public static final String FIELD_STEP_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_STEP_NAME = "NAME"; public static final String FIELD_STEP_DESCRIPTION = "DESCRIPTION"; public static final String FIELD_STEP_ID_STEP_TYPE = "ID_STEP_TYPE"; public static final String FIELD_STEP_DISTRIBUTE = "DISTRIBUTE"; public static final String FIELD_STEP_COPIES = "COPIES"; public static final String FIELD_STEP_GUI_LOCATION_X = "GUI_LOCATION_X"; public static final String FIELD_STEP_GUI_LOCATION_Y = "GUI_LOCATION_Y"; public static final String FIELD_STEP_GUI_DRAW = "GUI_DRAW"; public static final String FIELD_STEP_COPIES_STRING = "COPIES_STRING"; public static final String TABLE_R_STEP_ATTRIBUTE = "R_STEP_ATTRIBUTE"; public static final String FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE = "ID_STEP_ATTRIBUTE"; public static final String FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_STEP_ATTRIBUTE_ID_STEP = "ID_STEP"; public static final String FIELD_STEP_ATTRIBUTE_CODE = "CODE"; public static final String FIELD_STEP_ATTRIBUTE_NR = "NR"; public static final String FIELD_STEP_ATTRIBUTE_VALUE_NUM = "VALUE_NUM"; public static final String FIELD_STEP_ATTRIBUTE_VALUE_STR = "VALUE_STR"; public static final String TABLE_R_TRANS_NOTE = "R_TRANS_NOTE"; public static final String FIELD_TRANS_NOTE_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_NOTE_ID_NOTE = "ID_NOTE"; public static final String TABLE_R_JOB = "R_JOB"; public static final String FIELD_JOB_ID_JOB = "ID_JOB"; public static final String FIELD_JOB_ID_DIRECTORY = "ID_DIRECTORY"; public static final String FIELD_JOB_NAME = "NAME"; public static final String FIELD_JOB_DESCRIPTION = "DESCRIPTION"; public static final String FIELD_JOB_EXTENDED_DESCRIPTION = "EXTENDED_DESCRIPTION"; public static final String FIELD_JOB_JOB_VERSION = "JOB_VERSION"; public static final String FIELD_JOB_JOB_STATUS = "JOB_STATUS"; public static final String FIELD_JOB_ID_DATABASE_LOG = "ID_DATABASE_LOG"; public static final String FIELD_JOB_TABLE_NAME_LOG = "TABLE_NAME_LOG"; public static final String FIELD_JOB_CREATED_USER = "CREATED_USER"; public static final String FIELD_JOB_CREATED_DATE = "CREATED_DATE"; public static final String FIELD_JOB_MODIFIED_USER = "MODIFIED_USER"; public static final String FIELD_JOB_MODIFIED_DATE = "MODIFIED_DATE"; public static final String FIELD_JOB_USE_BATCH_ID = "USE_BATCH_ID"; public static final String FIELD_JOB_PASS_BATCH_ID = "PASS_BATCH_ID"; public static final String FIELD_JOB_USE_LOGFIELD = "USE_LOGFIELD"; public static final String FIELD_JOB_SHARED_FILE = "SHARED_FILE"; public static final String TABLE_R_LOGLEVEL = "R_LOGLEVEL"; public static final String FIELD_LOGLEVEL_ID_LOGLEVEL = "ID_LOGLEVEL"; public static final String FIELD_LOGLEVEL_CODE = "CODE"; public static final String FIELD_LOGLEVEL_DESCRIPTION = "DESCRIPTION"; public static final String TABLE_R_LOG = "R_LOG"; public static final String FIELD_LOG_ID_LOG = "ID_LOG"; public static final String FIELD_LOG_NAME = "NAME"; public static final String FIELD_LOG_ID_LOGLEVEL = "ID_LOGLEVEL"; public static final String FIELD_LOG_LOGTYPE = "LOGTYPE"; public static final String FIELD_LOG_FILENAME = "FILENAME"; public static final String FIELD_LOG_FILEEXTENTION = "FILEEXTENTION"; public static final String FIELD_LOG_ADD_DATE = "ADD_DATE"; public static final String FIELD_LOG_ADD_TIME = "ADD_TIME"; public static final String FIELD_LOG_ID_DATABASE_LOG = "ID_DATABASE_LOG"; public static final String FIELD_LOG_TABLE_NAME_LOG = "TABLE_NAME_LOG"; public static final String TABLE_R_JOBENTRY = "R_JOBENTRY"; public static final String FIELD_JOBENTRY_ID_JOBENTRY = "ID_JOBENTRY"; public static final String FIELD_JOBENTRY_ID_JOB = "ID_JOB"; public static final String FIELD_JOBENTRY_ID_JOBENTRY_TYPE = "ID_JOBENTRY_TYPE"; public static final String FIELD_JOBENTRY_NAME = "NAME"; public static final String FIELD_JOBENTRY_DESCRIPTION = "DESCRIPTION"; public static final String TABLE_R_JOBENTRY_COPY = "R_JOBENTRY_COPY"; public static final String FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY = "ID_JOBENTRY_COPY"; public static final String FIELD_JOBENTRY_COPY_ID_JOBENTRY = "ID_JOBENTRY"; public static final String FIELD_JOBENTRY_COPY_ID_JOB = "ID_JOB"; public static final String FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE = "ID_JOBENTRY_TYPE"; public static final String FIELD_JOBENTRY_COPY_NR = "NR"; public static final String FIELD_JOBENTRY_COPY_GUI_LOCATION_X = "GUI_LOCATION_X"; public static final String FIELD_JOBENTRY_COPY_GUI_LOCATION_Y = "GUI_LOCATION_Y"; public static final String FIELD_JOBENTRY_COPY_GUI_DRAW = "GUI_DRAW"; public static final String FIELD_JOBENTRY_COPY_PARALLEL = "PARALLEL"; public static final String TABLE_R_JOBENTRY_TYPE = "R_JOBENTRY_TYPE"; public static final String FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE = "ID_JOBENTRY_TYPE"; public static final String FIELD_JOBENTRY_TYPE_CODE = "CODE"; public static final String FIELD_JOBENTRY_TYPE_DESCRIPTION = "DESCRIPTION"; public static final String TABLE_R_JOBENTRY_ATTRIBUTE = "R_JOBENTRY_ATTRIBUTE"; public static final String FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE = "ID_JOBENTRY_ATTRIBUTE"; public static final String FIELD_JOBENTRY_ATTRIBUTE_ID_JOB = "ID_JOB"; public static final String FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY = "ID_JOBENTRY"; public static final String FIELD_JOBENTRY_ATTRIBUTE_NR = "NR"; public static final String FIELD_JOBENTRY_ATTRIBUTE_CODE = "CODE"; public static final String FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM = "VALUE_NUM"; public static final String FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR = "VALUE_STR"; public static final String TABLE_R_JOB_HOP = "R_JOB_HOP"; public static final String FIELD_JOB_HOP_ID_JOB_HOP = "ID_JOB_HOP"; public static final String FIELD_JOB_HOP_ID_JOB = "ID_JOB"; public static final String FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM = "ID_JOBENTRY_COPY_FROM"; public static final String FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO = "ID_JOBENTRY_COPY_TO"; public static final String FIELD_JOB_HOP_ENABLED = "ENABLED"; public static final String FIELD_JOB_HOP_EVALUATION = "EVALUATION"; public static final String FIELD_JOB_HOP_UNCONDITIONAL = "UNCONDITIONAL"; public static final String TABLE_R_JOB_NOTE = "R_JOB_NOTE"; public static final String FIELD_JOB_NOTE_ID_JOB = "ID_JOB"; public static final String FIELD_JOB_NOTE_ID_NOTE = "ID_NOTE"; public static final String TABLE_R_USER = "R_USER"; public static final String FIELD_USER_ID_USER = "ID_USER"; public static final String FIELD_USER_LOGIN = "LOGIN"; public static final String FIELD_USER_PASSWORD = "PASSWORD"; public static final String FIELD_USER_NAME = "NAME"; public static final String FIELD_USER_DESCRIPTION = "DESCRIPTION"; public static final String FIELD_USER_ENABLED = "ENABLED"; public static final String TABLE_R_STEP_DATABASE = "R_STEP_DATABASE"; public static final String FIELD_STEP_DATABASE_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_STEP_DATABASE_ID_STEP = "ID_STEP"; public static final String FIELD_STEP_DATABASE_ID_DATABASE = "ID_DATABASE"; public static final String TABLE_R_PARTITION_SCHEMA = "R_PARTITION_SCHEMA"; public static final String FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA = "ID_PARTITION_SCHEMA"; public static final String FIELD_PARTITION_SCHEMA_NAME = "NAME"; public static final String FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION = "DYNAMIC_DEFINITION"; public static final String FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE = "PARTITIONS_PER_SLAVE"; public static final String TABLE_R_PARTITION = "R_PARTITION"; public static final String FIELD_PARTITION_ID_PARTITION = "ID_PARTITION"; public static final String FIELD_PARTITION_ID_PARTITION_SCHEMA = "ID_PARTITION_SCHEMA"; public static final String FIELD_PARTITION_PARTITION_ID = "PARTITION_ID"; public static final String TABLE_R_TRANS_PARTITION_SCHEMA = "R_TRANS_PARTITION_SCHEMA"; public static final String FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA = "ID_TRANS_PARTITION_SCHEMA"; public static final String FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA = "ID_PARTITION_SCHEMA"; public static final String TABLE_R_CLUSTER = "R_CLUSTER"; public static final String FIELD_CLUSTER_ID_CLUSTER = "ID_CLUSTER"; public static final String FIELD_CLUSTER_NAME = "NAME"; public static final String FIELD_CLUSTER_BASE_PORT = "BASE_PORT"; public static final String FIELD_CLUSTER_SOCKETS_BUFFER_SIZE = "SOCKETS_BUFFER_SIZE"; public static final String FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL = "SOCKETS_FLUSH_INTERVAL"; public static final String FIELD_CLUSTER_SOCKETS_COMPRESSED = "SOCKETS_COMPRESSED"; public static final String FIELD_CLUSTER_DYNAMIC = "DYNAMIC_CLUSTER"; public static final String TABLE_R_SLAVE = "R_SLAVE"; public static final String FIELD_SLAVE_ID_SLAVE = "ID_SLAVE"; public static final String FIELD_SLAVE_NAME = "NAME"; public static final String FIELD_SLAVE_HOST_NAME = "HOST_NAME"; public static final String FIELD_SLAVE_PORT = "PORT"; public static final String FIELD_SLAVE_USERNAME = "USERNAME"; public static final String FIELD_SLAVE_PASSWORD = "PASSWORD"; public static final String FIELD_SLAVE_PROXY_HOST_NAME = "PROXY_HOST_NAME"; public static final String FIELD_SLAVE_PROXY_PORT = "PROXY_PORT"; public static final String FIELD_SLAVE_NON_PROXY_HOSTS = "NON_PROXY_HOSTS"; public static final String FIELD_SLAVE_MASTER = "MASTER"; public static final String FIELD_SLAVE_WEB_APP_NAME = "WEB_APP_NAME"; public static final String TABLE_R_CLUSTER_SLAVE = "R_CLUSTER_SLAVE"; public static final String FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE = "ID_CLUSTER_SLAVE"; public static final String FIELD_CLUSTER_SLAVE_ID_CLUSTER = "ID_CLUSTER"; public static final String FIELD_CLUSTER_SLAVE_ID_SLAVE = "ID_SLAVE"; public static final String TABLE_R_TRANS_CLUSTER = "R_TRANS_CLUSTER"; public static final String FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER = "ID_TRANS_CLUSTER"; public static final String FIELD_TRANS_CLUSTER_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_CLUSTER_ID_CLUSTER = "ID_CLUSTER"; public static final String TABLE_R_TRANS_SLAVE = "R_TRANS_SLAVE"; public static final String FIELD_TRANS_SLAVE_ID_TRANS_SLAVE = "ID_TRANS_SLAVE"; public static final String FIELD_TRANS_SLAVE_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_SLAVE_ID_SLAVE = "ID_SLAVE"; public static final String TABLE_R_JOBENTRY_DATABASE = "R_JOBENTRY_DATABASE"; public static final String FIELD_JOBENTRY_DATABASE_ID_JOB = "ID_JOB"; public static final String FIELD_JOBENTRY_DATABASE_ID_JOBENTRY = "ID_JOBENTRY"; public static final String FIELD_JOBENTRY_DATABASE_ID_DATABASE = "ID_DATABASE"; public static final String TABLE_R_TRANS_LOCK = "R_TRANS_LOCK"; public static final String FIELD_TRANS_LOCK_ID_TRANS_LOCK = "ID_TRANS_LOCK"; public static final String FIELD_TRANS_LOCK_ID_TRANSFORMATION = "ID_TRANSFORMATION"; public static final String FIELD_TRANS_LOCK_ID_USER = "ID_USER"; public static final String FIELD_TRANS_LOCK_LOCK_MESSAGE = "LOCK_MESSAGE"; public static final String FIELD_TRANS_LOCK_LOCK_DATE = "LOCK_DATE"; public static final String TABLE_R_JOB_LOCK = "R_JOB_LOCK"; public static final String FIELD_JOB_LOCK_ID_JOB_LOCK = "ID_JOB_LOCK"; public static final String FIELD_JOB_LOCK_ID_JOB = "ID_JOB"; public static final String FIELD_JOB_LOCK_ID_USER = "ID_USER"; public static final String FIELD_JOB_LOCK_LOCK_MESSAGE = "LOCK_MESSAGE"; public static final String FIELD_JOB_LOCK_LOCK_DATE = "LOCK_DATE"; /* * public static final String TABLE_R_TRANS_DATA_SERVICE = "R_TRANS_DATA_SERVICE"; public static final String * FIELD_TRANS_DATA_SERVICE_ID_TRANS_DATA_SERVICE = "ID_TRANS_DATA_SERVICE"; public static final String * FIELD_TRANS_DATA_SERVICE_ID_TRANS = "ID_TRANS"; public static final String FIELD_TRANS_DATA_SERVICE_SERVICE_NAME = * "SERVICE_NAME"; public static final String FIELD_TRANS_DATA_SERVICE_STEP_NAME = "STEP_NAME"; public static final * String FIELD_TRANS_DATA_SERVICE_IS_OUTPUT = "IS_OUTPUT"; public static final String * FIELD_TRANS_DATA_SERVICE_ALLOW_OPTIMIZATION = "ALLOW_OPTIMIZATION"; public static final String * FIELD_TRANS_DATA_SERVICE_CACHE_METHOD = "CACHE_METHOD"; */ public static final String TABLE_R_NAMESPACE = "R_NAMESPACE"; public static final String FIELD_NAMESPACE_ID_NAMESPACE = "ID_NAMESPACE"; public static final String FIELD_NAMESPACE_NAME = "NAME"; public static final String TABLE_R_ELEMENT_TYPE = "R_ELEMENT_TYPE"; public static final String FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE = "ID_ELEMENT_TYPE"; public static final String FIELD_ELEMENT_TYPE_ID_NAMESPACE = "ID_NAMESPACE"; public static final String FIELD_ELEMENT_TYPE_NAME = "NAME"; public static final String FIELD_ELEMENT_TYPE_DESCRIPTION = "DESCRIPTION"; public static final String TABLE_R_ELEMENT = "R_ELEMENT"; public static final String FIELD_ELEMENT_ID_ELEMENT = "ID_ELEMENT"; public static final String FIELD_ELEMENT_ID_ELEMENT_TYPE = "ID_ELEMENT_TYPE"; public static final String FIELD_ELEMENT_NAME = "NAME"; public static final String TABLE_R_ELEMENT_ATTRIBUTE = "R_ELEMENT_ATTRIBUTE"; public static final String FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE = "ID_ELEMENT_ATTRIBUTE"; public static final String FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT = "ID_ELEMENT"; public static final String FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT = "ID_ELEMENT_ATTRIBUTE_PARENT"; public static final String FIELD_ELEMENT_ATTRIBUTE_KEY = "ATTR_KEY"; public static final String FIELD_ELEMENT_ATTRIBUTE_VALUE = "ATTR_VALUE"; public static final String[] repositoryTableNames = new String[] { TABLE_R_CLUSTER, TABLE_R_CLUSTER_SLAVE, TABLE_R_CONDITION, TABLE_R_DATABASE, TABLE_R_DATABASE_ATTRIBUTE, TABLE_R_DATABASE_CONTYPE, TABLE_R_DATABASE_TYPE, TABLE_R_DEPENDENCY, TABLE_R_DIRECTORY, TABLE_R_JOB, TABLE_R_JOBENTRY, TABLE_R_JOBENTRY_ATTRIBUTE, TABLE_R_JOBENTRY_COPY, TABLE_R_JOBENTRY_DATABASE, TABLE_R_JOBENTRY_TYPE, TABLE_R_JOB_ATTRIBUTE, TABLE_R_JOB_HOP, TABLE_R_JOB_LOCK, TABLE_R_JOB_NOTE, TABLE_R_LOG, TABLE_R_LOGLEVEL, TABLE_R_NOTE, TABLE_R_PARTITION, TABLE_R_PARTITION_SCHEMA, TABLE_R_REPOSITORY_LOG, TABLE_R_SLAVE, TABLE_R_STEP, TABLE_R_STEP_ATTRIBUTE, TABLE_R_STEP_DATABASE, TABLE_R_STEP_TYPE, TABLE_R_TRANSFORMATION, TABLE_R_TRANS_ATTRIBUTE, TABLE_R_TRANS_CLUSTER, // , TABLE_R_TRANS_DATA_SERVICE TABLE_R_TRANS_HOP, TABLE_R_TRANS_LOCK, TABLE_R_TRANS_NOTE, TABLE_R_TRANS_PARTITION_SCHEMA, TABLE_R_TRANS_SLAVE, TABLE_R_TRANS_STEP_CONDITION, TABLE_R_USER, TABLE_R_VALUE, TABLE_R_VERSION, TABLE_R_NAMESPACE, TABLE_R_ELEMENT_TYPE, TABLE_R_ELEMENT, TABLE_R_ELEMENT_ATTRIBUTE }; /** The maximum length of a text field in a Kettle repository : 2.000.000 is enough for everyone ;-) */ public static final int REP_STRING_LENGTH = 2000000; public static final int REP_ORACLE_STRING_LENGTH = 2000; public static final int REP_STRING_CODE_LENGTH = 255; public static final String TRANS_ATTRIBUTE_ID_STEP_REJECTED = "ID_STEP_REJECTED"; public static final String TRANS_ATTRIBUTE_UNIQUE_CONNECTIONS = "UNIQUE_CONNECTIONS"; public static final String TRANS_ATTRIBUTE_FEEDBACK_SHOWN = "FEEDBACK_SHOWN"; public static final String TRANS_ATTRIBUTE_FEEDBACK_SIZE = "FEEDBACK_SIZE"; public static final String TRANS_ATTRIBUTE_USING_THREAD_PRIORITIES = "USING_THREAD_PRIORITIES"; public static final String TRANS_ATTRIBUTE_SHARED_FILE = "SHARED_FILE"; public static final String TRANS_ATTRIBUTE_CAPTURE_STEP_PERFORMANCE = "CAPTURE_STEP_PERFORMANCE"; public static final String TRANS_ATTRIBUTE_STEP_PERFORMANCE_CAPTURING_DELAY = "STEP_PERFORMANCE_CAPTURING_DELAY"; public static final String TRANS_ATTRIBUTE_STEP_PERFORMANCE_CAPTURING_SIZE_LIMIT = "STEP_PERFORMANCE_CAPTURING_SIZE_LIMIT"; public static final String TRANS_ATTRIBUTE_STEP_PERFORMANCE_LOG_TABLE = "STEP_PERFORMANCE_LOG_TABLE"; public static final String TRANS_ATTRIBUTE_LOG_SIZE_LIMIT = "LOG_SIZE_LIMIT"; public static final String TRANS_ATTRIBUTE_LOG_INTERVAL = "LOG_INTERVAL"; public static final String TRANS_ATTRIBUTE_TRANSFORMATION_TYPE = "TRANSFORMATION_TYPE"; public static final String TRANS_ATTRIBUTE_SLEEP_TIME_EMPTY = "SLEEP_TIME_EMPTY"; public static final String TRANS_ATTRIBUTE_SLEEP_TIME_FULL = "SLEEP_TIME_FULL"; public static final String TRANS_ATTRIBUTE_DATA_SERVICE_NAME = "DATA_SERVICE_NAME"; public static final String TRANS_ATTRIBUTE_PARAM_KEY = "PARAM_KEY"; public static final String TRANS_ATTRIBUTE_PARAM_DEFAULT = "PARAM_DEFAULT"; public static final String TRANS_ATTRIBUTE_PARAM_DESCRIPTION = "PARAM_DESC"; public static final String JOB_ATTRIBUTE_PARAM_KEY = "PARAM_KEY"; public static final String JOB_ATTRIBUTE_PARAM_DEFAULT = "PARAM_DEFAULT"; public static final String JOB_ATTRIBUTE_PARAM_DESCRIPTION = "PARAM_DESC"; public static final String JOB_ATTRIBUTE_LOG_SIZE_LIMIT = "LOG_SIZE_LIMIT"; //IDX_JOB_ATTRIBUTE_LOOKUP PDI-10237 public static final String IDX_JOB_ATTRIBUTE_LOOKUP = "IDX_JATT"; public static final String IDX_TRANS_ATTRIBUTE_LOOKUP = "IDX_TATT"; public static final String IDX_R_DATABASE_ATTRIBUTE = "IDX_RDAT"; public static final String IDX_R_DIRECTORY = "IDX_RDIR"; public static final String IDX_R_STEP_ATTRIBUTE = "IDX_RSAT"; public static final String R_STEP_DATABASE_LU1 = "IDX_RSD1"; public static final String R_STEP_DATABASE_LU2 = "IDX_RSD2"; public static final String R_JOBENTRY_DATABASE_LU1 = "IDX_RJD1"; public static final String R_JOBENTRY_DATABASE_LU2 = "IDX_RJD2"; public static final String R_JOBENTRY_ATTRIBUTE = "IDX_RJEA"; protected KettleDatabaseRepositoryMeta repositoryMeta; protected RepositoryDirectory importBaseDirectory; protected LogChannelInterface log; protected boolean connected; protected KettleDatabaseRepositoryCreationHelper creationHelper; public KettleDatabaseRepositoryBase() { } public KettleDatabaseRepositoryMeta getRepositoryMeta() { return repositoryMeta; } public void setRepositoryMeta( KettleDatabaseRepositoryMeta repositoryMeta ) { this.repositoryMeta = repositoryMeta; } public String getName() { if ( repositoryMeta == null ) { return null; } return repositoryMeta.getName(); } public LogChannelInterface getLog() { return log; } /** * @return If the repository is in a connected state. */ public boolean isConnected() { return connected; } // Utility classes public String quote( String identifier ) { return repositoryMeta.getConnection().quoteField( identifier ); } public String quoteTable( String table ) { return repositoryMeta.getConnection().getQuotedSchemaTableCombination( null, table ); } /** * @param connected * the connected to set */ public void setConnected( boolean connected ) { this.connected = connected; } }
package lhdt.domain; /** * 디렉토리 구조 * @author Cheon JeongDae * */ public enum UploadDirectoryType { // 년, 월, 일 YEAR, YEAR_MONTH, YEAR_MONTH_DAY, // 사용자 아이디 밑에 년, 월, 일 USERID_YEAR, USERID_YEAR_MONTH, USERID_YEAR_MONTH_DAY, // 년, 월, 일 밑에 사용자 아이디 YEAR_USERID, YEAR_MONTH_USERID, YEAR_MONTH_DAY_USERID }
package min3d; /** * Created by sreejith on 17/11/15. */ public final class Quaternion { public double x; public double y; public double z; public double w; //private float[] matrixs; public Quaternion(final Quaternion q) { this(q.x, q.y, q.z, q.w); } public Quaternion(double x, double y, double z, double w) { this.x = x; this.y = y; this.z = z; this.w = w; } public void set(final Quaternion q) { //matrixs = null; this.x = q.x; this.y = q.y; this.z = q.z; this.w = q.w; } public Quaternion(Vector3 axis, double angle) { set(axis, angle); } public double norm() { return Math.sqrt(dot(this)); } public double getW() { return w; } public double getX() { return x; } public double getY() { return y; } public double getZ() { return z; } /** * @param axis * rotation axis, unit vector * @param angle * the rotation angle * @return this */ public Quaternion set(Vector3 axis, double angle) { //matrixs = null; double s = Math.sin(angle / 2); w = Math.cos(angle / 2); x = axis.getX() * s; y = axis.getY() * s; z = axis.getZ() * s; return this; } public Quaternion mulThis(Quaternion q) { //matrixs = null; double nw = w * q.w - x * q.x - y * q.y - z * q.z; double nx = w * q.x + x * q.w + y * q.z - z * q.y; double ny = w * q.y + y * q.w + z * q.x - x * q.z; z = w * q.z + z * q.w + x * q.y - y * q.x; w = nw; x = nx; y = ny; return this; } public Quaternion setRotation( Vector3 v, float angle){ double s = (double) Math.sin(angle / 2); w = (double) Math.cos(angle / 2); x = v.x*s; y = v.y*s; z = v.z*s; return this; } public void setRotation(double x, double y, double z, double angle){ // float half = angle*0.5f; double s = (double) Math.sin(angle / 2); this.x = x*s; this.y = y*s; this.z = z*s; w = (double) Math.cos(angle / 2); } public void rotateThis(double x,double y,double z,double w){ this.x = x; this.y = y; this.z = z; this.w = w; } public Quaternion scaleThis(double scale) { if (scale != 1) { //matrixs = null; w *= scale; x *= scale; y *= scale; z *= scale; } return this; } public Quaternion divThis(double scale) { if (scale != 1) { //matrixs = null; w /= scale; x /= scale; y /= scale; z /= scale; } return this; } public double dot(Quaternion q) { return x * q.x + y * q.y + z * q.z + w * q.w; } public boolean equals(Quaternion q) { return x == q.x && y == q.y && z == q.z && w == q.w; } public Quaternion interpolateThis(Quaternion q, double t) { if (!equals(q)) { double d = dot(q); double qx, qy, qz, qw; if (d < 0f) { qx = -q.x; qy = -q.y; qz = -q.z; qw = -q.w; d = -d; } else { qx = q.x; qy = q.y; qz = q.z; qw = q.w; } double f0, f1; if ((1 - d) > 0.1f) { double angle = (double) Math.acos(d); double s = (double) Math.sin(angle); double tAngle = t * angle; f0 = (double) Math.sin(angle - tAngle) / s; f1 = (double) Math.sin(tAngle) / s; } else { f0 = 1 - t; f1 = t; } x = f0 * x + f1 * qx; y = f0 * y + f1 * qy; z = f0 * z + f1 * qz; w = f0 * w + f1 * qw; } return this; } public Quaternion normalizeThis() { return divThis(norm()); } public Quaternion interpolate(Quaternion q, double t) { return new Quaternion(this).interpolateThis(q, t); } /** * Converts this Quaternion into a matrix, returning it as a float array. */ public float[] toMatrix() { float[] matrixs = new float[16]; toMatrix(matrixs); return matrixs; } /** * Converts this Quaternion into a matrix, placing the values into the given array. * @param matrixs 16-length float array. */ public final void toMatrix(float[] matrixs) { matrixs[3] = 0.0f; matrixs[7] = 0.0f; matrixs[11] = 0.0f; matrixs[12] = 0.0f; matrixs[13] = 0.0f; matrixs[14] = 0.0f; matrixs[15] = 1.0f; matrixs[0] = (float) (1.0f - (2.0f * ((y * y) + (z * z)))); matrixs[1] = (float) (2.0f * ((x * y) - (z * w))); matrixs[2] = (float) (2.0f * ((x * z) + (y * w))); matrixs[4] = (float) (2.0f * ((x * y) + (z * w))); matrixs[5] = (float) (1.0f - (2.0f * ((x * x) + (z * z)))); matrixs[6] = (float) (2.0f * ((y * z) - (x * w))); matrixs[8] = (float) (2.0f * ((x * z) - (y * w))); matrixs[9] = (float) (2.0f * ((y * z) + (x * w))); matrixs[10] = (float) (1.0f - (2.0f * ((x * x) + (y * y)))); } }
package com.special.ResideMenuDemo; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentTransaction; import android.view.MotionEvent; import android.view.View; import android.widget.Toast; import com.special.ResideMenu.ResideMenu; import com.special.ResideMenu.ResideMenuItem; public class MenuActivity extends FragmentActivity implements View.OnClickListener{ private ResideMenu resideMenu; private MenuActivity mContext; private ResideMenuItem itemHome; private ResideMenuItem itemProfile; private ResideMenuItem itemCalendar; private ResideMenuItem itemSettings; /** * Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); mContext = this; setUpMenu(); changeFragment(new HomeFragment()); } private void setUpMenu() { // attach to current activity; resideMenu = new ResideMenu(this); resideMenu.setBackground(R.drawable.menu_background); resideMenu.attachToActivity(this); resideMenu.setMenuListener(menuListener); //valid scale factor is between 0.0f and 1.0f. leftmenu'width is 150dip. resideMenu.setScaleValue(0.6f); // create menu items; itemHome = new ResideMenuItem(this, R.drawable.icon_home, "Home"); itemProfile = new ResideMenuItem(this, R.drawable.icon_profile, "Profile"); itemCalendar = new ResideMenuItem(this, R.drawable.icon_calendar, "Calendar"); itemSettings = new ResideMenuItem(this, R.drawable.icon_settings, "Settings"); itemHome.setOnClickListener(this); itemProfile.setOnClickListener(this); itemCalendar.setOnClickListener(this); itemSettings.setOnClickListener(this); resideMenu.addMenuItem(itemHome, ResideMenu.DIRECTION_LEFT); resideMenu.addMenuItem(itemProfile, ResideMenu.DIRECTION_LEFT); resideMenu.addMenuItem(itemCalendar, ResideMenu.DIRECTION_RIGHT); resideMenu.addMenuItem(itemSettings, ResideMenu.DIRECTION_RIGHT); // You can disable a direction by setting -> // resideMenu.setSwipeDirectionDisable(ResideMenu.DIRECTION_RIGHT); findViewById(R.id.title_bar_left_menu).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { resideMenu.openMenu(ResideMenu.DIRECTION_LEFT); } }); findViewById(R.id.title_bar_right_menu).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { resideMenu.openMenu(ResideMenu.DIRECTION_RIGHT); } }); } @Override public boolean dispatchTouchEvent(MotionEvent ev) { return resideMenu.dispatchTouchEvent(ev); } @Override public void onClick(View view) { if (view == itemHome){ changeFragment(new HomeFragment()); }else if (view == itemProfile){ changeFragment(new ProfileFragment()); }else if (view == itemCalendar){ changeFragment(new CalendarFragment()); }else if (view == itemSettings){ changeFragment(new SettingsFragment()); } resideMenu.closeMenu(); } private ResideMenu.OnMenuListener menuListener = new ResideMenu.OnMenuListener() { @Override public void openMenu() { Toast.makeText(mContext, "Menu is opened!", Toast.LENGTH_SHORT).show(); } @Override public void closeMenu() { Toast.makeText(mContext, "Menu is closed!", Toast.LENGTH_SHORT).show(); } }; private void changeFragment(Fragment targetFragment){ resideMenu.clearIgnoredViewList(); getSupportFragmentManager() .beginTransaction() .replace(R.id.main_fragment, targetFragment, "fragment") .setTransitionStyle(FragmentTransaction.TRANSIT_FRAGMENT_FADE) .commit(); } // What good method is to access resideMenu? public ResideMenu getResideMenu(){ return resideMenu; } }
package org.gradle.test.performance.mediummonolithicjavaproject.p356; import org.junit.Test; import static org.junit.Assert.*; public class Test7132 { Production7132 objectUnderTest = new Production7132(); @Test public void testProperty0() { Production7129 value = new Production7129(); objectUnderTest.setProperty0(value); assertEquals(value, objectUnderTest.getProperty0()); } @Test public void testProperty1() { Production7130 value = new Production7130(); objectUnderTest.setProperty1(value); assertEquals(value, objectUnderTest.getProperty1()); } @Test public void testProperty2() { Production7131 value = new Production7131(); objectUnderTest.setProperty2(value); assertEquals(value, objectUnderTest.getProperty2()); } @Test public void testProperty3() { String value = "value"; objectUnderTest.setProperty3(value); assertEquals(value, objectUnderTest.getProperty3()); } @Test public void testProperty4() { String value = "value"; objectUnderTest.setProperty4(value); assertEquals(value, objectUnderTest.getProperty4()); } @Test public void testProperty5() { String value = "value"; objectUnderTest.setProperty5(value); assertEquals(value, objectUnderTest.getProperty5()); } @Test public void testProperty6() { String value = "value"; objectUnderTest.setProperty6(value); assertEquals(value, objectUnderTest.getProperty6()); } @Test public void testProperty7() { String value = "value"; objectUnderTest.setProperty7(value); assertEquals(value, objectUnderTest.getProperty7()); } @Test public void testProperty8() { String value = "value"; objectUnderTest.setProperty8(value); assertEquals(value, objectUnderTest.getProperty8()); } @Test public void testProperty9() { String value = "value"; objectUnderTest.setProperty9(value); assertEquals(value, objectUnderTest.getProperty9()); } }
package com.eventosapp.repository; import org.springframework.data.repository.CrudRepository; import com.eventosapp.models.Usuario; public interface UsuarioRepository extends CrudRepository<Usuario, String>{ Usuario findByLogin(String login); }
/******************************************************************************* * Copyright (c) 2009 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.report.designer.internal.ui.expressions; import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants; /** * IExpressionConverter */ public interface IExpressionConverter { static final String EXPRESSION_CLASS_CUBE = "cube"; //$NON-NLS-1$ static final String EXPRESSION_CLASS_TABLE = "table"; //$NON-NLS-1$ /** * Returns the binding expression by given name. * * @param bindingName * @return */ String getBindingExpression( String bindingName ); /** * Returns the parameter expression by given name. * * @param paramName * @return */ String getParameterExpression( String paramName ); /** * Returns the cube binding expression by given name. * * @param bindingName * @return */ String getCubeBindingExpression( String bindingName ); /** * Returns the dimension/level/attribute expression for given names. * * @param dimensionName * @param levelName * @param attributeName * @return */ String getDimensionExpression( String dimensionName, String levelName, String attributeName ); /** * Returns the measure expression for given name. * * @param measureName * @return */ String getMeasureExpression( String measureName ); /** * Returns the first binding that found referenced in the given expression. * * @param expression * @return */ String getBinding( String expression ); /** * Returns the result set column expression by given column name. * * @param bindingName * @return */ String getResultSetColumnExpression( String columnName ); /** * Returns the expression as the representation for the given constant value * and type. * * @param value * The constant value string. * @param dataType * The type constants defined as * {@link DesignChoiceConstants#CHOICE_COLUMN_DATA_TYPE} * @return */ String getConstantExpression( String value, String dataType ); /** * Convert the specific expression to the target script type expression. * * @param expr * the specific expression * @param scriptType * the target script type * @param exprClass * the expression class, dimension, binding, measure, etc. * @return an expression can run with the specific script type. * @throws UnsupportedOperationException */ String convertExpression( String expr, String scriptType, String exprClass ) throws UnsupportedOperationException; }
/** * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending */ /* * --------------------------------------------------------------------------------------------------------------------- * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit CharChunkSsaStamp and regenerate * --------------------------------------------------------------------------------------------------------------------- */ package io.deephaven.engine.table.impl.ssa; import io.deephaven.util.compare.DoubleComparisons; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.impl.util.RowRedirection; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.table.impl.util.WritableRowRedirection; /** * Stamp kernel for when the left hand side is a sorted chunk and the right hand side is a ticking SegmentedSortedArray. */ public class DoubleReverseChunkSsaStamp implements ChunkSsaStamp { static DoubleReverseChunkSsaStamp INSTANCE = new DoubleReverseChunkSsaStamp(); private DoubleReverseChunkSsaStamp() {} // use the instance @Override public void processEntry(Chunk<Values> leftStampValues, Chunk<RowKeys> leftStampKeys, SegmentedSortedArray ssa, WritableLongChunk<RowKeys> rightKeysForLeft, boolean disallowExactMatch) { processEntry(leftStampValues.asDoubleChunk(), leftStampKeys, (DoubleReverseSegmentedSortedArray)ssa, rightKeysForLeft, disallowExactMatch); } private static void processEntry(DoubleChunk<Values> leftStampValues, Chunk<RowKeys> leftStampKeys, DoubleReverseSegmentedSortedArray ssa, WritableLongChunk<RowKeys> rightKeysForLeft, boolean disallowExactMatch) { final int leftSize = leftStampKeys.size(); final long rightSize = ssa.size(); if (rightSize == 0) { rightKeysForLeft.fillWithValue(0, leftSize, RowSequence.NULL_ROW_KEY); rightKeysForLeft.setSize(leftSize); return; } final DoubleReverseSegmentedSortedArray.Iterator ssaIt = ssa.iterator(disallowExactMatch, true); for (int li = 0; li < leftSize; ) { final double leftValue = leftStampValues.get(li); final int comparison = doComparison(leftValue, ssaIt.getValue()); if (disallowExactMatch ? comparison <= 0 : comparison < 0) { rightKeysForLeft.set(li++, RowSequence.NULL_ROW_KEY); continue; } else if (comparison == 0) { rightKeysForLeft.set(li++, ssaIt.getKey()); continue; } ssaIt.advanceToLast(leftValue); final long redirectionKey = ssaIt.getKey(); if (!ssaIt.hasNext()) { rightKeysForLeft.fillWithValue(li, leftSize - li, redirectionKey); return; } else { rightKeysForLeft.set(li++, redirectionKey); final double nextRightValue = ssaIt.nextValue(); while (li < leftSize && (disallowExactMatch ? leq(leftStampValues.get(li), nextRightValue) : lt(leftStampValues.get(li), nextRightValue))) { rightKeysForLeft.set(li++, redirectionKey); } } } } @Override public void processRemovals(Chunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, Chunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightKeys, WritableLongChunk<RowKeys> priorRedirections, WritableRowRedirection rowRedirection, RowSetBuilderRandom modifiedBuilder, boolean disallowExactMatch) { processRemovals(leftStampValues.asDoubleChunk(), leftStampKeys, rightStampChunk.asDoubleChunk(), rightKeys, priorRedirections, rowRedirection, modifiedBuilder, disallowExactMatch); } private static void processRemovals(DoubleChunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, DoubleChunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightKeys, WritableLongChunk<RowKeys> nextRedirections, WritableRowRedirection rowRedirection, RowSetBuilderRandom modifiedBuilder, boolean disallowExactMatch) { // When removing a row, record the stamp, redirection key, and prior redirection key. Binary search // in the left for the removed key to find the smallest value geq the removed right. Update all rows // with the removed redirection to the previous key. int leftLowIdx = 0; for (int ii = 0; ii < rightStampChunk.size(); ++ii) { final double rightStampValue = rightStampChunk.get(ii); final long rightStampKey = rightKeys.get(ii); final long newRightStampKey = nextRedirections.get(ii); leftLowIdx = findFirstResponsiveLeft(leftLowIdx, leftStampValues, disallowExactMatch, rightStampValue); while (leftLowIdx < leftStampKeys.size()) { final long leftKey = leftStampKeys.get(leftLowIdx); final long leftRedirectionKey = rowRedirection.get(leftKey); if (leftRedirectionKey == rightStampKey) { modifiedBuilder.addKey(leftKey); if (newRightStampKey == RowSequence.NULL_ROW_KEY) { rowRedirection.removeVoid(leftKey); } else { rowRedirection.putVoid(leftKey, newRightStampKey); } leftLowIdx++; } else { break; } } } } @Override public void processInsertion(Chunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, Chunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightKeys, Chunk<Values> nextRightValue, WritableRowRedirection rowRedirection, RowSetBuilderRandom modifiedBuilder, boolean endsWithLastValue, boolean disallowExactMatch) { processInsertion(leftStampValues.asDoubleChunk(), leftStampKeys, rightStampChunk.asDoubleChunk(), rightKeys, nextRightValue.asDoubleChunk(), rowRedirection, modifiedBuilder, endsWithLastValue, disallowExactMatch); } private static void processInsertion(DoubleChunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, DoubleChunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightKeys, DoubleChunk<Values> nextRightValue, WritableRowRedirection rowRedirection, RowSetBuilderRandom modifiedBuilder, boolean endsWithLastValue, boolean disallowExactMatch) { // We've already filtered out duplicate right stamps by the time we get here, which means that the rightStampChunk // contains only values that are the last in any given run; and thus are possible matches. // We binary search in the left for the first value >=, everything up until the next extant right value (contained // in the nextRightValue chunk) should be re-stamped with our value int leftLowIdx = 0; for (int ii = 0; ii < rightStampChunk.size(); ++ii) { final double rightStampValue = rightStampChunk.get(ii); leftLowIdx = findFirstResponsiveLeft(leftLowIdx, leftStampValues, disallowExactMatch, rightStampValue); final long rightStampKey = rightKeys.get(ii); if (ii == rightStampChunk.size() - 1 && endsWithLastValue) { while (leftLowIdx < leftStampKeys.size()) { final long leftKey = leftStampKeys.get(leftLowIdx); rowRedirection.putVoid(leftKey, rightStampKey); modifiedBuilder.addKey(leftKey); leftLowIdx++; } } else { final double nextRight = nextRightValue.get(ii); while (leftLowIdx < leftStampKeys.size()) { final double leftValue = leftStampValues.get(leftLowIdx); if (disallowExactMatch ? leq(leftValue, nextRight) : lt(leftValue, nextRight)) { final long leftKey = leftStampKeys.get(leftLowIdx); rowRedirection.putVoid(leftKey, rightStampKey); modifiedBuilder.addKey(leftKey); leftLowIdx++; } else { break; } } } } } @Override public int findModified(int first, Chunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, RowRedirection rowRedirection, Chunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightStampIndices, RowSetBuilderRandom modifiedBuilder, boolean disallowExactMatch) { return findModified(first, leftStampValues.asDoubleChunk(), leftStampKeys, rowRedirection, rightStampChunk.asDoubleChunk(), rightStampIndices, modifiedBuilder, disallowExactMatch); } private static int findModified(int leftLowIdx, DoubleChunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, RowRedirection rowRedirection, DoubleChunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightStampIndices, RowSetBuilderRandom modifiedBuilder, boolean disallowExactMatch) { for (int ii = 0; ii < rightStampChunk.size(); ++ii) { final double rightStampValue = rightStampChunk.get(ii); // now find the lowest left value leq (lt) than rightStampValue leftLowIdx = findFirstResponsiveLeft(leftLowIdx, leftStampValues, disallowExactMatch, rightStampValue); final long rightStampKey = rightStampIndices.get(ii); int checkIdx = leftLowIdx; while (checkIdx < leftStampValues.size() && rowRedirection.get(leftStampKeys.get(checkIdx)) == rightStampKey) { modifiedBuilder.addKey(leftStampKeys.get(checkIdx)); checkIdx++; } } return leftLowIdx; } @Override public void applyShift(Chunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, Chunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightStampKeys, long shiftDelta, WritableRowRedirection rowRedirection, boolean disallowExactMatch) { applyShift(leftStampValues.asDoubleChunk(), leftStampKeys, rightStampChunk.asDoubleChunk(), rightStampKeys, shiftDelta, rowRedirection, disallowExactMatch); } private void applyShift(DoubleChunk<Values> leftStampValues, LongChunk<RowKeys> leftStampKeys, DoubleChunk<? extends Values> rightStampChunk, LongChunk<RowKeys> rightStampKeys, long shiftDelta, WritableRowRedirection rowRedirection, boolean disallowExactMatch) { int leftLowIdx = 0; for (int ii = 0; ii < rightStampChunk.size(); ++ii) { final double rightStampValue = rightStampChunk.get(ii); // now find the lowest left value leq (lt) than rightStampValue leftLowIdx = findFirstResponsiveLeft(leftLowIdx, leftStampValues, disallowExactMatch, rightStampValue); final long rightStampKey = rightStampKeys.get(ii); int checkIdx = leftLowIdx; while (checkIdx < leftStampValues.size() && rowRedirection.get(leftStampKeys.get(checkIdx)) == rightStampKey) { rowRedirection.putVoid(leftStampKeys.get(checkIdx), rightStampKey + shiftDelta); checkIdx++; } } } private static int findFirstResponsiveLeft(int leftLowIdx, DoubleChunk<Values> leftStampValues, boolean disallowExactMatch, double rightStampValue) { int leftHighIdx = leftStampValues.size(); while (leftLowIdx < leftHighIdx) { final int leftMidIdx = (leftHighIdx + leftLowIdx) >>> 1; final double leftMidValue = leftStampValues.get(leftMidIdx); final int comparison = doComparison(leftMidValue, rightStampValue); final boolean moveLow = disallowExactMatch ? comparison <= 0 : comparison < 0; if (moveLow) { leftLowIdx = leftMidIdx + 1; } else { leftHighIdx = leftMidIdx; } } return leftLowIdx; } // region comparison functions private static int doComparison(double lhs, double rhs) { return -1 * DoubleComparisons.compare(lhs, rhs); } // endregion comparison functions private static boolean lt(double lhs, double rhs) { return doComparison(lhs, rhs) < 0; } private static boolean leq(double lhs, double rhs) { return doComparison(lhs, rhs) <= 0; } }
package com.richard.novel.http.entity.book; import com.google.gson.annotations.SerializedName; import java.io.Serializable; import io.objectbox.annotation.Entity; import io.objectbox.annotation.Id; /** * Created by XiaoU on 2018/9/28. */ @Entity public class BookCategory implements Serializable{ /** * updateTime : 1521785976000 * createTime : 1521785976000 * code : 102 * parent_code : 0 * name : 心理学 * status : 0 * comment : null * icon : * media_type : 1000 */ @Id private long boxId; private long updateTime; private long createTime; @SerializedName("code") private long codeX; private int parent_code; private String name; private int status; private String comment; private String icon; private int media_type; public BookCategory() { } public BookCategory(long codeX, String name) { this.codeX = codeX; this.name = name; } public long getBoxId() { return boxId; } public void setBoxId(long boxId) { this.boxId = boxId; } public long getUpdateTime() { return updateTime; } public void setUpdateTime(long updateTime) { this.updateTime = updateTime; } public long getCreateTime() { return createTime; } public void setCreateTime(long createTime) { this.createTime = createTime; } public long getCodeX() { return codeX; } public void setCodeX(long codeX) { this.codeX = codeX; } public int getParent_code() { return parent_code; } public void setParent_code(int parent_code) { this.parent_code = parent_code; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public String getComment() { return comment; } public void setComment(String comment) { this.comment = comment; } public String getIcon() { return icon; } public void setIcon(String icon) { this.icon = icon; } public int getMedia_type() { return media_type; } public void setMedia_type(int media_type) { this.media_type = media_type; } }
// -------------------------------------------------------------------------------- // Copyright 2002-2022 Echo Three, LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // -------------------------------------------------------------------------------- package com.echothree.control.user.search.common.edit; import com.echothree.control.user.search.common.spec.SearchDefaultOperatorSpec; public interface SearchDefaultOperatorEdit extends SearchDefaultOperatorSpec, SearchDefaultOperatorDescriptionEdit { String getIsDefault(); void setIsDefault(String isDefault); String getSortOrder(); void setSortOrder(String sortOrder); }
/* * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.drs.model; import java.io.Serializable; import javax.annotation.Generated; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/drs-2020-02-26/TerminateRecoveryInstances" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class TerminateRecoveryInstancesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * The Job for terminating the Recovery Instances. * </p> */ private Job job; /** * <p> * The Job for terminating the Recovery Instances. * </p> * * @param job * The Job for terminating the Recovery Instances. */ public void setJob(Job job) { this.job = job; } /** * <p> * The Job for terminating the Recovery Instances. * </p> * * @return The Job for terminating the Recovery Instances. */ public Job getJob() { return this.job; } /** * <p> * The Job for terminating the Recovery Instances. * </p> * * @param job * The Job for terminating the Recovery Instances. * @return Returns a reference to this object so that method calls can be chained together. */ public TerminateRecoveryInstancesResult withJob(Job job) { setJob(job); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getJob() != null) sb.append("Job: ").append(getJob()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof TerminateRecoveryInstancesResult == false) return false; TerminateRecoveryInstancesResult other = (TerminateRecoveryInstancesResult) obj; if (other.getJob() == null ^ this.getJob() == null) return false; if (other.getJob() != null && other.getJob().equals(this.getJob()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getJob() == null) ? 0 : getJob().hashCode()); return hashCode; } @Override public TerminateRecoveryInstancesResult clone() { try { return (TerminateRecoveryInstancesResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.service.client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Response; import org.glassfish.jersey.client.proxy.WebResourceFactory; import org.locationtech.geowave.service.ConfigService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ConfigServiceClient implements ConfigService { private static final Logger LOGGER = LoggerFactory.getLogger(ConfigServiceClient.class); private final ConfigService configService; public ConfigServiceClient(final String baseUrl) { this(baseUrl, null, null); } public ConfigServiceClient(final String baseUrl, final String user, final String password) { final WebTarget target = ClientBuilder.newClient().target(baseUrl); configService = WebResourceFactory.newResource(ConfigService.class, target); } @Override public Response list(final String filter) { final Response resp = configService.list(filter); resp.bufferEntity(); return resp; } public Response list() { return configService.list(null); } public Response configGeoServer(final String GeoServerURL) { return configGeoServer( GeoServerURL, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null); } @Override public Response configGeoServer( final String GeoServerURL, final String username, final String pass, final String workspace, final String sslSecurityProtocol, final String sslTrustStorePath, final String sslTrustStorePassword, final String sslTrustStoreType, final String sslTruststoreProvider, final String sslTrustManagerAlgorithm, final String sslTrustManagerProvider, final String sslKeyStorePath, final String sslKeyStorePassword, final String sslKeyStoreProvider, final String sslKeyPassword, final String sslKeyStoreType, final String sslKeyManagerAlgorithm, final String sslKeyManagerProvider) { final Response resp = configService.configGeoServer( GeoServerURL, username, pass, workspace, sslSecurityProtocol, sslTrustStorePath, sslTrustStorePassword, sslTrustStoreType, sslTruststoreProvider, sslTrustManagerAlgorithm, sslTrustManagerProvider, sslKeyStorePath, sslKeyStorePassword, sslKeyStoreProvider, sslKeyPassword, sslKeyStoreType, sslKeyManagerAlgorithm, sslKeyManagerProvider); return resp; } @Override public Response configHDFS(final String HDFSDefaultFSURL) { final Response resp = configService.configHDFS(HDFSDefaultFSURL); return resp; } public Response set(final String name, final String value) { return set(name, value, null); } @Override public Response set(final String name, final String value, final Boolean password) { final Response resp = configService.set(name, value, password); return resp; } }
/* * Copyright 2021 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.car.app; import static androidx.lifecycle.Lifecycle.State.DESTROYED; import static com.google.common.truth.Truth.assertThat; import static org.robolectric.Shadows.shadowOf; import android.app.Application; import android.content.ComponentName; import android.content.Intent; import android.os.Bundle; import android.os.RemoteException; import androidx.test.core.app.ActivityScenario; import androidx.test.core.app.ApplicationProvider; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.internal.DoNotInstrument; import org.robolectric.shadows.ShadowActivity; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** Tests for {@link CarAppPermissionActivity}. */ @RunWith(RobolectricTestRunner.class) @DoNotInstrument public class CarAppPermissionActivityTest { @Mock private OnRequestPermissionsListener mMockListener; private final List<String> mPermisssionsRequested = new ArrayList<>(); private ActivityScenario<CarAppPermissionActivity> mActivity; private Application mApplication; @Before public void setUp() throws RemoteException { MockitoAnnotations.initMocks(this); mApplication = ApplicationProvider.getApplicationContext(); mPermisssionsRequested.add("foo"); mPermisssionsRequested.add("bar"); } @Test public void onCreate_requestPermissionAction_requestsPermissions() { setupActivity(CarContext.REQUEST_PERMISSIONS_ACTION); mActivity.onActivity(activity -> { ShadowActivity shadowActivity = shadowOf(activity); ShadowActivity.PermissionsRequest request = shadowActivity.getLastRequestedPermission(); assertThat(request.requestedPermissions).isEqualTo( mPermisssionsRequested.toArray(new String[0])); }); } @Test public void onCreate_notARequestPermissionAction_finishes() { setupActivity("foo"); assertThat(mActivity.getState()).isEqualTo(DESTROYED); } private Intent createLaunchIntent(String action) { Bundle extras = new Bundle(2); extras.putStringArray(CarContext.EXTRA_PERMISSIONS_KEY, mPermisssionsRequested.toArray(new String[0])); extras.putBinder(CarContext.EXTRA_ON_REQUEST_PERMISSIONS_RESULT_LISTENER_KEY, new IOnRequestPermissionsListener.Stub() { @SuppressWarnings("unckecked") @Override public void onRequestPermissionsResult(String[] approvedPermissions, String[] rejectedPermissions) { mMockListener.onRequestPermissionsResult( Arrays.asList(approvedPermissions), Arrays.asList(rejectedPermissions)); } }.asBinder()); return new Intent(action).setComponent( new ComponentName(mApplication, CarAppPermissionActivity.class)).putExtras(extras); } private void setupActivity(String action) { mActivity = ActivityScenario.launch(createLaunchIntent(action)); } }
package com.fundgroup.backend.service; import com.fundgroup.backend.entity.FundRiskAssessment; import java.util.List; public interface FundRiskAssessmentService { List<FundRiskAssessment> getAll(); }
package us.kbase.cs.orm.dumpers; import us.kbase.cs.orm.Column; import us.kbase.cs.orm.ColumnType; import us.kbase.cs.orm.Dumper; import java.io.IOException; public class ExperimentalUnit extends Dumper { @Column(name="id", type=ColumnType.STRING) private String id; @Column(name="source_id", type=ColumnType.STRING) private String sourceId; private ExperimentalUnit() throws IOException{ super(); } public static ExperimentalUnit newDumper() throws IOException{ return new ExperimentalUnit(); } public String getId() { return id; } public void setId(String id) { this.id = id; } public ExperimentalUnit withId(String id) { this.id = id; return this; } public String getSource_id() { return sourceId; } public void setSource_id(String sourceId) { this.sourceId = sourceId; } public ExperimentalUnit withSource_id(String sourceId) { this.sourceId = sourceId; return this; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.data.management.retention.version; import java.io.IOException; import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import org.testng.Assert; import org.testng.annotations.Test; import org.apache.gobblin.data.management.retention.version.HiveDatasetVersionCleaner; public class HiveDatasetVersionCleanerTest { private static Config config = ConfigFactory.parseMap(ImmutableMap.<String, String> of( HiveDatasetVersionCleaner.SHOULD_REPLACE_PARTITION_KEY, "true")); private static String replacedDb = "db_orc"; private static String replacedTable = "table_orc"; private static String replacementDb = "db_avro"; private static String replacementTable = "table_avro"; @Test public void testShouldReplacePartitionHappyPath() throws IOException { // Happy path 1: // - Replacement is enabled // - Replacement DB, Table names are specified and are different than Replaced DB and Table names Assert.assertTrue(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.of(replacementDb), Optional.of(replacementTable)), "Replaced and replacement db / table are different. " + "This should have been true. "); // Happy path 2: // - Replacement is enabled // - Replacement DB, Table names are specified and Replaced DB name is different Assert.assertTrue(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.of(replacementDb), Optional.of(replacedTable)), "Replaced and replacement db / table are different. " + "This should have been true. "); // Happy path 3: // - Replacement is enabled // - Replacement DB, Table names are specified and Replaced Table name is different Assert.assertTrue(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.of(replacedDb), Optional.of(replacementTable)), "Replaced and replacement db / table are different. " + "This should have been true. "); } @Test public void testShouldReplacePartitionDisabledByConfig() throws IOException { Config config = ConfigFactory.parseMap(ImmutableMap.<String, String> of( HiveDatasetVersionCleaner.SHOULD_REPLACE_PARTITION_KEY, "false")); Assert.assertFalse(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.of(replacementDb), Optional.of(replacementTable)), "Property governing partition replacement is set to false. " + "This should have been false. "); } @Test public void testShouldReplacePartitionDisabledByCodePath() throws IOException { // Replacement DB and Table names are same as Replaced DB and Table names Assert.assertFalse(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.of(replacedDb), Optional.of(replacedTable)), "Replaced and replacement db / table are same. " + "This should have been false. "); // Replaced DB name is missing Assert.assertFalse(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.<String>absent(), Optional.of(replacementTable)), "Replacement DB name is missing. " + "This should have been false. "); // Replaced Table name is missing Assert.assertFalse(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.of(replacementDb), Optional.<String>absent()), "Replacement table name is missing. " + "This should have been false. "); // Both DB and Table names are missing Assert.assertFalse(HiveDatasetVersionCleaner.shouldReplacePartition(config, replacedDb, replacedTable, Optional.<String>absent(), Optional.<String>absent()), "Replacement DB and table names are missing. " + "This should have been false. "); } }
package com.jackjonson.example; import android.os.Bundle; import io.flutter.app.FlutterActivity; import io.flutter.plugins.GeneratedPluginRegistrant; public class MainActivity extends FlutterActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); GeneratedPluginRegistrant.registerWith(this); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.plugin.hive; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import com.google.common.collect.ListMultimap; import com.google.common.collect.Streams; import com.google.common.io.CharStreams; import com.google.common.util.concurrent.ListenableFuture; import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; import io.prestosql.plugin.hive.HiveSplit.BucketConversion; import io.prestosql.plugin.hive.metastore.Column; import io.prestosql.plugin.hive.metastore.Partition; import io.prestosql.plugin.hive.metastore.Table; import io.prestosql.plugin.hive.util.HiveBucketing.BucketingVersion; import io.prestosql.plugin.hive.util.HiveBucketing.HiveBucketFilter; import io.prestosql.plugin.hive.util.HiveFileIterator; import io.prestosql.plugin.hive.util.InternalHiveSplitFactory; import io.prestosql.plugin.hive.util.ResumableTask; import io.prestosql.plugin.hive.util.ResumableTasks; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.type.TypeManager; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat; import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatusWithId; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapreduce.MRConfig; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.lang.annotation.Annotation; import java.nio.charset.StandardCharsets; import java.security.Principal; import java.util.ArrayList; import java.util.Arrays; import java.util.Deque; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.OptionalInt; import java.util.Properties; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.Executor; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.BooleanSupplier; import java.util.function.IntPredicate; import java.util.function.Supplier; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.util.concurrent.Futures.immediateFuture; import static io.airlift.concurrent.MoreFutures.addExceptionCallback; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_BAD_DATA; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_INVALID_METADATA; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_INVALID_PARTITION_VALUE; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_UNKNOWN_ERROR; import static io.prestosql.plugin.hive.HivePartitionManager.partitionMatches; import static io.prestosql.plugin.hive.HiveSessionProperties.getMaxInitialSplitSize; import static io.prestosql.plugin.hive.HiveSessionProperties.isForceLocalScheduling; import static io.prestosql.plugin.hive.metastore.MetastoreUtil.getHiveSchema; import static io.prestosql.plugin.hive.metastore.MetastoreUtil.getPartitionLocation; import static io.prestosql.plugin.hive.s3select.S3SelectPushdown.shouldEnablePushdownForTable; import static io.prestosql.plugin.hive.util.ConfigurationUtils.toJobConf; import static io.prestosql.plugin.hive.util.HiveFileIterator.NestedDirectoryPolicy.FAIL; import static io.prestosql.plugin.hive.util.HiveFileIterator.NestedDirectoryPolicy.IGNORED; import static io.prestosql.plugin.hive.util.HiveFileIterator.NestedDirectoryPolicy.RECURSE; import static io.prestosql.plugin.hive.util.HiveUtil.checkCondition; import static io.prestosql.plugin.hive.util.HiveUtil.getFooterCount; import static io.prestosql.plugin.hive.util.HiveUtil.getHeaderCount; import static io.prestosql.plugin.hive.util.HiveUtil.getInputFormat; import static io.prestosql.plugin.hive.util.HiveUtil.getPartitionKeyColumnHandles; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static java.lang.Integer.parseInt; import static java.lang.Math.max; import static java.lang.String.format; import static java.util.Collections.max; import static java.util.Objects.requireNonNull; import static org.apache.hadoop.hive.common.FileUtils.HIDDEN_FILES_PATH_FILTER; public class BackgroundHiveSplitLoader implements HiveSplitLoader { // See https://github.com/apache/hive/commit/ffee30e6267e85f00a22767262192abb9681cfb7#diff-5fe26c36b4e029dcd344fc5d484e7347R165 private static final Pattern BUCKET_WITH_OPTIONAL_ATTEMPT_ID_PATTERN = Pattern.compile("bucket_(\\d+)(_\\d+)?$"); private static final Iterable<Pattern> BUCKET_PATTERNS = ImmutableList.of( // legacy Presto naming pattern (current version matches Hive) Pattern.compile("\\d{8}_\\d{6}_\\d{5}_[a-z0-9]{5}_bucket-(\\d+)(?:[-_.].*)?"), // Hive naming pattern per `org.apache.hadoop.hive.ql.exec.Utilities#getBucketIdFromFile()` Pattern.compile("(\\d+)_\\d+.*"), // Hive ACID with optional direct insert attempt id BUCKET_WITH_OPTIONAL_ATTEMPT_ID_PATTERN); private static final ListenableFuture<?> COMPLETED_FUTURE = immediateFuture(null); private final Table table; private final TupleDomain<? extends ColumnHandle> compactEffectivePredicate; private final Supplier<TupleDomain<ColumnHandle>> dynamicFilterSupplier; private final TypeManager typeManager; private final Optional<BucketSplitInfo> tableBucketInfo; private final HdfsEnvironment hdfsEnvironment; private final HdfsContext hdfsContext; private final NamenodeStats namenodeStats; private final DirectoryLister directoryLister; private final int loaderConcurrency; private final boolean recursiveDirWalkerEnabled; private final boolean ignoreAbsentPartitions; private final Executor executor; private final ConnectorSession session; private final ConcurrentLazyQueue<HivePartitionMetadata> partitions; private final Deque<Iterator<InternalHiveSplit>> fileIterators = new ConcurrentLinkedDeque<>(); private final Optional<ValidWriteIdList> validWriteIds; // Purpose of this lock: // * Write lock: when you need a consistent view across partitions, fileIterators, and hiveSplitSource. // * Read lock: when you need to modify any of the above. // Make sure the lock is held throughout the period during which they may not be consistent with each other. // Details: // * When write lock is acquired, except the holder, no one can do any of the following: // ** poll from (or check empty) partitions // ** poll from (or check empty) or push to fileIterators // ** push to hiveSplitSource // * When any of the above three operations is carried out, either a read lock or a write lock must be held. // * When a series of operations involving two or more of the above three operations are carried out, the lock // must be continuously held throughout the series of operations. // Implications: // * if you hold a read lock but not a write lock, you can do any of the above three operations, but you may // see a series of operations involving two or more of the operations carried out half way. private final ReadWriteLock taskExecutionLock = new ReentrantReadWriteLock(); private HiveSplitSource hiveSplitSource; private volatile boolean stopped; public BackgroundHiveSplitLoader( Table table, Iterable<HivePartitionMetadata> partitions, TupleDomain<? extends ColumnHandle> compactEffectivePredicate, Supplier<TupleDomain<ColumnHandle>> dynamicFilterSupplier, TypeManager typeManager, Optional<BucketSplitInfo> tableBucketInfo, ConnectorSession session, HdfsEnvironment hdfsEnvironment, NamenodeStats namenodeStats, DirectoryLister directoryLister, Executor executor, int loaderConcurrency, boolean recursiveDirWalkerEnabled, boolean ignoreAbsentPartitions, Optional<ValidWriteIdList> validWriteIds) { this.table = table; this.compactEffectivePredicate = compactEffectivePredicate; this.dynamicFilterSupplier = dynamicFilterSupplier; this.typeManager = typeManager; this.tableBucketInfo = tableBucketInfo; this.loaderConcurrency = loaderConcurrency; this.session = session; this.hdfsEnvironment = hdfsEnvironment; this.namenodeStats = namenodeStats; this.directoryLister = directoryLister; this.recursiveDirWalkerEnabled = recursiveDirWalkerEnabled; this.ignoreAbsentPartitions = ignoreAbsentPartitions; this.executor = executor; this.partitions = new ConcurrentLazyQueue<>(partitions); this.hdfsContext = new HdfsContext(session, table.getDatabaseName(), table.getTableName()); this.validWriteIds = requireNonNull(validWriteIds, "validWriteIds is null"); } @Override public void start(HiveSplitSource splitSource) { this.hiveSplitSource = splitSource; for (int i = 0; i < loaderConcurrency; i++) { ListenableFuture<?> future = ResumableTasks.submit(executor, new HiveSplitLoaderTask()); addExceptionCallback(future, hiveSplitSource::fail); // best effort; hiveSplitSource could be already completed } } @Override public void stop() { stopped = true; } private class HiveSplitLoaderTask implements ResumableTask { @Override public TaskStatus process() { while (true) { if (stopped) { return TaskStatus.finished(); } ListenableFuture<?> future; taskExecutionLock.readLock().lock(); try { future = loadSplits(); } catch (Throwable e) { if (e instanceof IOException) { e = new PrestoException(HIVE_FILESYSTEM_ERROR, e); } else if (!(e instanceof PrestoException)) { e = new PrestoException(HIVE_UNKNOWN_ERROR, e); } // Fail the split source before releasing the execution lock // Otherwise, a race could occur where the split source is completed before we fail it. hiveSplitSource.fail(e); checkState(stopped); return TaskStatus.finished(); } finally { taskExecutionLock.readLock().unlock(); } invokeNoMoreSplitsIfNecessary(); if (!future.isDone()) { return TaskStatus.continueOn(future); } } } } private void invokeNoMoreSplitsIfNecessary() { taskExecutionLock.readLock().lock(); try { // This is an opportunistic check to avoid getting the write lock unnecessarily if (!partitions.isEmpty() || !fileIterators.isEmpty()) { return; } } catch (Exception e) { hiveSplitSource.fail(e); checkState(stopped, "Task is not marked as stopped even though it failed"); return; } finally { taskExecutionLock.readLock().unlock(); } taskExecutionLock.writeLock().lock(); try { // the write lock guarantees that no one is operating on the partitions, fileIterators, or hiveSplitSource, or half way through doing so. if (partitions.isEmpty() && fileIterators.isEmpty()) { // It is legal to call `noMoreSplits` multiple times or after `stop` was called. // Nothing bad will happen if `noMoreSplits` implementation calls methods that will try to obtain a read lock because the lock is re-entrant. hiveSplitSource.noMoreSplits(); } } catch (Exception e) { hiveSplitSource.fail(e); checkState(stopped, "Task is not marked as stopped even though it failed"); } finally { taskExecutionLock.writeLock().unlock(); } } private ListenableFuture<?> loadSplits() throws IOException { Iterator<InternalHiveSplit> splits = fileIterators.poll(); if (splits == null) { HivePartitionMetadata partition = partitions.poll(); if (partition == null) { return COMPLETED_FUTURE; } return loadPartition(partition); } while (splits.hasNext() && !stopped) { ListenableFuture<?> future = hiveSplitSource.addToQueue(splits.next()); if (!future.isDone()) { fileIterators.addFirst(splits); return future; } } // No need to put the iterator back, since it's either empty or we've stopped return COMPLETED_FUTURE; } private ListenableFuture<?> loadPartition(HivePartitionMetadata partition) throws IOException { HivePartition hivePartition = partition.getHivePartition(); String partitionName = hivePartition.getPartitionId(); Properties schema = getPartitionSchema(table, partition.getPartition()); List<HivePartitionKey> partitionKeys = getPartitionKeys(table, partition.getPartition()); TupleDomain<HiveColumnHandle> effectivePredicate = compactEffectivePredicate.transform(HiveColumnHandle.class::cast); List<HiveColumnHandle> partitionColumns = getPartitionKeyColumnHandles(table, typeManager); BooleanSupplier partitionMatchSupplier = () -> partitionMatches(partitionColumns, dynamicFilterSupplier.get(), hivePartition); if (!partitionMatchSupplier.getAsBoolean()) { // Avoid listing files and creating splits from a partition if it has been pruned due to dynamic filters return COMPLETED_FUTURE; } Path path = new Path(getPartitionLocation(table, partition.getPartition())); Configuration configuration = hdfsEnvironment.getConfiguration(hdfsContext, path); InputFormat<?, ?> inputFormat = getInputFormat(configuration, schema, false); FileSystem fs = hdfsEnvironment.getFileSystem(hdfsContext, path); boolean s3SelectPushdownEnabled = shouldEnablePushdownForTable(session, table, path.toString(), partition.getPartition()); if (inputFormat instanceof SymlinkTextInputFormat) { if (tableBucketInfo.isPresent()) { throw new PrestoException(NOT_SUPPORTED, "Bucketed table in SymlinkTextInputFormat is not yet supported"); } // TODO: This should use an iterator like the HiveFileIterator ListenableFuture<?> lastResult = COMPLETED_FUTURE; List<Path> targetPaths = hdfsEnvironment.doAs( hdfsContext.getIdentity().getUser(), () -> getTargetPathsFromSymlink(fs, path)); for (Path targetPath : targetPaths) { // The input should be in TextInputFormat. TextInputFormat targetInputFormat = new TextInputFormat(); // the splits must be generated using the file system for the target path // get the configuration for the target path -- it may be a different hdfs instance FileSystem targetFilesystem = hdfsEnvironment.getFileSystem(hdfsContext, targetPath); JobConf targetJob = toJobConf(targetFilesystem.getConf()); targetJob.setInputFormat(TextInputFormat.class); Optional<Principal> principal = hdfsContext.getIdentity().getPrincipal(); if (principal.isPresent()) { targetJob.set(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME); targetJob.set(MRConfig.MASTER_USER_NAME, principal.get().getName()); } targetInputFormat.configure(targetJob); FileInputFormat.setInputPaths(targetJob, targetPath); InputSplit[] targetSplits = hdfsEnvironment.doAs( hdfsContext.getIdentity().getUser(), () -> targetInputFormat.getSplits(targetJob, 0)); InternalHiveSplitFactory splitFactory = new InternalHiveSplitFactory( targetFilesystem, partitionName, inputFormat, schema, partitionKeys, effectivePredicate, partitionMatchSupplier, partition.getTableToPartitionMapping(), Optional.empty(), getMaxInitialSplitSize(session), isForceLocalScheduling(session), s3SelectPushdownEnabled); lastResult = addSplitsToSource(targetSplits, splitFactory); if (stopped) { return COMPLETED_FUTURE; } } return lastResult; } Optional<BucketConversion> bucketConversion = Optional.empty(); boolean bucketConversionRequiresWorkerParticipation = false; if (partition.getPartition().isPresent()) { Optional<HiveBucketProperty> partitionBucketProperty = partition.getPartition().get().getStorage().getBucketProperty(); if (tableBucketInfo.isPresent() && partitionBucketProperty.isPresent()) { int readBucketCount = tableBucketInfo.get().getReadBucketCount(); BucketingVersion bucketingVersion = partitionBucketProperty.get().getBucketingVersion(); // TODO can partition's bucketing_version be different from table's? int partitionBucketCount = partitionBucketProperty.get().getBucketCount(); // Validation was done in HiveSplitManager#getPartitionMetadata. // Here, it's just trying to see if its needs the BucketConversion. if (readBucketCount != partitionBucketCount) { bucketConversion = Optional.of(new BucketConversion(bucketingVersion, readBucketCount, partitionBucketCount, tableBucketInfo.get().getBucketColumns())); if (readBucketCount > partitionBucketCount) { bucketConversionRequiresWorkerParticipation = true; } } } } InternalHiveSplitFactory splitFactory = new InternalHiveSplitFactory( fs, partitionName, inputFormat, schema, partitionKeys, effectivePredicate, partitionMatchSupplier, partition.getTableToPartitionMapping(), bucketConversionRequiresWorkerParticipation ? bucketConversion : Optional.empty(), getMaxInitialSplitSize(session), isForceLocalScheduling(session), s3SelectPushdownEnabled); // To support custom input formats, we want to call getSplits() // on the input format to obtain file splits. if (shouldUseFileSplitsFromInputFormat(inputFormat)) { if (tableBucketInfo.isPresent()) { throw new PrestoException(NOT_SUPPORTED, "Presto cannot read bucketed partition in an input format with UseFileSplitsFromInputFormat annotation: " + inputFormat.getClass().getSimpleName()); } if (AcidUtils.isTransactionalTable(table.getParameters())) { throw new PrestoException(NOT_SUPPORTED, "Hive transactional tables in an input format with UseFileSplitsFromInputFormat annotation are not supported: " + inputFormat.getClass().getSimpleName()); } JobConf jobConf = toJobConf(configuration); FileInputFormat.setInputPaths(jobConf, path); InputSplit[] splits = inputFormat.getSplits(jobConf, 0); return addSplitsToSource(splits, splitFactory); } List<Path> readPaths; List<HdfsFileStatusWithId> fileStatusOriginalFiles = ImmutableList.of(); AcidInfo.Builder acidInfoBuilder = AcidInfo.builder(path); if (AcidUtils.isTransactionalTable(table.getParameters())) { AcidUtils.Directory directory = hdfsEnvironment.doAs(hdfsContext.getIdentity().getUser(), () -> AcidUtils.getAcidState( path, configuration, validWriteIds.orElseThrow(() -> new IllegalStateException("No validWriteIds present")), false, true)); if (AcidUtils.isFullAcidTable(table.getParameters())) { // From Hive version >= 3.0, delta/base files will always have file '_orc_acid_version' with value >= '2'. Path baseOrDeltaPath = directory.getBaseDirectory() != null ? directory.getBaseDirectory() : (directory.getCurrentDirectories().size() > 0 ? directory.getCurrentDirectories().get(0).getPath() : null); if (baseOrDeltaPath != null && AcidUtils.OrcAcidVersion.getAcidVersionFromMetaFile(baseOrDeltaPath, fs) < 2) { throw new PrestoException(NOT_SUPPORTED, "Hive transactional tables are supported with Hive 3.0 and only after a major compaction has been run"); } } readPaths = new ArrayList<>(); // base if (directory.getBaseDirectory() != null) { readPaths.add(directory.getBaseDirectory()); } // delta directories for (AcidUtils.ParsedDelta delta : directory.getCurrentDirectories()) { if (!delta.isDeleteDelta()) { readPaths.add(delta.getPath()); } } // Create a registry of delete_delta directories for the partition for (AcidUtils.ParsedDelta delta : directory.getCurrentDirectories()) { if (delta.isDeleteDelta()) { acidInfoBuilder.addDeleteDelta(delta.getPath(), delta.getMinWriteId(), delta.getMaxWriteId(), delta.getStatementId()); } } // initialize original files status list if present fileStatusOriginalFiles = directory.getOriginalFiles(); for (HdfsFileStatusWithId hdfsFileStatusWithId : fileStatusOriginalFiles) { Path originalFilePath = hdfsFileStatusWithId.getFileStatus().getPath(); long originalFileLength = hdfsFileStatusWithId.getFileStatus().getLen(); if (originalFileLength == 0) { continue; } // Hive requires "original" files of transactional tables to conform to the bucketed tables naming pattern, to match them with delete deltas. int bucketId = getRequiredBucketNumber(originalFilePath); acidInfoBuilder.addOriginalFile(originalFilePath, originalFileLength, bucketId); } } else { readPaths = ImmutableList.of(path); } // S3 Select pushdown works at the granularity of individual S3 objects, // therefore we must not split files when it is enabled. // Skip header / footer lines are not splittable except for a special case when skip.header.line.count=1 boolean splittable = !s3SelectPushdownEnabled && getFooterCount(schema) == 0 && getHeaderCount(schema) <= 1; // Bucketed partitions are fully loaded immediately since all files must be loaded to determine the file to bucket mapping if (tableBucketInfo.isPresent()) { ListenableFuture<?> lastResult = immediateFuture(null); // TODO document in addToQueue() that it is sufficient to hold on to last returned future for (Path readPath : readPaths) { // list all files in the partition List<LocatedFileStatus> files = new ArrayList<>(); try { Iterators.addAll(files, new HiveFileIterator(table, readPath, fs, directoryLister, namenodeStats, FAIL, ignoreAbsentPartitions)); } catch (HiveFileIterator.NestedDirectoryNotAllowedException e) { // Fail here to be on the safe side. This seems to be the same as what Hive does throw new PrestoException( HIVE_INVALID_BUCKET_FILES, format("Hive table '%s' is corrupt. Found sub-directory in bucket directory for partition: %s", table.getSchemaTableName(), splitFactory.getPartitionName())); } lastResult = hiveSplitSource.addToQueue(getBucketedSplits(files, splitFactory, tableBucketInfo.get(), bucketConversion, splittable, acidInfoBuilder.build())); } for (HdfsFileStatusWithId hdfsFileStatusWithId : fileStatusOriginalFiles) { List<LocatedFileStatus> locatedFileStatuses = ImmutableList.of((LocatedFileStatus) hdfsFileStatusWithId.getFileStatus()); Optional<AcidInfo> acidInfo = Optional.of(acidInfoBuilder.buildWithRequiredOriginalFiles(getRequiredBucketNumber(hdfsFileStatusWithId.getFileStatus().getPath()))); lastResult = hiveSplitSource.addToQueue(getBucketedSplits(locatedFileStatuses, splitFactory, tableBucketInfo.get(), bucketConversion, splittable, acidInfo)); } return lastResult; } for (Path readPath : readPaths) { fileIterators.addLast(createInternalHiveSplitIterator(readPath, fs, splitFactory, splittable, acidInfoBuilder.build())); } if (!fileStatusOriginalFiles.isEmpty()) { fileIterators.addLast(generateOriginalFilesSplits(splitFactory, fileStatusOriginalFiles, splittable, acidInfoBuilder)); } return COMPLETED_FUTURE; } private Iterator<InternalHiveSplit> generateOriginalFilesSplits( InternalHiveSplitFactory splitFactory, List<HdfsFileStatusWithId> originalFileLocations, boolean splittable, AcidInfo.Builder acidInfoBuilder) { return originalFileLocations.stream() .map(HdfsFileStatusWithId::getFileStatus) .map(fileStatus -> { Optional<AcidInfo> acidInfo = Optional.of(acidInfoBuilder.buildWithRequiredOriginalFiles(getRequiredBucketNumber(fileStatus.getPath()))); return splitFactory.createInternalHiveSplit( (LocatedFileStatus) fileStatus, OptionalInt.empty(), splittable, acidInfo); }) .filter(Optional::isPresent) .map(Optional::get) .iterator(); } private ListenableFuture<?> addSplitsToSource(InputSplit[] targetSplits, InternalHiveSplitFactory splitFactory) throws IOException { ListenableFuture<?> lastResult = COMPLETED_FUTURE; for (InputSplit inputSplit : targetSplits) { Optional<InternalHiveSplit> internalHiveSplit = splitFactory.createInternalHiveSplit((FileSplit) inputSplit); if (internalHiveSplit.isPresent()) { lastResult = hiveSplitSource.addToQueue(internalHiveSplit.get()); } if (stopped) { return COMPLETED_FUTURE; } } return lastResult; } private static boolean shouldUseFileSplitsFromInputFormat(InputFormat<?, ?> inputFormat) { return Arrays.stream(inputFormat.getClass().getAnnotations()) .map(Annotation::annotationType) .map(Class::getSimpleName) .anyMatch(name -> name.equals("UseFileSplitsFromInputFormat")); } private Iterator<InternalHiveSplit> createInternalHiveSplitIterator(Path path, FileSystem fileSystem, InternalHiveSplitFactory splitFactory, boolean splittable, Optional<AcidInfo> acidInfo) { return Streams.stream(new HiveFileIterator(table, path, fileSystem, directoryLister, namenodeStats, recursiveDirWalkerEnabled ? RECURSE : IGNORED, ignoreAbsentPartitions)) .map(status -> splitFactory.createInternalHiveSplit(status, OptionalInt.empty(), splittable, acidInfo)) .filter(Optional::isPresent) .map(Optional::get) .iterator(); } private List<InternalHiveSplit> getBucketedSplits( List<LocatedFileStatus> files, InternalHiveSplitFactory splitFactory, BucketSplitInfo bucketSplitInfo, Optional<BucketConversion> bucketConversion, boolean splittable, Optional<AcidInfo> acidInfo) { int readBucketCount = bucketSplitInfo.getReadBucketCount(); int tableBucketCount = bucketSplitInfo.getTableBucketCount(); int partitionBucketCount = bucketConversion.map(BucketConversion::getPartitionBucketCount).orElse(tableBucketCount); int bucketCount = max(readBucketCount, partitionBucketCount); // build mapping of file name to bucket ListMultimap<Integer, LocatedFileStatus> bucketFiles = ArrayListMultimap.create(); for (LocatedFileStatus file : files) { String fileName = file.getPath().getName(); OptionalInt bucket = getBucketNumber(fileName); if (bucket.isPresent()) { bucketFiles.put(bucket.getAsInt(), file); continue; } // legacy mode requires exactly one file per bucket if (files.size() != partitionBucketCount) { throw new PrestoException(HIVE_INVALID_BUCKET_FILES, format( "Hive table '%s' is corrupt. File '%s' does not match the standard naming pattern, and the number " + "of files in the directory (%s) does not match the declared bucket count (%s) for partition: %s", table.getSchemaTableName(), fileName, files.size(), partitionBucketCount, splitFactory.getPartitionName())); } // sort FileStatus objects per `org.apache.hadoop.hive.ql.metadata.Table#getSortedPaths()` files.sort(null); // use position in sorted list as the bucket number bucketFiles.clear(); for (int i = 0; i < files.size(); i++) { bucketFiles.put(i, files.get(i)); } break; } validateFileBuckets(bucketFiles, partitionBucketCount, table.getSchemaTableName().toString(), splitFactory.getPartitionName()); // convert files internal splits List<InternalHiveSplit> splitList = new ArrayList<>(); for (int bucketNumber = 0; bucketNumber < bucketCount; bucketNumber++) { // Physical bucket #. This determine file name. It also determines the order of splits in the result. int partitionBucketNumber = bucketNumber % partitionBucketCount; // Logical bucket #. Each logical bucket corresponds to a "bucket" from engine's perspective. int readBucketNumber = bucketNumber % readBucketCount; boolean containsEligibleTableBucket = false; boolean containsIneligibleTableBucket = false; for (int tableBucketNumber = bucketNumber % tableBucketCount; tableBucketNumber < tableBucketCount; tableBucketNumber += bucketCount) { // table bucket number: this is used for evaluating "$bucket" filters. if (bucketSplitInfo.isTableBucketEnabled(tableBucketNumber)) { containsEligibleTableBucket = true; } else { containsIneligibleTableBucket = true; } } if (containsEligibleTableBucket && containsIneligibleTableBucket) { throw new PrestoException( NOT_SUPPORTED, "The bucket filter cannot be satisfied. There are restrictions on the bucket filter when all the following is true: " + "1. a table has a different buckets count as at least one of its partitions that is read in this query; " + "2. the table has a different but compatible bucket number with another table in the query; " + "3. some buckets of the table is filtered out from the query, most likely using a filter on \"$bucket\". " + "(table name: " + table.getTableName() + ", table bucket count: " + tableBucketCount + ", " + "partition bucket count: " + partitionBucketCount + ", effective reading bucket count: " + readBucketCount + ")"); } if (containsEligibleTableBucket) { for (LocatedFileStatus file : bucketFiles.get(partitionBucketNumber)) { // OrcDeletedRows will load only delete delta files matching current bucket id, // so we can pass all delete delta locations here, without filtering. splitFactory.createInternalHiveSplit(file, OptionalInt.of(readBucketNumber), splittable, acidInfo) .ifPresent(splitList::add); } } } return splitList; } @VisibleForTesting static void validateFileBuckets(ListMultimap<Integer, LocatedFileStatus> bucketFiles, int partitionBucketCount, String tableName, String partitionName) { if (bucketFiles.isEmpty()) { return; } int highestBucketNumber = max(bucketFiles.keySet()); // validate the bucket number detected from files, fail the query if the highest bucket number detected from file // exceeds the allowed highest number if (highestBucketNumber >= partitionBucketCount) { throw new PrestoException(HIVE_INVALID_BUCKET_FILES, format( "Hive table '%s' is corrupt. The highest bucket number in the directory (%s) exceeds the bucket number range " + "defined by the declared bucket count (%s) for partition: %s", tableName, highestBucketNumber, partitionBucketCount, partitionName)); } } private static int getRequiredBucketNumber(Path path) { return getBucketNumber(path.getName()) .orElseThrow(() -> new IllegalStateException("Cannot get bucket number from path: " + path)); } @VisibleForTesting static OptionalInt getBucketNumber(String name) { for (Pattern pattern : BUCKET_PATTERNS) { Matcher matcher = pattern.matcher(name); if (matcher.matches()) { return OptionalInt.of(parseInt(matcher.group(1))); } } return OptionalInt.empty(); } public static boolean hasAttemptId(String bucketFilename) { Matcher matcher = BUCKET_WITH_OPTIONAL_ATTEMPT_ID_PATTERN.matcher(bucketFilename); return matcher.matches() && matcher.group(2) != null; } private static List<Path> getTargetPathsFromSymlink(FileSystem fileSystem, Path symlinkDir) { try { FileStatus[] symlinks = fileSystem.listStatus(symlinkDir, HIDDEN_FILES_PATH_FILTER); List<Path> targets = new ArrayList<>(); for (FileStatus symlink : symlinks) { try (BufferedReader reader = new BufferedReader(new InputStreamReader(fileSystem.open(symlink.getPath()), StandardCharsets.UTF_8))) { CharStreams.readLines(reader).stream() .map(Path::new) .forEach(targets::add); } } return targets; } catch (IOException e) { throw new PrestoException(HIVE_BAD_DATA, "Error parsing symlinks from: " + symlinkDir, e); } } private static List<HivePartitionKey> getPartitionKeys(Table table, Optional<Partition> partition) { if (partition.isEmpty()) { return ImmutableList.of(); } ImmutableList.Builder<HivePartitionKey> partitionKeys = ImmutableList.builder(); List<Column> keys = table.getPartitionColumns(); List<String> values = partition.get().getValues(); checkCondition(keys.size() == values.size(), HIVE_INVALID_METADATA, "Expected %s partition key values, but got %s", keys.size(), values.size()); for (int i = 0; i < keys.size(); i++) { String name = keys.get(i).getName(); HiveType hiveType = keys.get(i).getType(); if (!hiveType.isSupportedType(table.getStorage().getStorageFormat())) { throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName())); } String value = values.get(i); checkCondition(value != null, HIVE_INVALID_PARTITION_VALUE, "partition key value cannot be null for field: %s", name); partitionKeys.add(new HivePartitionKey(name, value)); } return partitionKeys.build(); } private static Properties getPartitionSchema(Table table, Optional<Partition> partition) { if (partition.isEmpty()) { return getHiveSchema(table); } return getHiveSchema(partition.get(), table); } public static class BucketSplitInfo { private final List<HiveColumnHandle> bucketColumns; private final int tableBucketCount; private final int readBucketCount; private final IntPredicate bucketFilter; public static Optional<BucketSplitInfo> createBucketSplitInfo(Optional<HiveBucketHandle> bucketHandle, Optional<HiveBucketFilter> bucketFilter) { requireNonNull(bucketHandle, "bucketHandle is null"); requireNonNull(bucketFilter, "buckets is null"); if (bucketHandle.isEmpty()) { checkArgument(bucketFilter.isEmpty(), "bucketHandle must be present if bucketFilter is present"); return Optional.empty(); } int tableBucketCount = bucketHandle.get().getTableBucketCount(); int readBucketCount = bucketHandle.get().getReadBucketCount(); if (tableBucketCount != readBucketCount && bucketFilter.isPresent()) { // TODO: remove when supported throw new PrestoException(NOT_SUPPORTED, "Filter on \"$bucket\" is not supported when the table has partitions with different bucket counts"); } List<HiveColumnHandle> bucketColumns = bucketHandle.get().getColumns(); IntPredicate predicate = bucketFilter .<IntPredicate>map(filter -> filter.getBucketsToKeep()::contains) .orElse(bucket -> true); return Optional.of(new BucketSplitInfo(bucketColumns, tableBucketCount, readBucketCount, predicate)); } private BucketSplitInfo(List<HiveColumnHandle> bucketColumns, int tableBucketCount, int readBucketCount, IntPredicate bucketFilter) { this.bucketColumns = ImmutableList.copyOf(requireNonNull(bucketColumns, "bucketColumns is null")); this.tableBucketCount = tableBucketCount; this.readBucketCount = readBucketCount; this.bucketFilter = requireNonNull(bucketFilter, "bucketFilter is null"); } public List<HiveColumnHandle> getBucketColumns() { return bucketColumns; } public int getTableBucketCount() { return tableBucketCount; } public int getReadBucketCount() { return readBucketCount; } /** * Evaluates whether the provided table bucket number passes the bucket predicate. * A bucket predicate can be present in two cases: * <ul> * <li>Filter on "$bucket" column. e.g. {@code "$bucket" between 0 and 100} * <li>Single-value equality filter on all bucket columns. e.g. for a table with two bucketing columns, * {@code bucketCol1 = 'a' AND bucketCol2 = 123} * </ul> */ public boolean isTableBucketEnabled(int tableBucketNumber) { return bucketFilter.test(tableBucketNumber); } } }
package com.sap.cloud.lm.sl.cf.core.helpers.v1_0; import static com.sap.cloud.lm.sl.common.util.TestUtil.getResourceAsString; import static org.mockito.Mockito.when; import java.util.Arrays; import java.util.List; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.mockito.Mockito; import com.google.gson.reflect.TypeToken; import com.sap.cloud.lm.sl.cf.core.dao.ConfigurationEntryDao; import com.sap.cloud.lm.sl.cf.core.dao.filters.ConfigurationFilter; import com.sap.cloud.lm.sl.cf.core.model.ConfigurationEntry; import com.sap.cloud.lm.sl.common.util.JsonUtil; import com.sap.cloud.lm.sl.common.util.TestUtil; import com.sap.cloud.lm.sl.mta.builders.v1_0.PropertiesChainBuilder; import com.sap.cloud.lm.sl.mta.handlers.v1_0.ConfigurationParser; import com.sap.cloud.lm.sl.mta.handlers.v1_0.DescriptorParser; import com.sap.cloud.lm.sl.mta.model.v1_0.DeploymentDescriptor; import com.sap.cloud.lm.sl.mta.model.v1_0.Platform; import com.sap.cloud.lm.sl.mta.model.v1_0.Target; @RunWith(Parameterized.class) public class ConfigurationReferencesResolverTest { protected static final String SPACE_ID = "SAP"; protected static class DaoMockConfiguration { ConfigurationFilter filter; List<ConfigurationEntry> configurationEntries; } private static Platform platform; private static Target target; private String descriptorLocation; private String expectedDescriptor; protected ConfigurationEntryDao dao = Mockito.mock(ConfigurationEntryDao.class); protected List<DaoMockConfiguration> daoConfigurations; protected DeploymentDescriptor descriptor; public ConfigurationReferencesResolverTest(String descriptorLocation, String daoConfigurationsLocation, String expectedDescriptor) throws Exception { this.daoConfigurations = JsonUtil.fromJson(getResourceAsString(daoConfigurationsLocation, getClass()), new TypeToken<List<DaoMockConfiguration>>() { }.getType()); this.descriptorLocation = descriptorLocation; this.expectedDescriptor = expectedDescriptor; } @Parameters public static Iterable<Object[]> getParameters() { return Arrays.asList(new Object[][] { // @formatter:off // (0) Reference to existing provided dependency in the same space: { "mtad-03.yaml", "configuration-entries-01.json", "R:result.json", }, // (1) Reference with some missing parameters: { "mtad-04.yaml", "configuration-entries-01.json", "E:Could not find required property \"mta-version\"", }, // (2) Multiple configuration entries exist matching the filter: { "mtad-05.yaml", "configuration-entries-02.json", "E:Multiple configuration entries were found matching the filter specified in resource \"resource-2\"", }, // (3) No configuration entries matching the filter: { "mtad-06.yaml", "configuration-entries-03.json", "E:No configuration entries were found matching the filter specified in resource \"resource-2\"", } // @formatter:on }); } @BeforeClass public static void initializePlatformAndPlatformType() throws Exception { ConfigurationParser parser = new ConfigurationParser(); target = parser.parseTargetsJson(ConfigurationReferencesResolverTest.class.getResourceAsStream("/mta/targets.json")).get(2); platform = parser.parsePlatformsJson(ConfigurationReferencesResolverTest.class.getResourceAsStream("/mta/platform-types.json")).get( 0); } @Before public void setUp() throws Exception { this.descriptor = getDescriptorParser().parseDeploymentDescriptorYaml(getClass().getResourceAsStream(descriptorLocation)); for (DaoMockConfiguration configuration : daoConfigurations) { ConfigurationFilter filter = configuration.filter; when(dao.find(filter.getProviderNid(), filter.getProviderId(), filter.getProviderVersion(), filter.getTargetSpace(), filter.getRequiredContent(), null, null)).thenReturn(configuration.configurationEntries); } } @Test public void testResolve() { ConfigurationReferencesResolver referencesResolver = getConfigurationResolver(descriptor); TestUtil.test(() -> { referencesResolver.resolve(descriptor); return descriptor; }, expectedDescriptor, getClass()); } protected ConfigurationReferencesResolver getConfigurationResolver(DeploymentDescriptor descriptor) { return new ConfigurationReferencesResolver(dao, new ConfigurationFilterParser(platform, target, getPropertiesChainBuilder(descriptor)), (org, space) -> SPACE_ID, null); } protected DescriptorParser getDescriptorParser() { return new DescriptorParser(); } protected PropertiesChainBuilder getPropertiesChainBuilder(DeploymentDescriptor descriptor) { return new PropertiesChainBuilder(descriptor, target, platform); } }
package com.szx.bbs.like; import com.szx.bbs.common.account.AccountService; import com.szx.bbs.common.controller.BaseController; import com.szx.bbs.common.model.User; import com.jfinal.aop.Interceptor; import com.jfinal.aop.Invocation; /** * 用于显示 "/my" 个人空间与 "/user" 空间的点赞数量 */ public class LikeInterceptor implements Interceptor { public static final String likeNum = "_likeNum"; public void intercept(Invocation inv) { inv.invoke(); BaseController c = (BaseController) inv.getController(); boolean isUserSpace = inv.getActionKey().startsWith("/user"); if (isUserSpace) { handleUserSpaceLikeCount(c); } else { handleMySpaceLikeCount(c); } } private void handleUserSpaceLikeCount(BaseController c) { User account = AccountService.me.getById(c.getParaToInt()); c.setAttr(likeNum, account.getLikeCount()); } private void handleMySpaceLikeCount(BaseController c) { User account = AccountService.me.getById(c.getLoginAccountId()); c.setAttr(likeNum, account.getLikeCount()); } }
// Copyright 2018-2021 Polyaxon, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* * Polyaxon SDKs and REST API specification. * Polyaxon SDKs and REST API specification. * * The version of the OpenAPI document: 1.8.4 * Contact: contact@polyaxon.com * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package org.openapitools.client.model; import com.google.gson.TypeAdapter; import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.io.IOException; import org.openapitools.client.model.V1ArtifactsType; import org.openapitools.client.model.V1DockerfileType; import org.openapitools.client.model.V1FileType; import org.openapitools.client.model.V1GitType; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; /** * Model tests for V1Init */ public class V1InitTest { private final V1Init model = new V1Init(); /** * Model tests for V1Init */ @Test public void testV1Init() { // TODO: test V1Init } /** * Test the property 'artifacts' */ @Test public void artifactsTest() { // TODO: test artifacts } /** * Test the property 'git' */ @Test public void gitTest() { // TODO: test git } /** * Test the property 'dockerfile' */ @Test public void dockerfileTest() { // TODO: test dockerfile } /** * Test the property 'file' */ @Test public void fileTest() { // TODO: test file } /** * Test the property 'connection' */ @Test public void connectionTest() { // TODO: test connection } /** * Test the property 'path' */ @Test public void pathTest() { // TODO: test path } /** * Test the property 'container' */ @Test public void containerTest() { // TODO: test container } }
/* * Copyright (c) 2016-2019 Roman Pierson * ------------------------------------------------------ * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License v2.0 * which accompanies this distribution. * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package com.mdac.vertx.web.accesslogger.appender; import io.vertx.core.json.JsonArray; /** * * An IF defining an appender that can handle Access Events * * @author Roman Pierson * */ public interface Appender { /** * * Push the access events to the appender. * * Its the appenders responsibility to implement local storage * * @param accessEvents List of access events the appender should handle - those are no copies */ void push(JsonArray accessEvent); /** * Is called by the AccessLogger when the application is shutdown and gives the appender the chance to perform additional actions eg in case data is buffered etc */ default void notifyShutdown() { // Not forcing the implementations to implement if not required } }
package roth.lib.java.jdbc; import java.beans.PropertyVetoException; import java.io.InputStream; import java.io.PrintWriter; import java.io.Reader; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Type; import java.sql.Blob; import java.sql.Clob; import java.sql.Connection; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Collection; import java.util.Properties; import java.util.logging.Logger; import javax.sql.DataSource; import com.mchange.v2.c3p0.ComboPooledDataSource; import roth.lib.java.Callback; import roth.lib.java.Characters; import roth.lib.java.Init; import roth.lib.java.jdbc.sql.Delete; import roth.lib.java.jdbc.sql.Insert; import roth.lib.java.jdbc.sql.Select; import roth.lib.java.jdbc.sql.Sql; import roth.lib.java.jdbc.sql.SqlFactory; import roth.lib.java.jdbc.sql.Update; import roth.lib.java.jdbc.sql.Wheres; import roth.lib.java.lang.List; import roth.lib.java.lang.Map; import roth.lib.java.mapper.MapperType; import roth.lib.java.reflector.EntityReflector; import roth.lib.java.reflector.MapperReflector; import roth.lib.java.reflector.PropertyReflector; import roth.lib.java.time.Day; import roth.lib.java.time.Millisecond; import roth.lib.java.time.Month; import roth.lib.java.time.Year; public abstract class Jdbc implements DataSource, JdbcWrapper, Characters, SqlFactory { protected MapperType mapperType; protected MapperReflector mapperReflector; protected String driver; protected String url; protected String username; protected String password; protected String testConnectionString; protected Properties properties; protected int maxConnections = 20; protected int minConnections = 5; protected int maxIdleTime = 0; protected int maxIdleTimeExcessConnections = 0; protected int loginTimeout = 60; protected int deadLockRetries = 3; protected static final int MAX_INSERT_RETRIES = 3; protected PrintWriter logWriter; protected ComboPooledDataSource connectionPool = null; protected Object synchObjectPool = new Object(); protected JdbcCloseHandler closeHandler = new JdbcCloseHandler() { @Override public void close(JdbcConnection connection) throws SQLException { connection.connection.close(); } }; { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { close(); } })); } protected Jdbc(MapperType mapperType) { this.mapperType = mapperType; } public Jdbc(MapperType mapperType, String driver, String url) { this.mapperType = mapperType; init(driver, url); } public Jdbc(MapperType mapperType, String driver, String url, Properties properties) { this.mapperType = mapperType; init(driver, url, properties); } public Jdbc(MapperType mapperType, String driver, String url, String username, String password) { this.mapperType = mapperType; init(driver, url, username, password); } public Jdbc(MapperType mapperType, String driver, String url, String username, String password, String testConnectionString) { this.mapperType = mapperType; init(driver, url, username, password, testConnectionString); } public Jdbc(MapperType mapperType, String driver, String url, String username, String password, Properties properties) { this.mapperType = mapperType; init(driver, url, username, password, null ,properties); } protected void init(String driver, String url) { init(driver, url, new Properties()); } protected void init(String driver, String url, Properties properties) { init(driver, url, null, null, null, properties); } protected void init(String driver, String url, String username, String password) { init(driver, url, username, password, null, new Properties()); } protected void init(String driver, String url, String username, String password, String testConnectionString) { init(driver, url, username, password, testConnectionString, new Properties()); } protected void init(String driver, String url, String username, String password, String testConnectionString, Properties properties) { this.driver = driver; this.url = url; this.username = username; this.password = password; this.properties = properties; this.testConnectionString = testConnectionString; this.mapperReflector = MapperReflector.get(); } protected abstract boolean isDeadLockException(SQLException e); public MapperType getMapperType() { return mapperType; } public MapperReflector getMapperReflector() { return mapperReflector; } public void setMaxConnections(int maxConnections) { this.maxConnections = maxConnections; } public void setMaxIdleTime(int maxIdleTime) { this.maxIdleTime = maxIdleTime; } public void setMaxIdleTimeExcessConnections(int maxIdleTimeExcessConnections) { this.maxIdleTimeExcessConnections = maxIdleTimeExcessConnections; } public void setDeadLockRetries(int deadLockRetries) { this.deadLockRetries = deadLockRetries; } @Override public JdbcConnection getConnection() throws SQLException { return getConnection(username, password); } @Override public JdbcConnection getConnection(String username, String password) throws SQLException { if (connectionPool == null) { synchronized(synchObjectPool) { if (connectionPool == null) { try { connectionPool = new ComboPooledDataSource(); connectionPool.setDriverClass( this.driver ); connectionPool.setJdbcUrl( this.url ); connectionPool.setUser(username); connectionPool.setPassword(password); connectionPool.setMinPoolSize(this.minConnections); connectionPool.setInitialPoolSize(this.minConnections); connectionPool.setAcquireIncrement(this.minConnections); if(this.maxIdleTime > 0) { connectionPool.setMaxIdleTime(this.maxIdleTime); } if(this.maxIdleTimeExcessConnections > 0) { connectionPool.setMaxIdleTimeExcessConnections(this.maxIdleTimeExcessConnections); } connectionPool.setTestConnectionOnCheckout(true); if(this.testConnectionString != null) { connectionPool.setPreferredTestQuery(testConnectionString); } connectionPool.setMaxPoolSize(this.maxConnections); connectionPool.setCheckoutTimeout(loginTimeout*1000); connectionPool.setConnectionCustomizerClassName(JdbcConnectionCustomizer.class.getName()); } catch(PropertyVetoException pvx) { throw new SQLException(pvx); } } } } Connection conn = connectionPool.getConnection(); if (conn != null) { JdbcConnection jdbcConnection = wrap(conn); jdbcConnection.setCloseHandler(closeHandler); jdbcConnection.setAutoCommit(false); return jdbcConnection; } throw new SQLException(String.format("could not get connection within timeout of %d seconds", loginTimeout)); } @Override public PrintWriter getLogWriter() { return logWriter; } @Override public void setLogWriter(PrintWriter logWriter) { this.logWriter = logWriter; } public boolean hasLogWriter() { return logWriter != null; } @Override public void setLoginTimeout(int loginTimeout) { this.loginTimeout = loginTimeout; } @Override public int getLoginTimeout() { return loginTimeout; } public int getDeadLockRetries() { return deadLockRetries; } @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException { throw new SQLFeatureNotSupportedException(); } @Override public <T> T unwrap(Class<T> iface) throws SQLException { throw new SQLException(); } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return false; } public List<String> getGeneratedColumns(Type type) { List<String> generatedColumns = new List<String>(); EntityReflector entityReflector = getMapperReflector().getEntityReflector(type); if(entityReflector != null) { for(PropertyReflector propertyReflector : entityReflector.getGeneratedReflectors(getMapperType())) { generatedColumns.add(propertyReflector.getPropertyName(getMapperType())); } } return generatedColumns; } public void setGeneratedFields(JdbcResultSet resultSet, List<String> generatedColumns, JdbcModel model) { try { if(resultSet.next()) { EntityReflector entityReflector = getMapperReflector().getEntityReflector(model.getClass()); for(String name : generatedColumns) { PropertyReflector propertyReflector = entityReflector.getPropertyReflector(name, getMapperType(), getMapperReflector()); if(propertyReflector != null) { try { Field field = propertyReflector.getField(); Object value = resultSet.getValue(1, field.getType()); field.set(model, value); } catch(Exception e) { throw new JdbcException(e.getMessage()); } } } } } catch(Exception e) { throw new JdbcException(e.getMessage()); } } public Wheres toIdWheres(Class<?> klass, Object...values) { Wheres wheres = newWheres(); if(values != null) { EntityReflector entityReflector = getMapperReflector().getEntityReflector(klass); int i = 0; for(PropertyReflector idReflector : entityReflector.getIdReflectors(getMapperType())) { String propertyName = idReflector.getPropertyName(getMapperType()); wheres.andWhere(newWhere().setOpType(OP_EQ).setName(propertyName).addValues(values[i++])); } } return wheres; } public Wheres toIdWheres(JdbcModel model) { Wheres wheres = newWheres(); EntityReflector entityReflector = getMapperReflector().getEntityReflector(model.getClass()); for(PropertyReflector idReflector : entityReflector.getIdReflectors(getMapperType())) { try { String propertyName = idReflector.getPropertyName(getMapperType()); String fieldName = idReflector.getFieldName(); Field field = idReflector.getField(); Object value = null; if(model.getDirtyIdMap().containsKey(fieldName)) { value = model.getDirtyIdMap().get(fieldName); } else { value = field.get(model); } if(value != null) { wheres.andWhere(newWhere().setOpType(OP_EQ).setName(propertyName).addValues(value)); } } catch(Exception e) { throw new JdbcException(e.getMessage()); } } return wheres; } public Select toSelect(JdbcModel model) { EntityReflector entityReflector = getMapperReflector().getEntityReflector(model.getClass()); Wheres wheres = toIdWheres(model); if(!wheres.isEmpty()) { return newSelect().from(entityReflector.getEntityName()).wheres(wheres); } return null; } public Insert toInsert(JdbcModel model) { EntityReflector entityReflector = getMapperReflector().getEntityReflector(model.getClass()); Map<String, Object> nameValues = new Map<String, Object>(); for(PropertyReflector propertyReflector : entityReflector.getPropertyReflectors(getMapperType())) { String column = propertyReflector.getPropertyName(getMapperType()); Field field = propertyReflector.getField(); try { Object value = field.get(model); nameValues.put(column, value); } catch(Exception e) { throw new JdbcException(e.getMessage()); } } if(!nameValues.isEmpty()) { return newInsert().setTable(entityReflector.getEntityName()).setNameValues(nameValues); } return null; } public Update toUpdate(JdbcModel model) { EntityReflector entityReflector = getMapperReflector().getEntityReflector(model.getClass()); Map<String, Object> nameValues = new Map<String, Object>(); if(model.isDirty()) { for(String name : model.getDirtyNames()) { PropertyReflector propertyReflector = entityReflector.getFieldReflector(name, getMapperType(), getMapperReflector()); if(propertyReflector != null) { String column = propertyReflector.getPropertyName(getMapperType()); Field field = propertyReflector.getField(); try { Object value = field.get(model); nameValues.put(column, value); } catch(Exception e) { throw new JdbcException(e.getMessage()); } } } } Wheres wheres = null; if(!nameValues.isEmpty() && !(wheres = toIdWheres(model)).isEmpty()) { return newUpdate().setTable(entityReflector.getEntityName()).setNameValues(nameValues).wheres(wheres); } return null; } public Delete toDelete(JdbcModel model) { EntityReflector entityReflector = getMapperReflector().getEntityReflector(model.getClass()); Wheres wheres = toIdWheres(model); if(!wheres.isEmpty()) { return newDelete().setTable(entityReflector.getEntityName()).wheres(wheres); } return null; } @SuppressWarnings("unchecked") public <T> void fromDbWithPause(JdbcResultSet resultSet, Callback<T> callback, int pauseCount, long delayInMilliseconds) { try { Class<T> klass = callback.getKlass(); ResultSetMetaData metaData = resultSet.getMetaData(); EntityReflector entityReflector = getMapperReflector().getEntityReflector(klass); int count = 0; if(entityReflector != null) { while(resultSet.next()) { T model = fromDb(resultSet, klass, metaData, entityReflector); if(model != null) { callback.call(model); count++; } if(count % pauseCount == 0) { try { Thread.sleep(delayInMilliseconds); } catch(Exception ex) { } } } } else { while(resultSet.next()) { Object value = resultSet.getValue(1, klass); if(value != null && value.getClass().isAssignableFrom(klass)) { callback.call((T) value); count++; if(count % pauseCount == 0) { try { Thread.sleep(delayInMilliseconds); } catch(Exception ex) { } } } } } } catch(Exception e) { throw new JdbcException(e.getMessage()); } } @SuppressWarnings("unchecked") public <T> void fromDb(JdbcResultSet resultSet, Callback<T> callback) { try { Class<T> klass = callback.getKlass(); ResultSetMetaData metaData = resultSet.getMetaData(); EntityReflector entityReflector = getMapperReflector().getEntityReflector(klass); if(entityReflector != null) { while(resultSet.next()) { T model = fromDb(resultSet, klass, metaData, entityReflector); if(model != null) { callback.call(model); } } } else { while(resultSet.next()) { Object value = resultSet.getValue(1, klass); if(value != null && value.getClass().isAssignableFrom(klass)) { callback.call((T) value); } } } } catch(Exception e) { throw new JdbcException(e.getMessage()); } } @SuppressWarnings("unchecked") public <T> List<T> fromDb(JdbcResultSet resultSet, Class<T> klass) { List<T> models = new List<T>(); try { ResultSetMetaData metaData = resultSet.getMetaData(); EntityReflector entityReflector = getMapperReflector().getEntityReflector(klass); if(entityReflector != null) { while(resultSet.next()) { T model = fromDb(resultSet, klass, metaData, entityReflector); if(model != null) { models.add(model); } } } else { while(resultSet.next()) { Object value = resultSet.getValue(1, klass); if(value != null && value.getClass().isAssignableFrom(klass)) { models.add((T) value); } } } } catch(Exception e) { throw new JdbcException(e.getMessage()); } return models; } public <T> T fromDb(JdbcResultSet resultSet, Class<T> klass, ResultSetMetaData metaData, EntityReflector entityReflector) { T model = null; try { Constructor<T> constructor = null; constructor = klass.getDeclaredConstructor(); constructor.setAccessible(true); model = constructor.newInstance(); if(model instanceof JdbcModel) { ((JdbcModel) model).setDb(this).persisted(); } for(int i = 1; i <= metaData.getColumnCount(); i++) { String columnLabel = metaData.getColumnLabel(i).toUpperCase(); PropertyReflector propertyReflector = entityReflector.getPropertyReflector(columnLabel, getMapperType(), getMapperReflector()); if(propertyReflector != null) { Field field = propertyReflector.getField(); Object value = resultSet.getValue(columnLabel, field.getType()); field.set(model, value); } } if(model instanceof Init) { ((Init) model).init(); } } catch(Exception e) { throw new JdbcException(e.getMessage()); } return model; } public List<Map<String, Object>> fromDb(JdbcResultSet resultSet) { List<Map<String, Object>> dataMaps = new List<Map<String, Object>>(); try { while(resultSet.next()) { Map<String, Object> dataMap = new Map<String, Object>(); try { ResultSetMetaData metaData = resultSet.getMetaData(); for(int i = 1; i <= metaData.getColumnCount(); i++) { String columnLabel = metaData.getColumnLabel(i); dataMap.put(columnLabel, resultSet.getObject(columnLabel)); } } catch(Exception e) { throw new JdbcException(e.getMessage()); } if(!dataMap.isEmpty()) { dataMaps.add(dataMap); } } } catch(Exception e) { throw new JdbcException(e.getMessage()); } return dataMaps; } public JdbcPreparedStatement prepareStatement(JdbcConnection connection, Sql sql) throws SQLException { return prepareStatement(connection, sql.toString(), sql.getValues()); } public JdbcPreparedStatement prepareStatement(JdbcConnection connection, Sql sql, List<String> generated) throws SQLException { return prepareStatement(connection, sql.toString(), sql.getValues(), generated); } public JdbcPreparedStatement prepareStatement(JdbcConnection connection, String sql, Collection<Object> values) throws SQLException { return prepareStatement(connection, sql, values, null); } public JdbcPreparedStatement prepareStatement(JdbcConnection connection, String sql, Collection<Object> values, List<String> generatedColumns) throws SQLException { JdbcPreparedStatement preparedStatement = null; if(generatedColumns != null && !generatedColumns.isEmpty()) { preparedStatement = connection.prepareStatement(sql, generatedColumns.toArray(new String[0])); } else { preparedStatement = connection.prepareStatement(sql); } preparedStatement = setValues(preparedStatement, values); if(hasLogWriter() && sql != null) { debugSql(sql, values, preparedStatement); } return preparedStatement; } protected void debugSql(String sql, Collection<Object> values) { debugSql(sql, values, null); } protected void debugSql(String sql, Collection<Object> values, JdbcPreparedStatement preparedStatement) { getLogWriter().println(); try { if(values != null && !values.isEmpty()) { getLogWriter().println(String.format(sql.replaceAll("\\?", "%s"), serializeValues(values))); } else { getLogWriter().println(sql); } } catch(Exception e) { if(preparedStatement != null) { getLogWriter().println(preparedStatement.toString()); } } } protected Object[] serializeValues(Collection<Object> values) { List<String> serializedValues = new List<String>().setAllowNull(true); for(Object value : values) { if(value == null) { serializedValues.add(null); } else if(value instanceof String) { serializedValues.add(SINGLE_QUOTE + (String) value + SINGLE_QUOTE); } else if(value instanceof Enum) { serializedValues.add(SINGLE_QUOTE + value.toString() + SINGLE_QUOTE); } else if(value instanceof Number) { serializedValues.add(value.toString()); } else if(value instanceof Boolean) { serializedValues.add(value.toString()); } else if(value instanceof byte[]) { serializedValues.add("''"); } else if(value instanceof Blob) { serializedValues.add("''"); } else if(value instanceof Clob) { serializedValues.add("''"); } else if(value instanceof InputStream) { serializedValues.add("''"); } else if(value instanceof Reader) { serializedValues.add("''"); } else { if(value instanceof Year || value instanceof Month || value instanceof Day) { value = ((roth.lib.java.time.Time) value).toSqlDate(); } else if(value instanceof roth.lib.java.time.Time) { value = ((roth.lib.java.time.Time) value).toSqlTimestamp(); } else if(value instanceof Calendar) { value = new Timestamp(((Calendar) value).getTimeInMillis()); } if(value instanceof java.sql.Date) { serializedValues.add(SINGLE_QUOTE + new SimpleDateFormat(Day.DEFAULT_PATTERN).format((java.sql.Date) value) + SINGLE_QUOTE); } else if(value instanceof java.sql.Time) { serializedValues.add(SINGLE_QUOTE + new SimpleDateFormat("HH:mm:ss.SSS").format((java.sql.Time) value) + SINGLE_QUOTE); } else if(value instanceof java.sql.Timestamp) { serializedValues.add(SINGLE_QUOTE + new SimpleDateFormat(Millisecond.DEFAULT_PATTERN).format((java.sql.Timestamp) value) + SINGLE_QUOTE); } else if(value instanceof java.util.Date) { serializedValues.add(SINGLE_QUOTE + new SimpleDateFormat(Millisecond.DEFAULT_PATTERN).format((java.util.Date) value) + SINGLE_QUOTE); } else { serializedValues.add("''"); } } } return serializedValues.toArray(new String[0]); } public JdbcPreparedStatement setValues(JdbcPreparedStatement preparedStatement, Collection<Object> values) throws SQLException { if(values != null) { int i = 1; for(Object value : values) { preparedStatement.setObject(i++, value); } } return preparedStatement; } public JdbcPreparedStatement setValues(String sql, JdbcPreparedStatement preparedStatement, Collection<Object> values) throws SQLException { if(values != null) { int i = 1; for(Object value : values) { preparedStatement.setObject(i++, value); } } if(hasLogWriter() && sql != null) { debugSql(sql, values, preparedStatement); } return preparedStatement; } public String table(Class<?> klass) { String table = null; EntityReflector entityReflector = getMapperReflector().getEntityReflector(klass); if(entityReflector != null) { table = entityReflector.getEntityName(); if(table == null) { throw new JdbcException("Entity name not found"); } } else { throw new JdbcException("Entity annotation not found"); } return table; } @SuppressWarnings("unchecked") public <T extends JdbcModel> T query(T model) { Select select = toSelect(model); if(select != null) { return query(select, (Class<T>) model.getClass()); } return null; } public <T> T query(Select select, Class<T> klass) { select.limit(1); return query(select.toString(), select.getValues(), klass); } public <T> T query(String sql, Class<T> klass) { return query(sql, (Collection<Object>) null, klass); } public <T> T query(String sql, Map<String, Object> valueMap, Class<T> klass) { List<T> models = queryAll(sql, valueMap, klass); return !models.isEmpty() ? models.get(0) : null; } public <T> T query(String sql, Collection<Object> values, Class<T> klass) { List<T> models = queryAll(sql, values, klass); return !models.isEmpty() ? models.get(0) : null; } public <T> T query(Select select, Class<T> klass, JdbcConnection connection) throws SQLException { select.limit(1); return query(select.toString(), select.getValues(), klass, connection); } public <T> T query(String sql, Class<T> klass, JdbcConnection connection) throws SQLException { return query(sql, (Collection<Object>) null, klass, connection); } public <T> T query(String sql, Map<String, Object> valueMap, Class<T> klass, JdbcConnection connection) throws SQLException { List<T> models = queryAll(sql, valueMap, klass, connection); return !models.isEmpty() ? models.get(0) : null; } public <T> T query(String sql, Collection<Object> values, Class<T> klass, JdbcConnection connection) throws SQLException { List<T> models = queryAll(sql, values, klass, connection); return !models.isEmpty() ? models.get(0) : null; } public <T> List<T> queryAll(Select select, Class<T> klass) { return queryAll(select.toString(), select.getValues(), klass); } public <T> List<T> queryAll(String sql, Class<T> klass) { return queryAll(sql, (Collection<Object>) null, klass); } public <T> List<T> queryAll(String sql, Map<String, Object> valueMap, Class<T> klass) { List<T> models = new List<T>(); try(JdbcConnection connection = getConnection()) { models = queryAll(sql, valueMap, klass, connection); connection.commit(); } catch(SQLException e) { throw new JdbcException(e); } return models; } public <T> List<T> queryAll(String sql, Collection<Object> values, Class<T> klass) { List<T> models = new List<T>(); try(JdbcConnection connection = getConnection()) { models = queryAll(sql, values, klass, connection); connection.commit(); } catch(SQLException e) { throw new JdbcException(e); } return models; } public <T> List<T> queryAll(Select select, Class<T> klass, JdbcConnection connection) throws SQLException { return queryAll(select.toString(), select.getValues(), klass, connection); } public <T> List<T> queryAll(String sql, Class<T> klass, JdbcConnection connection) throws SQLException { return queryAll(sql, (Collection<Object>) null, klass, connection); } public <T> List<T> queryAll(String sql, Map<String, Object> valueMap, Class<T> klass, JdbcConnection connection) throws SQLException { JdbcNamedQuery namedQuery = namedQuery(sql, valueMap); return queryAll(namedQuery.getSql(), namedQuery.getValues(), klass, connection); } public <T> List<T> queryAll(String sql, Collection<Object> values, Class<T> klass, JdbcConnection connection) throws SQLException { try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, sql, values)) { try(JdbcResultSet resultSet = preparedStatement.executeQuery()) { List<T> result = fromDb(resultSet, klass); return result; } } } public <T> void queryAll(Select select, Callback<T> callback) { queryAll(select.toString(), select.getValues(), callback); } public <T> void queryAll(String sql, Callback<T> callback) { queryAll(sql, (Collection<Object>) null, callback); } public <T> void queryAll(String sql, Map<String, Object> valueMap, Callback<T> callback) { try(JdbcConnection connection = getConnection()) { queryAll(sql, valueMap, callback, connection); connection.commit(); } catch(SQLException e) { throw new JdbcException(e); } } public <T> void queryAll(String sql, Collection<Object> values, Callback<T> callback) { try(JdbcConnection connection = getConnection()) { queryAll(sql, values, callback, connection); connection.commit(); } catch(SQLException e) { throw new JdbcException(e); } } public <T> void queryAll(Select select, Callback<T> callback, JdbcConnection connection) throws SQLException { queryAll(select.toString(), select.getValues(), callback, connection); } public <T> void queryAllWithPause(Select select, Callback<T> callback, JdbcConnection connection, int pauseCount, long delayInMilliseconds) throws SQLException { try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, select.toString(), select.getValues())) { try(JdbcResultSet resultSet = preparedStatement.executeQuery()) { fromDbWithPause(resultSet, callback, pauseCount, delayInMilliseconds); } } } public <T> void queryAllWithPause(Select select, Callback<T> callback, int pauseCount, long delayInMilliseconds) { try(JdbcConnection connection = getConnection()) { queryAllWithPause(select, callback, connection, pauseCount, delayInMilliseconds); connection.commit(); } catch(SQLException e) { throw new JdbcException(e); } } public <T> void queryAll(String sql, Callback<T> callback, JdbcConnection connection) throws SQLException { queryAll(sql, (Collection<Object>) null, callback, connection); } public <T> void queryAll(String sql, Map<String, Object> valueMap, Callback<T> callback, JdbcConnection connection) throws SQLException { JdbcNamedQuery namedQuery = namedQuery(sql, valueMap); queryAll(namedQuery.getSql(), namedQuery.getValues(), callback, connection); } public <T> void queryAll(String sql, Collection<Object> values, Callback<T> callback, JdbcConnection connection) throws SQLException { try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, sql, values)) { try(JdbcResultSet resultSet = preparedStatement.executeQuery()) { fromDb(resultSet, callback); } } } public Map<String, Object> query(Select select) { select.limit(1); return query(select.toString(), select.getValues()); } public Map<String, Object> query(String sql) { return query(sql, (Collection<Object>) null); } public Map<String, Object> query(String sql, Map<String, Object> valueMap) { List<Map<String, Object>> maps = queryAll(sql, valueMap); return !maps.isEmpty() ? maps.get(0) : null; } public Map<String, Object> query(String sql, Collection<Object> values) { List<Map<String, Object>> maps = queryAll(sql, values); return !maps.isEmpty() ? maps.get(0) : null; } public Map<String, Object> query(Select select, JdbcConnection connection) throws SQLException { select.limit(1); return query(select.toString(), select.getValues(), connection); } public Map<String, Object> query(String sql, JdbcConnection connection) throws SQLException { return query(sql, (Collection<Object>) null, connection); } public Map<String, Object> query(String sql, Map<String, Object> valueMap, JdbcConnection connection) throws SQLException { List<Map<String, Object>> maps = queryAll(sql, valueMap, connection); return !maps.isEmpty() ? maps.get(0) : null; } public Map<String, Object> query(String sql, Collection<Object> values, JdbcConnection connection) throws SQLException { List<Map<String, Object>> maps = queryAll(sql, values, connection); return !maps.isEmpty() ? maps.get(0) : null; } public List<Map<String, Object>> queryAll(Select select) { return queryAll(select.toString(), select.getValues()); } public List<Map<String, Object>> queryAll(String sql, Map<String, Object> valueMap) { List<Map<String, Object>> maps = new List<Map<String, Object>>(); try(JdbcConnection connection = getConnection()) { maps = queryAll(sql, valueMap, connection); connection.commit(); } catch(SQLException e) { throw new JdbcException(e); } return maps; } public List<Map<String, Object>> queryAll(String sql, Collection<Object> values) { List<Map<String, Object>> maps = new List<Map<String, Object>>(); try(JdbcConnection connection = getConnection()) { maps = queryAll(sql, values, connection); connection.commit(); } catch(SQLException e) { throw new JdbcException(e); } return maps; } public List<Map<String, Object>> queryAll(Select select, JdbcConnection connection) throws SQLException { return queryAll(select.toString(), select.getValues(), connection); } public List<Map<String, Object>> queryAll(String sql, JdbcConnection connection) throws SQLException { return queryAll(sql, (Collection<Object>) null, connection); } public List<Map<String, Object>> queryAll(String sql, Map<String, Object> valueMap, JdbcConnection connection) throws SQLException { JdbcNamedQuery namedQuery = namedQuery(sql, valueMap); return queryAll(namedQuery.getSql(), namedQuery.getValues(), connection); } public List<Map<String, Object>> queryAll(String sql, Collection<Object> values, JdbcConnection connection) throws SQLException { try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, sql, values)) { try(JdbcResultSet resultSet = preparedStatement.executeQuery()) { List<Map<String, Object>> result = fromDb(resultSet); return result; } } } public int executeInsert(JdbcModel model) { Insert insert = toInsert(model); if(insert != null) { return executeInsert(insert, model); } return 0; } public int executeInsert(Insert insert) { return executeInsert(insert, (JdbcModel) null); } public int executeInsert(Insert insert, JdbcModel model) { return executeInsert(insert.toString(), insert.getValues(), model); } public int executeInsert(String sql) { return executeInsert(sql, (Collection<Object>) null); } public int executeInsert(String sql, Collection<Object> values) { return executeInsert(sql, values, (JdbcModel) null); } protected int executeInsert(String sql, Collection<Object> values, JdbcModel model) { return executeInsert(sql, values, model, 0); } protected int executeInsert(String sql, Collection<Object> values, JdbcModel model, int attempt) { int result = 0; try(JdbcConnection connection = getConnection()) { try { result = executeInsert(sql, values, model, connection); connection.commit(); } catch(SQLException e) { connection.rollback(); connection.close(); throw e; } } catch(SQLException e) { if(isDeadLockException(e) && attempt++ < getDeadLockRetries()) { result = executeInsert(sql, values, model, attempt); } else { throw new JdbcException(e); } } return result; } public int executeInsert(JdbcModel model, JdbcConnection connection) throws SQLException { Insert insert = toInsert(model); if(insert != null) { return executeInsert(insert, model, connection); } return 0; } public int executeInsert(Insert insert, JdbcConnection connection) throws SQLException { return executeInsert(insert, (JdbcModel) null, connection); } public int executeInsert(Insert insert, JdbcModel model, JdbcConnection connection) throws SQLException { return executeInsert(insert.toString(), insert.getValues(), model, connection); } public int executeInsert(String sql, JdbcConnection connection) throws SQLException { return executeInsert(sql, (Collection<Object>) null, connection); } public int executeInsert(String sql, Collection<Object> values, JdbcConnection connection) throws SQLException { return executeInsert(sql, values, (JdbcModel) null, connection); } protected int executeInsert(String sql, Collection<Object> values, JdbcModel model, JdbcConnection connection) throws SQLException { int result = 0; boolean retry = true; int attempt = 1; List<String> generatedColumns = new List<String>(); if(model != null) { generatedColumns = getGeneratedColumns(model.getClass()); } try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, sql, values, generatedColumns)) { while(retry) { result = preparedStatement.executeUpdate(); if(result > 0) { retry = false; } else if(attempt >= MAX_INSERT_RETRIES) { retry = false; throw new SQLException("Insert failed, no rows inserted " + sql); } else { try { if(values != null && !values.isEmpty()) { System.out.println("Retrying insert " + String.format(sql.replaceAll("\\?", "%s"), serializeValues(values))); } else { System.out.println("Retrying insert " + sql); } Thread.sleep(500); } catch(Exception ex) { } } attempt++; } if(model != null) { model.persisted(); model.resetDirty(); if(!generatedColumns.isEmpty()) { try(JdbcResultSet resultSet = preparedStatement.getGeneratedKeys()) { setGeneratedFields(resultSet, generatedColumns, model); } } } } return result; } public int executeBulkInsert(List<? extends JdbcModel> insertObjects, int batchSize) { return executeBulkInsert(insertObjects, batchSize, 0); } protected int executeBulkInsert(List<? extends JdbcModel> insertObjects, int batchSize, int attempt) { int result = 0; try(JdbcConnection connection = getConnection()) { try { result = executeBulkInsert(insertObjects, connection, batchSize); connection.commit(); } catch(SQLException e) { connection.rollback(); connection.close(); throw e; } } catch(SQLException e) { if(isDeadLockException(e) && attempt++ < getDeadLockRetries()) { result = executeBulkInsert(insertObjects, batchSize, attempt); } else { throw new JdbcException(e); } } return result; } public int executeBulkInsert(List<? extends JdbcModel> insertObjects, JdbcConnection connection, int batchSize) { if (insertObjects == null || insertObjects.size() == 0) { return 0; } if (insertObjects.size() <= batchSize || batchSize == -1) { return executeBulkInsert(insertObjects, connection, 0, insertObjects.size()); } int numBatches = insertObjects.size() / batchSize; int count = 0; if (insertObjects.size() % batchSize > 0) numBatches++; for (int i = 0; i < numBatches; i++) { count += executeBulkInsert(insertObjects, connection, i*batchSize, (i+1)*batchSize); } return count; } private int executeBulkInsert(List<? extends JdbcModel> insertObjects, JdbcConnection connection, int start, int end) { int results = 0; if (end > insertObjects.size()) { end = insertObjects.size(); } try { if (insertObjects.size() == 0) { return 0; } JdbcModel model = insertObjects.get(start); Insert insert = toInsert(model); List<List<Object>> parameters = new List<List<Object>>(); JdbcModel insertObject; for(int i = start; i < end; i++) { insertObject = insertObjects.get(i); if (insertObject.getClass().equals(model.getClass()) == false) { throw new JdbcException("All objects must be same for bulk insert"); } insertObject.preSave(connection); insertObject.preInsert(connection); parameters.add(toInsert(insertObject).getValues()); } String sql = toBulkValuesString(insert.getTable(), insert.getNames(), parameters); if(hasLogWriter() && sql != null) { debugSql(sql, null); } List<String> generatedColumns = getGeneratedColumns(model.getClass()); JdbcPreparedStatement preparedStatement = connection.prepareStatement(sql, generatedColumns.toArray(new String[0])); results = preparedStatement.executeUpdate(); JdbcResultSet resultSet = preparedStatement.getGeneratedKeys(); JdbcModel updatedModel; for(int j = start; j < end; j++) { updatedModel = insertObjects.get(j); updatedModel.postInsert(connection, results); updatedModel.postSave(connection, results); updatedModel.persisted(); updatedModel.resetDirty(); if (resultSet != null && generatedColumns.size() > 0) { setGeneratedFields(resultSet, generatedColumns, updatedModel); } } preparedStatement.close(); } catch(SQLException e) { throw new JdbcException(e); } return results; } public String toBulkValuesString(String table, List<String> names, List<List<Object>> bulkValues) { String baseParams = "(" + Sql.param(names.size()) + ")"; StringBuilder builder = new StringBuilder(); builder.append(INSERT + Sql.tick(table) + " (" + Sql.tick(names) + ")"); builder.append(LF + VALUES ); boolean firstTime = true; for (List<Object> sublist : bulkValues) { if (!firstTime) { builder.append(", "); } builder.append(LF + String.format(baseParams.replaceAll("\\?", "%s"), serializeValues(sublist))); firstTime = false; } builder.append(END); return builder.toString(); } public int executeBulkInsert(String table, String fieldName, List<? extends Object> values, int batchSize) { return executeBulkInsert(table, fieldName, values, batchSize, 0); } protected int executeBulkInsert(String table, String fieldName, List<? extends Object> values, int batchSize, int attempt) { int result = 0; try(JdbcConnection connection = getConnection()) { try { result = executeBulkInsert(connection, table, fieldName, values, batchSize); connection.commit(); } catch(SQLException e) { connection.rollback(); connection.close(); throw e; } } catch(SQLException e) { if(isDeadLockException(e) && attempt++ < getDeadLockRetries()) { result = executeBulkInsert(table, fieldName, values, batchSize, attempt); } else { throw new JdbcException(e); } } return result; } public int executeBulkInsert(JdbcConnection connection, String table, String fieldName, List<? extends Object> values, int batchSize) { if (values == null || values.size() == 0) { return 0; } List<String> names = new List<String>(); names.add(fieldName); List<List<Object>> valueArray = new List<List<Object>>(); for (Object s : values) { List<Object> paramArray = new List<Object>(); paramArray.add(s); valueArray.add(paramArray); } if (valueArray.size() <= batchSize || batchSize == -1) { return executeBulkInsert(connection, table, names, valueArray); } int numBatches = valueArray.size() / batchSize; int count = 0; if (valueArray.size() % batchSize > 0) numBatches++; List<List<Object>> newList; for (int i = 0; i < numBatches; i++) { newList = new List<List<Object>>(); for (int j = i*batchSize; j < (i+1)*batchSize && j < valueArray.size(); j++) { newList.add(valueArray.get(j)); } count += executeBulkInsert(connection, table, names, newList); } return count; } private int executeBulkInsert(JdbcConnection connection, String table, List<String> names, List<List<Object>> values) { int result = 0; try { String sql = toBulkValuesString(table, names, values); if(hasLogWriter() && sql != null) { debugSql(sql, null); } JdbcPreparedStatement preparedStatement = connection.prepareStatement(sql); result = preparedStatement.executeUpdate(); preparedStatement.close(); } catch(SQLException e) { throw new JdbcException(e); } return result; } public int executeUpdate(JdbcModel model) { Update update = toUpdate(model); if(update != null) { return executeUpdate(update, model); } return 0; } public int executeUpdate(Update update) { return executeUpdate(update, (JdbcModel) null); } public int executeUpdate(Update update, JdbcModel model) { return executeUpdate(update.toString(), update.getValues(), model); } public int executeUpdate(String sql) { return executeUpdate(sql, (Collection<Object>) null); } public int executeUpdate(String sql, Collection<Object> values) { return executeUpdate(sql, values, (JdbcModel) null); } protected int executeUpdate(String sql, Collection<Object> values, JdbcModel model) { return executeUpdate(sql, values, model, 0); } protected int executeUpdate(String sql, Collection<Object> values, JdbcModel model, int attempt) { int result = 0; try(JdbcConnection connection = getConnection()) { try { result = executeUpdate(sql, values, model, connection); connection.commit(); } catch(SQLException e) { connection.rollback(); connection.close(); throw e; } } catch(SQLException e) { if(isDeadLockException(e) && attempt++ < getDeadLockRetries()) { result = executeUpdate(sql, values, model, attempt); } else { throw new JdbcException(e); } } return result; } public int executeUpdate(JdbcModel model, JdbcConnection connection) throws SQLException { Update update = toUpdate(model); if(update != null) { return executeUpdate(update, model, connection); } return 0; } public int executeUpdate(Update update, JdbcConnection connection) throws SQLException { return executeUpdate(update, (JdbcModel) null, connection); } public int executeUpdate(Update update, JdbcModel model, JdbcConnection connection) throws SQLException { return executeUpdate(update.toString(), update.getValues(), model, connection); } public int executeUpdate(String sql, JdbcConnection connection) throws SQLException { return executeUpdate(sql, (Collection<Object>) null, connection); } public int executeUpdate(String sql, Collection<Object> values, JdbcConnection connection) throws SQLException { return executeUpdate(sql, values, (JdbcModel) null, connection); } protected int executeUpdate(String sql, Collection<Object> values, JdbcModel model, JdbcConnection connection) throws SQLException { int result = 0; try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, sql, values)) { result = preparedStatement.executeUpdate(); if(model != null) { model.persisted(); model.resetDirty(); } } return result; } public int executeDelete(JdbcModel model) { Delete delete = toDelete(model); if(delete != null) { return executeDelete(delete, model); } return 0; } public int executeDelete(Delete delete) { return executeDelete(delete, (JdbcModel) null); } public int executeDelete(Delete delete, JdbcModel model) { return executeDelete(delete.toString(), delete.getValues(), model); } public int executeDelete(String sql) { return executeDelete(sql, (Collection<Object>) null); } public int executeDelete(String sql, Collection<Object> values) { return executeDelete(sql, values, (JdbcModel) null); } protected int executeDelete(String sql, Collection<Object> values, JdbcModel model) { return executeDelete(sql, values, model, 0); } protected int executeDelete(String sql, Collection<Object> values, JdbcModel model, int attempt) { int result = 0; try(JdbcConnection connection = getConnection()) { try { result = executeDelete(sql, values, model, connection); connection.commit(); } catch(SQLException e) { connection.rollback(); connection.close(); throw e; } } catch(SQLException e) { if(isDeadLockException(e) && attempt++ < getDeadLockRetries()) { result = executeDelete(sql, values, model, attempt); } else { throw new JdbcException(e); } } return result; } public int executeDelete(JdbcModel model, JdbcConnection connection) throws SQLException { Delete delete = toDelete(model); if(delete != null) { return executeDelete(delete, connection); } return 0; } public int executeDelete(Delete delete, JdbcConnection connection) throws SQLException { return executeDelete(delete, (JdbcModel) null, connection); } public int executeDelete(Delete delete, JdbcModel model, JdbcConnection connection) throws SQLException { return executeDelete(delete.toString(), delete.getValues(), model, connection); } public int executeDelete(String sql, JdbcConnection connection) throws SQLException { return executeDelete(sql, (Collection<Object>) null, connection); } public int executeDelete(String sql, Collection<Object> values, JdbcConnection connection) throws SQLException { return executeDelete(sql, values, (JdbcModel) null, connection); } protected int executeDelete(String sql, Collection<Object> values, JdbcModel model, JdbcConnection connection) throws SQLException { int result = 0; try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, sql, values)) { result = preparedStatement.executeUpdate(); if(model != null) { model.deleted(); model.resetDirty(); } } return result; } public int execute(String sql) { return execute(sql, (Collection<Object>) null); } public int execute(String sql, Collection<Object> values) { return execute(sql, values, 0); } protected int execute(String sql, Collection<Object> values, int attempt) { int result = 0; try(JdbcConnection connection = getConnection()) { try { result = execute(sql, values, connection); connection.commit(); } catch(SQLException e) { connection.rollback(); connection.close(); throw e; } } catch(SQLException e) { if(isDeadLockException(e) && attempt++ < getDeadLockRetries()) { result = execute(sql, values, attempt); } else { throw new JdbcException(e); } } return result; } protected int execute(String sql, JdbcConnection connection) throws SQLException { return execute(sql, (Collection<Object>) null, connection); } protected int execute(String sql, Collection<Object> values, JdbcConnection connection) throws SQLException { int result = 0; try(JdbcPreparedStatement preparedStatement = prepareStatement(connection, sql, values)) { result = preparedStatement.executeUpdate(); } return result; } public void close() { if(this.connectionPool != null) { this.connectionPool.close(); } } public JdbcNamedQuery namedQuery(String sql, Map<String, Object> valueMap) { return new JdbcNamedQuery(sql, valueMap); } }
package datawave.ingest.data; import org.apache.hadoop.conf.Configuration; import datawave.ingest.data.config.ingest.IgnorableErrorHelperInterface; public class TestIgnorableHelper implements IgnorableErrorHelperInterface { public void setup(Configuration conf) {} /** * @return true if the event is missing the UUID. */ @Override public boolean isIgnorableFatalError(RawRecordContainer e, String err) { if (err.equals(RawDataErrorNames.UUID_MISSING)) { return true; } return false; } }
package com.romanceabroad.ui; import org.openqa.selenium.WebElement; import org.testng.Assert; import org.testng.annotations.Test; import org.testng.asserts.SoftAssert; import java.util.List; public class SearchTests extends BaseUI { String linkSearch; String titleSearch; String urlSearch; public static final boolean testCase11 = true; public static final boolean testCase12 = true; public static final boolean testCase13 = true; @Test(priority = 2, enabled = testCase12, groups = {"ie"}) public void testSearchPageTestCase12() { SoftAssert softAssert = new SoftAssert(); linkSearch = mainPage.verifyLinkSearch(); softAssert.assertEquals(linkSearch, Data.expectedLinkTextSearch, "Wrong Link Text!"); softAssert.assertTrue(driver.findElement(Locators.LINK_SEARCH).isDisplayed(), "Element is not displayed"); mainPage.clickLinkSearch(); titleSearch = searchPage.verifyTitleSearch(); softAssert.assertEquals(titleSearch, Data.expectedPageTitleSearch, "Wrong title!"); urlSearch = searchPage.verifyUrlSearch(); softAssert.assertEquals(urlSearch, Data.expectedUrlSearch, "Wrong URL!"); // searchPage.selectRandomMinAge(); // searchPage.selectRandomMaxAge(); // searchPage.selectRandomSortBy(); // searchPage.testLinksOnSearchPage(); searchPage.selectAge(); searchPage.sortingCriteria(); searchPage.sortASC(); searchPage.sortDESC(); searchPage.clickListViewLink(); searchPage.clickGalleryViewLink(); searchPage.clickNextPageSortBlock(); searchPage.clickPreviousPageSortBlock(); searchPage.clickLastPagePagination(); searchPage.clickFirstPagePagination(); searchPage.clickNextPagePagination(); searchPage.clickPreviousPagePagination(); System.out.println("testCase12"); System.out.println("priority 2"); softAssert.assertAll(); } @Test(dataProvider = "Search", dataProviderClass = DataProviders.class, priority = 3, enabled = testCase13, groups = {"user", "admin"}) public void searchDifferentResults13(String minAge, String maxAge, String sortBy) { System.out.println("testCase13"); System.out.println("priority 3"); int min = Integer.parseInt(minAge); int max = Integer.parseInt(maxAge); System.out.println(min); System.out.println(max); mainPage.clickLinkSearch(); searchPage.getDropDownListByText(driver.findElement(Locators.DROP_DOWN_LIST_AGE_MIN), minAge); searchPage.getDropDownListByText(driver.findElement(Locators.DROP_DOWN_LIST_AGE_MAX), maxAge); // searchPage.clickMainSearchButton(); searchPage.getDropDownListByText(driver.findElement(Locators.DROP_DOWN_LIST_SORT_BY), sortBy); searchPage.clickMainSearchButton(); List<WebElement> infoAboutUser = driver.findElements(Locators.TEXT_USER_INFO); System.out.println("Number of users: " + infoAboutUser.size() / 2); for (int i = 0; i < infoAboutUser.size(); i++) { if (i % 2 == 0) { WebElement text = infoAboutUser.get(i); String info = text.getText(); String[] splittedPhrase = info.split(", "); String age = splittedPhrase[1]; int ageNum = Integer.parseInt(age); if (min <= ageNum || ageNum <= max) { System.out.println("This age: " + ageNum + " is correct."); } else { Assert.fail("Wrong age: " + ageNum); } } mainPage.javaWaitSec(3); infoAboutUser = driver.findElements(Locators.TEXT_USER_INFO); } } @Test(priority = 1, enabled = testCase11, groups = {"user", "admin"}) public void selectRandomDropDownListOnSearchPageTestCase11() { mainPage.clickLinkSearch(); int sizeOfDropDownListSortBy = searchPage.getSizeDropDownList(Locators.DROP_DOWN_LIST_SORT_BY); System.out.println(sizeOfDropDownListSortBy); for (int i = 0; i < sizeOfDropDownListSortBy; i++) { System.out.print(i + 1 + ". "); searchPage.selectItemDropDownRandomOption(Locators.DROP_DOWN_LIST_SORT_BY, "Sort by"); mainPage.javaWaitSec(2);// } System.out.println("testCase11"); System.out.println("priority 1"); } @Test public void testSplit() { String info = "Name, 23"; String[] splittedPhrase = info.split(", "); String age = splittedPhrase[1]; System.out.println(age); } }
/* * The MIT License * * Copyright 2018 Thibault Debatty. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package yellow.mongo.proxy.element; /** * * @author Thibault Debatty */ public class DummyElement extends Element<Object> { /** * * @param type type of Element * @param name name of Element */ public DummyElement(final int type, final String name) { super(type, name); } /** * * @return false using to compare if the class object return a String value. */ public boolean isString() { return false; } /** * * @return null value. */ @Override public Object value() { return null; } }
package com.nisshoku.springpetsclinic; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit.jupiter.SpringExtension; @ExtendWith(SpringExtension.class) @SpringBootTest() public class SpringPetClinicApplicationTests { @Test public void contextLoads() { } }
package org.wikiup.core.inf.ext; import org.wikiup.core.inf.Dictionary; public interface ClassDictionary extends Dictionary<Class> { }
/** * The MIT License * * Copyright (c) 2009-2019 PrimeTek * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.primefaces.component.export; public class ExcelOptions implements ExporterOptions { private String facetFontStyle; private String facetFontColor; private String facetBgColor; private String facetFontSize; private String cellFontStyle; private String cellFontColor; private String cellFontSize; private String fontName; private boolean autoSizeColumn = true; public ExcelOptions() { } public ExcelOptions(String facetFontStyle, String facetFontColor, String facetBgColor, String facetFontSize) { this.facetFontStyle = facetFontStyle; this.facetFontColor = facetFontColor; this.facetBgColor = facetBgColor; this.facetFontSize = facetFontSize; } public ExcelOptions(String cellFontStyle, String cellFontColor, String cellFontSize) { this.cellFontStyle = cellFontStyle; this.cellFontColor = cellFontColor; this.cellFontSize = cellFontSize; } public ExcelOptions(String facetFontStyle, String facetFontColor, String facetBgColor, String facetFontSize, String cellFontStyle, String cellFontColor, String cellFontSize) { this(facetFontStyle, facetFontColor, facetBgColor, facetFontSize); this.cellFontStyle = cellFontStyle; this.cellFontColor = cellFontColor; this.cellFontSize = cellFontSize; } @Override public String getFacetFontStyle() { return facetFontStyle; } public void setFacetFontStyle(String facetFontStyle) { this.facetFontStyle = facetFontStyle; } @Override public String getFacetFontColor() { return facetFontColor; } public void setFacetFontColor(String facetFontColor) { this.facetFontColor = facetFontColor; } @Override public String getFacetBgColor() { return facetBgColor; } public void setFacetBgColor(String facetBgColor) { this.facetBgColor = facetBgColor; } @Override public String getFacetFontSize() { return facetFontSize; } public void setFacetFontSize(String facetFontSize) { this.facetFontSize = facetFontSize; } @Override public String getCellFontStyle() { return cellFontStyle; } public void setCellFontStyle(String cellFontStyle) { this.cellFontStyle = cellFontStyle; } @Override public String getCellFontColor() { return cellFontColor; } public void setCellFontColor(String cellFontColor) { this.cellFontColor = cellFontColor; } @Override public String getCellFontSize() { return cellFontSize; } public void setCellFontSize(String cellFontSize) { this.cellFontSize = cellFontSize; } @Override public String getFontName() { return fontName; } public void setFontName(String fontName) { this.fontName = fontName; } public boolean isAutoSizeColumn() { return autoSizeColumn; } public void setAutoSizeColumn(boolean autoSizeColumn) { this.autoSizeColumn = autoSizeColumn; } }
package com.huaweicloud.sdk.servicestage.v2.model; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; import java.util.function.Consumer; import java.util.Objects; /** * 运行时参数。 */ public class RuntimeTypeView { @JsonInclude(JsonInclude.Include.NON_NULL) @JsonProperty(value="type_name") private String typeName; @JsonInclude(JsonInclude.Include.NON_NULL) @JsonProperty(value="display_name") private String displayName; @JsonInclude(JsonInclude.Include.NON_NULL) @JsonProperty(value="container_default_port") private Integer containerDefaultPort; @JsonInclude(JsonInclude.Include.NON_NULL) @JsonProperty(value="type_desc") private String typeDesc; public RuntimeTypeView withTypeName(String typeName) { this.typeName = typeName; return this; } /** * 类型名称。 * @return typeName */ public String getTypeName() { return typeName; } public void setTypeName(String typeName) { this.typeName = typeName; } public RuntimeTypeView withDisplayName(String displayName) { this.displayName = displayName; return this; } /** * 显示名称。 * @return displayName */ public String getDisplayName() { return displayName; } public void setDisplayName(String displayName) { this.displayName = displayName; } public RuntimeTypeView withContainerDefaultPort(Integer containerDefaultPort) { this.containerDefaultPort = containerDefaultPort; return this; } /** * 容器默认端口。 * @return containerDefaultPort */ public Integer getContainerDefaultPort() { return containerDefaultPort; } public void setContainerDefaultPort(Integer containerDefaultPort) { this.containerDefaultPort = containerDefaultPort; } public RuntimeTypeView withTypeDesc(String typeDesc) { this.typeDesc = typeDesc; return this; } /** * 类型描述。 * @return typeDesc */ public String getTypeDesc() { return typeDesc; } public void setTypeDesc(String typeDesc) { this.typeDesc = typeDesc; } @Override public boolean equals(java.lang.Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } RuntimeTypeView runtimeTypeView = (RuntimeTypeView) o; return Objects.equals(this.typeName, runtimeTypeView.typeName) && Objects.equals(this.displayName, runtimeTypeView.displayName) && Objects.equals(this.containerDefaultPort, runtimeTypeView.containerDefaultPort) && Objects.equals(this.typeDesc, runtimeTypeView.typeDesc); } @Override public int hashCode() { return Objects.hash(typeName, displayName, containerDefaultPort, typeDesc); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class RuntimeTypeView {\n"); sb.append(" typeName: ").append(toIndentedString(typeName)).append("\n"); sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); sb.append(" containerDefaultPort: ").append(toIndentedString(containerDefaultPort)).append("\n"); sb.append(" typeDesc: ").append(toIndentedString(typeDesc)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
/* * Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.frauddetector.model; import javax.annotation.Generated; /** * */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public enum RuleExecutionMode { ALL_MATCHED("ALL_MATCHED"), FIRST_MATCHED("FIRST_MATCHED"); private String value; private RuleExecutionMode(String value) { this.value = value; } @Override public String toString() { return this.value; } /** * Use this in place of valueOf. * * @param value * real value * @return RuleExecutionMode corresponding to the value * * @throws IllegalArgumentException * If the specified value does not map to one of the known values in this enum. */ public static RuleExecutionMode fromValue(String value) { if (value == null || "".equals(value)) { throw new IllegalArgumentException("Value cannot be null or empty!"); } for (RuleExecutionMode enumEntry : RuleExecutionMode.values()) { if (enumEntry.toString().equals(value)) { return enumEntry; } } throw new IllegalArgumentException("Cannot create enum from " + value + " value!"); } }
package module8packageJava0; import java.lang.Integer; public class Foo19 { Integer int0; Integer int1; public void foo0() { new module8packageJava0.Foo18().foo6(); } public void foo1() { foo0(); } public void foo2() { foo1(); } public void foo3() { foo2(); } public void foo4() { foo3(); } public void foo5() { foo4(); } public void foo6() { foo5(); } }
package org.apache.spark.deploy; public class OptionAssigner$ extends scala.runtime.AbstractFunction5<java.lang.String, java.lang.Object, java.lang.Object, java.lang.String, java.lang.String, org.apache.spark.deploy.OptionAssigner> implements scala.Serializable { /** * Static reference to the singleton instance of this Scala object. */ public static final OptionAssigner$ MODULE$ = null; public OptionAssigner$ () { throw new RuntimeException(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.tugraz.sysds.test.integration.functions.parfor; import java.util.HashMap; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.tugraz.sysds.common.Types.ExecMode; import org.tugraz.sysds.hops.OptimizerUtils; import org.tugraz.sysds.lops.LopProperties.ExecType; import org.tugraz.sysds.runtime.controlprogram.ParForProgramBlock; import org.tugraz.sysds.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer; import org.tugraz.sysds.runtime.matrix.data.MatrixValue.CellIndex; import org.tugraz.sysds.test.AutomatedTestBase; import org.tugraz.sysds.test.TestConfiguration; import org.tugraz.sysds.test.TestUtils; import org.tugraz.sysds.utils.Statistics; public class ParForRepeatedOptimizationTest extends AutomatedTestBase { private final static String TEST_NAME1 = "parfor_repeatedopt1"; private final static String TEST_NAME2 = "parfor_repeatedopt2"; private final static String TEST_NAME3 = "parfor_repeatedopt3"; private final static String TEST_DIR = "functions/parfor/"; private final static String TEST_CLASS_DIR = TEST_DIR + ParForRepeatedOptimizationTest.class.getSimpleName() + "/"; private final static double eps = 1e-8; private final static int rows = 1000000; private final static int cols = 10; private final static double sparsity = 0.7; @Override public void setUp() { addTestConfiguration( TEST_NAME1, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[]{"R"}) ); addTestConfiguration( TEST_NAME2, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME2, new String[]{"R"}) ); addTestConfiguration( TEST_NAME3, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME3, new String[]{"R"}) ); if (TEST_CACHE_ENABLED) { setOutAndExpectedDeletionDisabled(true); } } @BeforeClass public static void init() { TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR); } @AfterClass public static void cleanUp() { if (TEST_CACHE_ENABLED) { TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR); } } @Test public void testParForRepeatedOptNoReuseNoUpdateCP() { int numExpectedMRJobs = 1+3; //reblock, 3*partition runParForRepeatedOptTest( false, false, false, ExecType.CP, numExpectedMRJobs ); } @Test public void testParForRepeatedOptNoReuseUpdateCP() { int numExpectedMRJobs = 1+3+2; //reblock, 3*partition, 2*GMR (previously 3GMR, now 1GMR removed on V*1) runParForRepeatedOptTest( false, true, false, ExecType.CP, numExpectedMRJobs ); } @Test public void testParForRepeatedOptNoReuseChangedDimCP() { int numExpectedMRJobs = 1+3+3; //reblock, 3*partition, 3*GMR runParForRepeatedOptTest( false, false, true, ExecType.CP, numExpectedMRJobs ); } @Test public void testParForRepeatedOptReuseNoUpdateCP() { int numExpectedMRJobs = 1+1; //reblock, partition runParForRepeatedOptTest( true, false, false, ExecType.CP, numExpectedMRJobs ); } @Test public void testParForRepeatedOptReuseUpdateCP() { int numExpectedMRJobs = 1+3+2; //reblock, 3*partition, 2*GMR (previously 3GMR, now 1GMR removed on V*1) runParForRepeatedOptTest( true, true, false, ExecType.CP, numExpectedMRJobs ); } @Test public void testParForRepeatedOptReuseChangedDimCP() { int numExpectedMRJobs = 1+3+3; //reblock, 3*partition, 3*GMR runParForRepeatedOptTest( true, false, true, ExecType.CP, numExpectedMRJobs ); } /** * update, refers to changing data * changed dim, refers to changing dimensions and changing parfor predicate * * * @param outer execution mode of outer parfor loop * @param inner execution mode of inner parfor loop * @param instType execution mode of instructions */ private void runParForRepeatedOptTest( boolean reusePartitionedData, boolean update, boolean changedDim, ExecType et, int numExpectedMR ) { ExecMode platformOld = rtplatform; double memfactorOld = OptimizerUtils.MEM_UTIL_FACTOR; boolean reuseOld = ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS; String TEST_NAME = update ? TEST_NAME2 : ( changedDim ? TEST_NAME3 : TEST_NAME1); TestConfiguration config = getTestConfiguration(TEST_NAME); config.addVariable("rows", rows); config.addVariable("cols", cols); String TEST_CACHE_DIR = ""; if (TEST_CACHE_ENABLED) { TEST_CACHE_DIR = TEST_NAME + "/"; } loadTestConfiguration(config, TEST_CACHE_DIR); try { rtplatform = ExecMode.HYBRID; OptimizerUtils.MEM_UTIL_FACTOR = computeMemoryUtilFactor( 70 ); //force partitioning ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS = reusePartitionedData; String HOME = SCRIPT_DIR + TEST_DIR; fullDMLScriptName = HOME + TEST_NAME + ".dml"; programArgs = new String[]{"-stats","-args", input("V"), Integer.toString(rows), Integer.toString(cols), output("R"), Integer.toString((update||changedDim)?1:0)}; fullRScriptName = HOME + TEST_NAME + ".R"; rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir() + " " + Integer.toString((update||changedDim)?1:0); double[][] V = getRandomMatrix(rows, cols, 0, 1, sparsity, 7); writeInputMatrix("V", V, true); runTest(true, false, null, -1); runRScript(true); Assert.assertEquals("Unexpected number of executed MR jobs.", numExpectedMR, Statistics.getNoOfExecutedMRJobs()); //compare matrices HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R"); HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R"); TestUtils.compareMatrices(dmlfile, rfile, eps, "DML", "R"); } finally { //reset optimizer flags to pre-test configuration rtplatform = platformOld; OptimizerUtils.MEM_UTIL_FACTOR = memfactorOld; ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS = reuseOld; } } private static double computeMemoryUtilFactor( int mb ) { return Math.min(1, ((double)1024*1024*mb)/InfrastructureAnalyzer.getLocalMaxMemory()); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.aliyuncs.ecsops.model.v20160401; import com.aliyuncs.AcsResponse; import com.aliyuncs.ecsops.transform.v20160401.OpsCreateSystemEventMetaResponseUnmarshaller; import com.aliyuncs.transform.UnmarshallerContext; /** * @author auto create * @version */ public class OpsCreateSystemEventMetaResponse extends AcsResponse { private String requestId; public String getRequestId() { return this.requestId; } public void setRequestId(String requestId) { this.requestId = requestId; } @Override public OpsCreateSystemEventMetaResponse getInstance(UnmarshallerContext context) { return OpsCreateSystemEventMetaResponseUnmarshaller.unmarshall(this, context); } }
package org.swtk.commons.dict.wordnet.indexbyname.instance.w.a.r; import java.util.ArrayList; import java.util.Collection; import java.util.Map; import java.util.TreeMap; import org.swtk.common.dict.dto.wordnet.IndexNoun; import com.trimc.blogger.commons.utils.GsonUtils; public final class WordnetNounIndexNameInstanceWAR { private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>(); static { add("{\"term\":\"war\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"00802786\", \"01238861\", \"14004888\", \"00975181\"]}"); add("{\"term\":\"war admiral\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02386564\"]}"); add("{\"term\":\"war advocacy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06232181\"]}"); add("{\"term\":\"war baby\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10786077\"]}"); add("{\"term\":\"war between the states\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01304121\"]}"); add("{\"term\":\"war bride\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10786294\"]}"); add("{\"term\":\"war chest\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13381427\"]}"); add("{\"term\":\"war cloud\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07300983\"]}"); add("{\"term\":\"war correspondent\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10786386\"]}"); add("{\"term\":\"war crime\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00784172\"]}"); add("{\"term\":\"war criminal\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10786567\"]}"); add("{\"term\":\"war cry\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07138112\", \"07167460\"]}"); add("{\"term\":\"war dance\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00544151\"]}"); add("{\"term\":\"war department\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08162100\"]}"); add("{\"term\":\"war game\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00462463\"]}"); add("{\"term\":\"war god\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10787322\"]}"); add("{\"term\":\"war hawk\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10182237\"]}"); add("{\"term\":\"war machine\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08215965\"]}"); add("{\"term\":\"war of american independence\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01304577\"]}"); add("{\"term\":\"war of greek independence\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01312565\"]}"); add("{\"term\":\"war of nerves\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00955521\"]}"); add("{\"term\":\"war of the austrian succession\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01312838\"]}"); add("{\"term\":\"war of the grand alliance\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01313027\"]}"); add("{\"term\":\"war of the league of augsburg\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01313027\"]}"); add("{\"term\":\"war of the roses\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01313536\"]}"); add("{\"term\":\"war of the spanish succession\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01313280\"]}"); add("{\"term\":\"war paint\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"03719774\", \"04559383\", \"04559481\"]}"); add("{\"term\":\"war party\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"08281031\", \"08282833\"]}"); add("{\"term\":\"war power\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13968734\"]}"); add("{\"term\":\"war room\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04559837\"]}"); add("{\"term\":\"war secretary\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00604246\"]}"); add("{\"term\":\"war to end war\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01314011\"]}"); add("{\"term\":\"war vessel\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04559982\"]}"); add("{\"term\":\"war whoop\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07138112\"]}"); add("{\"term\":\"war widow\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10788478\"]}"); add("{\"term\":\"war zone\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08706517\"]}"); add("{\"term\":\"waratah\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"12244086\", \"12244281\"]}"); add("{\"term\":\"warble\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02197909\"]}"); add("{\"term\":\"warble fly\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02197743\"]}"); add("{\"term\":\"warbler\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"01565769\", \"10786160\"]}"); add("{\"term\":\"warburg\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11393545\", \"11393667\"]}"); add("{\"term\":\"ward\", \"synsetCount\":7, \"upperType\":\"NOUN\", \"ids\":[\"02995487\", \"11393892\", \"11394080\", \"11394276\", \"04557205\", \"08690135\", \"10786688\"]}"); add("{\"term\":\"warden\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10786822\"]}"); add("{\"term\":\"wardenship\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00606927\"]}"); add("{\"term\":\"warder\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10786933\"]}"); add("{\"term\":\"wardership\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00607024\"]}"); add("{\"term\":\"wardress\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10787100\"]}"); add("{\"term\":\"wardrobe\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"04557832\", \"04557712\", \"04557470\"]}"); add("{\"term\":\"wardroom\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04557962\"]}"); add("{\"term\":\"ware\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"03754377\", \"04558126\"]}"); add("{\"term\":\"warehouse\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04558341\"]}"); add("{\"term\":\"warehouseman\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10787187\"]}"); add("{\"term\":\"warehouser\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10787187\"]}"); add("{\"term\":\"warehousing\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00373415\"]}"); add("{\"term\":\"warfare\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"01238861\", \"00975181\"]}"); add("{\"term\":\"warfarin\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04558491\"]}"); add("{\"term\":\"warhead\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04558661\"]}"); add("{\"term\":\"warhol\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11394443\"]}"); add("{\"term\":\"warhorse\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"02381067\", \"10768791\", \"04558924\"]}"); add("{\"term\":\"wariness\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04671770\"]}"); add("{\"term\":\"warji\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06995980\"]}"); add("{\"term\":\"warlock\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10787430\"]}"); add("{\"term\":\"warlord\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10787505\"]}"); add("{\"term\":\"warlpiri\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06953363\"]}"); add("{\"term\":\"warm front\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11481127\"]}"); add("{\"term\":\"warm up\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01145712\"]}"); add("{\"term\":\"warmer\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03513245\"]}"); add("{\"term\":\"warmheartedness\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07560035\", \"07561649\"]}"); add("{\"term\":\"warming\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11502540\", \"13513079\"]}"); add("{\"term\":\"warming pan\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04559119\"]}"); add("{\"term\":\"warmness\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"05023937\", \"07560035\"]}"); add("{\"term\":\"warmonger\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10335671\"]}"); add("{\"term\":\"warmongering\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06232181\"]}"); add("{\"term\":\"warmth\", \"synsetCount\":5, \"upperType\":\"NOUN\", \"ids\":[\"04634357\", \"04635549\", \"05023937\", \"07561649\", \"05733472\"]}"); add("{\"term\":\"warner\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"10787690\", \"11394584\"]}"); add("{\"term\":\"warning\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"07227534\", \"06685042\", \"07238811\"]}"); add("{\"term\":\"warning bell\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04451255\"]}"); add("{\"term\":\"warning coloration\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04986251\"]}"); add("{\"term\":\"warning device\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02697285\"]}"); add("{\"term\":\"warning light\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06887621\"]}"); add("{\"term\":\"warning of attack\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07240811\"]}"); add("{\"term\":\"warning of war\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07240990\"]}"); add("{\"term\":\"warning signal\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06816015\"]}"); add("{\"term\":\"warp\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"04559236\", \"07448702\", \"13908393\", \"14528328\"]}"); add("{\"term\":\"warpath\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00417659\", \"07564326\"]}"); add("{\"term\":\"warping\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07448702\"]}"); add("{\"term\":\"warplane\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04559634\"]}"); add("{\"term\":\"warragal\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"02118293\", \"02384396\"]}"); add("{\"term\":\"warrant\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"06698201\", \"06700103\", \"13439392\", \"06559050\"]}"); add("{\"term\":\"warrant officer\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10788059\"]}"); add("{\"term\":\"warrantee\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"06698201\", \"10787816\", \"10787940\"]}"); add("{\"term\":\"warranter\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10169047\"]}"); add("{\"term\":\"warrantor\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10169047\"]}"); add("{\"term\":\"warranty\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06698201\"]}"); add("{\"term\":\"warren\", \"synsetCount\":5, \"upperType\":\"NOUN\", \"ids\":[\"08013040\", \"08696253\", \"09497895\", \"11394784\", \"11394947\"]}"); add("{\"term\":\"warren burger\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10891874\"]}"); add("{\"term\":\"warren earl burger\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10891874\"]}"); add("{\"term\":\"warren gamaliel harding\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11048442\"]}"); add("{\"term\":\"warren harding\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11048442\"]}"); add("{\"term\":\"warrener\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10788173\"]}"); add("{\"term\":\"warrigal\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"02118293\", \"02384396\"]}"); add("{\"term\":\"warrior\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10788253\"]}"); add("{\"term\":\"wars of the roses\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01313536\"]}"); add("{\"term\":\"warsaw\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"09005475\"]}"); add("{\"term\":\"warship\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04559982\"]}"); add("{\"term\":\"warszawa\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"09005475\"]}"); add("{\"term\":\"wart\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"04703780\", \"14488467\", \"13918545\"]}"); add("{\"term\":\"warthog\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02399748\"]}"); add("{\"term\":\"wartime\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15318571\"]}"); add("{\"term\":\"wartweed\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12939369\"]}"); add("{\"term\":\"wartwort\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12939369\"]}"); add("{\"term\":\"warwick\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11395082\"]}"); } private static void add(final String JSON) { IndexNoun indexNoun = GsonUtils.toObject(JSON, IndexNoun.class); Collection<IndexNoun> list = (map.containsKey(indexNoun.getTerm())) ? map.get(indexNoun.getTerm()) : new ArrayList<IndexNoun>(); list.add(indexNoun); map.put(indexNoun.getTerm(), list); } public static Collection<IndexNoun> get(final String TERM) { return map.get(TERM); } public static boolean has(final String TERM) { return map.containsKey(TERM); } public static Collection<String> terms() { return map.keySet(); } }
package com.owlbyte.spotifystreamer; import android.app.Application; import android.test.ApplicationTestCase; /** * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a> */ public class ApplicationTest extends ApplicationTestCase<Application> { public ApplicationTest() { super(Application.class); } }
/** * Licensed to ESUP-Portail under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * ESUP-Portail licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.esupportail.sympa.domain.services; import java.util.List; import java.util.Map; import org.esupportail.sympa.domain.model.CreateListInfo; import org.esupportail.sympa.domain.model.UserSympaListWithUrl; import org.esupportail.sympa.domain.services.sympa.AbstractSympaServer; public abstract interface IDomainService { public enum SympaListFields { address, owner, editor, subscriber }; public List<UserSympaListWithUrl> getWhich(); public List<UserSympaListWithUrl> getWhich(List<SympaListCriterion>criterion,boolean mathAll); public List<CreateListInfo> getCreateListInfo(); public Map<String, AbstractSympaServer> getServerList(); public String getHomeUrl(); }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.operators.sort; import java.io.File; import java.io.IOException; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Queue; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.flink.api.common.typeutils.TypeComparator; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.common.typeutils.TypeSerializerFactory; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.disk.ChannelReaderInputViewIterator; import org.apache.flink.runtime.io.disk.iomanager.FileIOChannel; import org.apache.flink.runtime.io.disk.iomanager.BlockChannelReader; import org.apache.flink.runtime.io.disk.iomanager.BlockChannelWriter; import org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView; import org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView; import org.apache.flink.runtime.io.disk.iomanager.IOManager; import org.apache.flink.runtime.io.disk.iomanager.FileIOChannel.ID; import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable; import org.apache.flink.runtime.memory.MemoryAllocationException; import org.apache.flink.runtime.memory.MemoryManager; import org.apache.flink.runtime.util.EmptyMutableObjectIterator; import org.apache.flink.util.MutableObjectIterator; /** * The {@link UnilateralSortMerger} is a full fledged sorter. It implements a multi-way merge sort. Internally, * the logic is factored into three threads (read, sort, spill) which communicate through a set of blocking queues, * forming a closed loop. Memory is allocated using the {@link MemoryManager} interface. Thus the component will * not exceed the provided memory limits. */ public class UnilateralSortMerger<E> implements Sorter<E> { // ------------------------------------------------------------------------ // Constants // ------------------------------------------------------------------------ /** Logging. */ private static final Logger LOG = LoggerFactory.getLogger(UnilateralSortMerger.class); /** Fix length records with a length below this threshold will be in-place sorted, if possible. */ private static final int THRESHOLD_FOR_IN_PLACE_SORTING = 32; /** The minimal number of buffers to use by the writers. */ protected static final int MIN_NUM_WRITE_BUFFERS = 2; /** The maximal number of buffers to use by the writers. */ protected static final int MAX_NUM_WRITE_BUFFERS = 4; /** The minimum number of segments that are required for the sort to operate. */ protected static final int MIN_NUM_SORT_MEM_SEGMENTS = 10; // ------------------------------------------------------------------------ // Threads // ------------------------------------------------------------------------ /** The thread that reads the input channels into buffers and passes them on to the merger. */ private final ThreadBase<E> readThread; /** The thread that merges the buffer handed from the reading thread. */ private final ThreadBase<E> sortThread; /** The thread that handles spilling to secondary storage. */ private final ThreadBase<E> spillThread; // ------------------------------------------------------------------------ // Memory // ------------------------------------------------------------------------ /** The memory segments used first for sorting and later for reading/pre-fetching * during the external merge. */ protected final List<MemorySegment> sortReadMemory; /** The memory segments used to stage data to be written. */ protected final List<MemorySegment> writeMemory; /** The memory manager through which memory is allocated and released. */ protected final MemoryManager memoryManager; // ------------------------------------------------------------------------ // Miscellaneous Fields // ------------------------------------------------------------------------ /** * The handler for large records, that do not go though the in-memory sorter as a whole, but * directly go to disk. */ private final LargeRecordHandler<E> largeRecordHandler; /** * Collection of all currently open channels, to be closed and deleted during cleanup. */ private final HashSet<FileIOChannel> openChannels; /** * Collection of all temporary files created and to be removed when closing the sorter. */ private final HashSet<FileIOChannel.ID> channelsToDeleteAtShutdown; /** * The monitor which guards the iterator field. */ protected final Object iteratorLock = new Object(); /** * The iterator to be returned by the sort-merger. This variable is null, while receiving and merging is still in * progress and it will be set once we have &lt; merge factor sorted sub-streams that will then be streamed sorted. */ protected volatile MutableObjectIterator<E> iterator; /** * The exception that is set, if the iterator cannot be created. */ protected volatile IOException iteratorException; /** * Flag indicating that the sorter was closed. */ protected volatile boolean closed; /** * Whether to reuse objects during deserialization. */ protected final boolean objectReuseEnabled; // ------------------------------------------------------------------------ // Constructor & Shutdown // ------------------------------------------------------------------------ public UnilateralSortMerger(MemoryManager memoryManager, IOManager ioManager, MutableObjectIterator<E> input, AbstractInvokable parentTask, TypeSerializerFactory<E> serializerFactory, TypeComparator<E> comparator, double memoryFraction, int maxNumFileHandles, float startSpillingFraction, boolean handleLargeRecords, boolean objectReuseEnabled) throws IOException, MemoryAllocationException { this(memoryManager, ioManager, input, parentTask, serializerFactory, comparator, memoryFraction, -1, maxNumFileHandles, startSpillingFraction, handleLargeRecords, objectReuseEnabled); } public UnilateralSortMerger(MemoryManager memoryManager, IOManager ioManager, MutableObjectIterator<E> input, AbstractInvokable parentTask, TypeSerializerFactory<E> serializerFactory, TypeComparator<E> comparator, double memoryFraction, int numSortBuffers, int maxNumFileHandles, float startSpillingFraction, boolean handleLargeRecords, boolean objectReuseEnabled) throws IOException, MemoryAllocationException { this(memoryManager, ioManager, input, parentTask, serializerFactory, comparator, memoryFraction, numSortBuffers, maxNumFileHandles, startSpillingFraction, false, handleLargeRecords, objectReuseEnabled); } public UnilateralSortMerger(MemoryManager memoryManager, List<MemorySegment> memory, IOManager ioManager, MutableObjectIterator<E> input, AbstractInvokable parentTask, TypeSerializerFactory<E> serializerFactory, TypeComparator<E> comparator, int numSortBuffers, int maxNumFileHandles, float startSpillingFraction, boolean handleLargeRecords, boolean objectReuseEnabled) throws IOException { this(memoryManager, memory, ioManager, input, parentTask, serializerFactory, comparator, numSortBuffers, maxNumFileHandles, startSpillingFraction, false, handleLargeRecords, objectReuseEnabled); } protected UnilateralSortMerger(MemoryManager memoryManager, IOManager ioManager, MutableObjectIterator<E> input, AbstractInvokable parentTask, TypeSerializerFactory<E> serializerFactory, TypeComparator<E> comparator, double memoryFraction, int numSortBuffers, int maxNumFileHandles, float startSpillingFraction, boolean noSpillingMemory, boolean handleLargeRecords, boolean objectReuseEnabled) throws IOException, MemoryAllocationException { this(memoryManager, memoryManager.allocatePages(parentTask, memoryManager.computeNumberOfPages(memoryFraction)), ioManager, input, parentTask, serializerFactory, comparator, numSortBuffers, maxNumFileHandles, startSpillingFraction, noSpillingMemory, handleLargeRecords, objectReuseEnabled); } protected UnilateralSortMerger(MemoryManager memoryManager, List<MemorySegment> memory, IOManager ioManager, MutableObjectIterator<E> input, AbstractInvokable parentTask, TypeSerializerFactory<E> serializerFactory, TypeComparator<E> comparator, int numSortBuffers, int maxNumFileHandles, float startSpillingFraction, boolean noSpillingMemory, boolean handleLargeRecords, boolean objectReuseEnabled) throws IOException { // sanity checks if (memoryManager == null | (ioManager == null && !noSpillingMemory) | serializerFactory == null | comparator == null) { throw new NullPointerException(); } if (parentTask == null) { throw new NullPointerException("Parent Task must not be null."); } if (maxNumFileHandles < 2) { throw new IllegalArgumentException("Merger cannot work with less than two file handles."); } this.memoryManager = memoryManager; this.objectReuseEnabled = objectReuseEnabled; // adjust the memory quotas to the page size final int numPagesTotal = memory.size(); if (numPagesTotal < MIN_NUM_WRITE_BUFFERS + MIN_NUM_SORT_MEM_SEGMENTS) { throw new IllegalArgumentException("Too little memory provided to sorter to perform task. " + "Required are at least " + (MIN_NUM_WRITE_BUFFERS + MIN_NUM_SORT_MEM_SEGMENTS) + " pages. Current page size is " + memoryManager.getPageSize() + " bytes."); } // determine how many buffers to use for writing final int numWriteBuffers; final int numLargeRecordBuffers; if (noSpillingMemory && !handleLargeRecords) { numWriteBuffers = 0; numLargeRecordBuffers = 0; } else { int numConsumers = (noSpillingMemory ? 0 : 1) + (handleLargeRecords ? 2 : 0); // determine how many buffers we have when we do a full mere with maximal fan-in final int minBuffersForMerging = maxNumFileHandles + numConsumers * MIN_NUM_WRITE_BUFFERS; if (minBuffersForMerging > numPagesTotal) { numWriteBuffers = noSpillingMemory ? 0 : MIN_NUM_WRITE_BUFFERS; numLargeRecordBuffers = handleLargeRecords ? 2*MIN_NUM_WRITE_BUFFERS : 0; maxNumFileHandles = numPagesTotal - numConsumers * MIN_NUM_WRITE_BUFFERS; if (LOG.isDebugEnabled()) { LOG.debug("Reducing maximal merge fan-in to " + maxNumFileHandles + " due to limited memory availability during merge"); } } else { // we are free to choose. make sure that we do not eat up too much memory for writing final int fractionalAuxBuffers = numPagesTotal / (numConsumers * 100); if (fractionalAuxBuffers >= MAX_NUM_WRITE_BUFFERS) { numWriteBuffers = noSpillingMemory ? 0 : MAX_NUM_WRITE_BUFFERS; numLargeRecordBuffers = handleLargeRecords ? 2*MAX_NUM_WRITE_BUFFERS : 0; } else { numWriteBuffers = noSpillingMemory ? 0 : Math.max(MIN_NUM_WRITE_BUFFERS, fractionalAuxBuffers); // at least the lower bound numLargeRecordBuffers = handleLargeRecords ? Math.max(2*MIN_NUM_WRITE_BUFFERS, fractionalAuxBuffers) // at least the lower bound : 0; } } } final int sortMemPages = numPagesTotal - numWriteBuffers - numLargeRecordBuffers; final long sortMemory = ((long) sortMemPages) * memoryManager.getPageSize(); // decide how many sort buffers to use if (numSortBuffers < 1) { if (sortMemory > 100 * 1024 * 1024) { numSortBuffers = 2; } else { numSortBuffers = 1; } } final int numSegmentsPerSortBuffer = sortMemPages / numSortBuffers; if (LOG.isDebugEnabled()) { LOG.debug(String.format("Instantiating sorter with %d pages of sorting memory (=" + "%d bytes total) divided over %d sort buffers (%d pages per buffer). Using %d" + " buffers for writing sorted results and merging maximally %d streams at once. " + "Using %d memory segments for large record spilling.", sortMemPages, sortMemory, numSortBuffers, numSegmentsPerSortBuffer, numWriteBuffers, maxNumFileHandles, numLargeRecordBuffers)); } this.sortReadMemory = memory; this.writeMemory = new ArrayList<MemorySegment>(numWriteBuffers); final TypeSerializer<E> serializer = serializerFactory.getSerializer(); // move some pages from the sort memory to the write memory if (numWriteBuffers > 0) { for (int i = 0; i < numWriteBuffers; i++) { this.writeMemory.add(this.sortReadMemory.remove(this.sortReadMemory.size() - 1)); } } if (numLargeRecordBuffers > 0) { List<MemorySegment> mem = new ArrayList<MemorySegment>(); for (int i = 0; i < numLargeRecordBuffers; i++) { mem.add(this.sortReadMemory.remove(this.sortReadMemory.size() - 1)); } this.largeRecordHandler = new LargeRecordHandler<E>(serializer, comparator.duplicate(), ioManager, memoryManager, mem, parentTask, maxNumFileHandles); } else { this.largeRecordHandler = null; } // circular queues pass buffers between the threads final CircularQueues<E> circularQueues = new CircularQueues<E>(); // allocate the sort buffers and fill empty queue with them final Iterator<MemorySegment> segments = this.sortReadMemory.iterator(); for (int i = 0; i < numSortBuffers; i++) { // grab some memory final List<MemorySegment> sortSegments = new ArrayList<MemorySegment>(numSegmentsPerSortBuffer); for (int k = (i == numSortBuffers - 1 ? Integer.MAX_VALUE : numSegmentsPerSortBuffer); k > 0 && segments.hasNext(); k--) { sortSegments.add(segments.next()); } final TypeComparator<E> comp = comparator.duplicate(); final InMemorySorter<E> buffer; // instantiate a fix-length in-place sorter, if possible, otherwise the out-of-place sorter if (comp.supportsSerializationWithKeyNormalization() && serializer.getLength() > 0 && serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING) { buffer = new FixedLengthRecordSorter<E>(serializerFactory.getSerializer(), comp, sortSegments); } else { buffer = new NormalizedKeySorter<E>(serializerFactory.getSerializer(), comp, sortSegments); } // add to empty queue CircularElement<E> element = new CircularElement<E>(i, buffer, sortSegments); circularQueues.empty.add(element); } // exception handling ExceptionHandler<IOException> exceptionHandler = new ExceptionHandler<IOException>() { public void handleException(IOException exception) { // forward exception if (!closed) { setResultIteratorException(exception); close(); } } }; // create sets that track the channels we need to clean up when closing the sorter this.channelsToDeleteAtShutdown = new HashSet<FileIOChannel.ID>(64); this.openChannels = new HashSet<FileIOChannel>(64); // start the thread that reads the input channels this.readThread = getReadingThread(exceptionHandler, input, circularQueues, largeRecordHandler, parentTask, serializer, ((long) (startSpillingFraction * sortMemory))); // start the thread that sorts the buffers this.sortThread = getSortingThread(exceptionHandler, circularQueues, parentTask); // start the thread that handles spilling to secondary storage this.spillThread = getSpillingThread(exceptionHandler, circularQueues, parentTask, memoryManager, ioManager, serializerFactory, comparator, this.sortReadMemory, this.writeMemory, maxNumFileHandles); // propagate the context class loader to the spawned threads ClassLoader contextLoader = Thread.currentThread().getContextClassLoader(); if (contextLoader != null) { if (this.readThread != null) { this.readThread.setContextClassLoader(contextLoader); } if (this.sortThread != null) { this.sortThread.setContextClassLoader(contextLoader); } if (this.spillThread != null) { this.spillThread.setContextClassLoader(contextLoader); } } startThreads(); } /** * Starts all the threads that are used by this sort-merger. */ protected void startThreads() { if (this.readThread != null) { this.readThread.start(); } if (this.sortThread != null) { this.sortThread.start(); } if (this.spillThread != null) { this.spillThread.start(); } } /** * Shuts down all the threads initiated by this sort/merger. Also releases all previously allocated * memory, if it has not yet been released by the threads, and closes and deletes all channels (removing * the temporary files). * <p> * The threads are set to exit directly, but depending on their operation, it may take a while to actually happen. * The sorting thread will for example not finish before the current batch is sorted. This method attempts to wait * for the working thread to exit. If it is however interrupted, the method exits immediately and is not guaranteed * how long the threads continue to exist and occupy resources afterwards. * * @see java.io.Closeable#close() */ @Override public void close() { // check if the sorter has been closed before synchronized (this) { if (this.closed) { return; } // mark as closed this.closed = true; } // from here on, the code is in a try block, because even through errors might be thrown in this block, // we need to make sure that all the memory is released. try { // if the result iterator has not been obtained yet, set the exception synchronized (this.iteratorLock) { if (this.iteratorException == null) { this.iteratorException = new IOException("The sorter has been closed."); this.iteratorLock.notifyAll(); } } // stop all the threads if (this.readThread != null) { try { this.readThread.shutdown(); } catch (Throwable t) { LOG.error("Error shutting down reader thread: " + t.getMessage(), t); } } if (this.sortThread != null) { try { this.sortThread.shutdown(); } catch (Throwable t) { LOG.error("Error shutting down sorter thread: " + t.getMessage(), t); } } if (this.spillThread != null) { try { this.spillThread.shutdown(); } catch (Throwable t) { LOG.error("Error shutting down spilling thread: " + t.getMessage(), t); } } try { if (this.readThread != null) { this.readThread.join(); } if (this.sortThread != null) { this.sortThread.join(); } if (this.spillThread != null) { this.spillThread.join(); } } catch (InterruptedException iex) { LOG.debug("Closing of sort/merger was interrupted. " + "The reading/sorting/spilling threads may still be working.", iex); } } finally { // RELEASE ALL MEMORY. If the threads and channels are still running, this should cause // exceptions, because their memory segments are freed try { if (!this.writeMemory.isEmpty()) { this.memoryManager.release(this.writeMemory); } this.writeMemory.clear(); } catch (Throwable t) {} try { if (!this.sortReadMemory.isEmpty()) { this.memoryManager.release(this.sortReadMemory); } this.sortReadMemory.clear(); } catch (Throwable t) {} // we have to loop this, because it may fail with a concurrent modification exception while (!this.openChannels.isEmpty()) { try { for (Iterator<FileIOChannel> channels = this.openChannels.iterator(); channels.hasNext(); ) { final FileIOChannel channel = channels.next(); channels.remove(); channel.closeAndDelete(); } } catch (Throwable t) {} } // we have to loop this, because it may fail with a concurrent modification exception while (!this.channelsToDeleteAtShutdown.isEmpty()) { try { for (Iterator<FileIOChannel.ID> channels = this.channelsToDeleteAtShutdown.iterator(); channels.hasNext(); ) { final FileIOChannel.ID channel = channels.next(); channels.remove(); try { final File f = new File(channel.getPath()); if (f.exists()) { f.delete(); } } catch (Throwable t) {} } } catch (Throwable t) {} } try { if (this.largeRecordHandler != null) { this.largeRecordHandler.close(); } } catch (Throwable t) {} } } // ------------------------------------------------------------------------ // Factory Methods // ------------------------------------------------------------------------ /** * Creates the reading thread. The reading thread simply reads the data off the input and puts it * into the buffer where it will be sorted. * <p> * The returned thread is not yet started. * * @param exceptionHandler * The handler for exceptions in the thread. * @param reader * The reader from which the thread reads. * @param queues * The queues through which the thread communicates with the other threads. * @param parentTask * The task at which the thread registers itself (for profiling purposes). * @param serializer * The serializer used to serialize records. * @param startSpillingBytes * The number of bytes after which the reader thread will send the notification to * start the spilling. * * @return The thread that reads data from an input, writes it into sort buffers and puts * them into a queue. */ protected ThreadBase<E> getReadingThread(ExceptionHandler<IOException> exceptionHandler, MutableObjectIterator<E> reader, CircularQueues<E> queues, LargeRecordHandler<E> largeRecordHandler, AbstractInvokable parentTask, TypeSerializer<E> serializer, long startSpillingBytes) { return new ReadingThread<E>(exceptionHandler, reader, queues, largeRecordHandler, serializer.createInstance(),parentTask, startSpillingBytes); } protected ThreadBase<E> getSortingThread(ExceptionHandler<IOException> exceptionHandler, CircularQueues<E> queues, AbstractInvokable parentTask) { return new SortingThread<E>(exceptionHandler, queues, parentTask); } protected ThreadBase<E> getSpillingThread(ExceptionHandler<IOException> exceptionHandler, CircularQueues<E> queues, AbstractInvokable parentTask, MemoryManager memoryManager, IOManager ioManager, TypeSerializerFactory<E> serializerFactory, TypeComparator<E> comparator, List<MemorySegment> sortReadMemory, List<MemorySegment> writeMemory, int maxFileHandles) { return new SpillingThread(exceptionHandler, queues, parentTask, memoryManager, ioManager, serializerFactory.getSerializer(), comparator, sortReadMemory, writeMemory, maxFileHandles); } // ------------------------------------------------------------------------ // Result Iterator // ------------------------------------------------------------------------ @Override public MutableObjectIterator<E> getIterator() throws InterruptedException { synchronized (this.iteratorLock) { // wait while both the iterator and the exception are not set while (this.iterator == null && this.iteratorException == null) { this.iteratorLock.wait(); } if (this.iteratorException != null) { throw new RuntimeException("Error obtaining the sorted input: " + this.iteratorException.getMessage(), this.iteratorException); } else { return this.iterator; } } } /** * Sets the result iterator. By setting the result iterator, all threads that are waiting for the result * iterator are notified and will obtain it. * * @param iterator The result iterator to set. */ protected final void setResultIterator(MutableObjectIterator<E> iterator) { synchronized (this.iteratorLock) { // set the result iterator only, if no exception has occurred if (this.iteratorException == null) { this.iterator = iterator; this.iteratorLock.notifyAll(); } } } /** * Reports an exception to all threads that are waiting for the result iterator. * * @param ioex The exception to be reported to the threads that wait for the result iterator. */ protected final void setResultIteratorException(IOException ioex) { synchronized (this.iteratorLock) { if (this.iteratorException == null) { this.iteratorException = ioex; this.iteratorLock.notifyAll(); } } } // ------------------------------------------------------------------------ // Inter-Thread Communication // ------------------------------------------------------------------------ /** * The element that is passed as marker for the end of data. */ private static final CircularElement<Object> EOF_MARKER = new CircularElement<Object>(); /** * The element that is passed as marker for signal beginning of spilling. */ private static final CircularElement<Object> SPILLING_MARKER = new CircularElement<Object>(); /** * Gets the element that is passed as marker for the end of data. * * @return The element that is passed as marker for the end of data. */ protected static <T> CircularElement<T> endMarker() { @SuppressWarnings("unchecked") CircularElement<T> c = (CircularElement<T>) EOF_MARKER; return c; } /** * Gets the element that is passed as marker for signal beginning of spilling. * * @return The element that is passed as marker for signal beginning of spilling. */ protected static <T> CircularElement<T> spillingMarker() { @SuppressWarnings("unchecked") CircularElement<T> c = (CircularElement<T>) SPILLING_MARKER; return c; } /** * Class representing buffers that circulate between the reading, sorting and spilling thread. */ protected static final class CircularElement<E> { final int id; final InMemorySorter<E> buffer; final List<MemorySegment> memory; public CircularElement() { this.id = -1; this.buffer = null; this.memory = null; } public CircularElement(int id, InMemorySorter<E> buffer, List<MemorySegment> memory) { this.id = id; this.buffer = buffer; this.memory = memory; } } /** * Collection of queues that are used for the communication between the threads. */ protected static final class CircularQueues<E> { final BlockingQueue<CircularElement<E>> empty; final BlockingQueue<CircularElement<E>> sort; final BlockingQueue<CircularElement<E>> spill; public CircularQueues() { this.empty = new LinkedBlockingQueue<CircularElement<E>>(); this.sort = new LinkedBlockingQueue<CircularElement<E>>(); this.spill = new LinkedBlockingQueue<CircularElement<E>>(); } public CircularQueues(int numElements) { this.empty = new ArrayBlockingQueue<CircularElement<E>>(numElements); this.sort = new ArrayBlockingQueue<CircularElement<E>>(numElements); this.spill = new ArrayBlockingQueue<CircularElement<E>>(numElements); } } // ------------------------------------------------------------------------ // Threads // ------------------------------------------------------------------------ /** * Base class for all working threads in this sort-merger. The specific threads for reading, sorting, spilling, * merging, etc... extend this subclass. * <p> * The threads are designed to terminate themselves when the task they are set up to do is completed. Further more, * they terminate immediately when the <code>shutdown()</code> method is called. */ protected static abstract class ThreadBase<E> extends Thread implements Thread.UncaughtExceptionHandler { /** * The queue of empty buffer that can be used for reading; */ protected final CircularQueues<E> queues; /** * The exception handler for any problems. */ private final ExceptionHandler<IOException> exceptionHandler; /** * The flag marking this thread as alive. */ private volatile boolean alive; /** * Creates a new thread. * * @param exceptionHandler The exception handler to call for all exceptions. * @param name The name of the thread. * @param queues The queues used to pass buffers between the threads. * @param parentTask The task that started this thread. If non-null, it is used to register this thread. */ protected ThreadBase(ExceptionHandler<IOException> exceptionHandler, String name, CircularQueues<E> queues, AbstractInvokable parentTask) { // thread setup super(name); this.setDaemon(true); // exception handling this.exceptionHandler = exceptionHandler; this.setUncaughtExceptionHandler(this); this.queues = queues; this.alive = true; } /** * Implements exception handling and delegates to go(). */ public void run() { try { go(); } catch (Throwable t) { internalHandleException(new IOException("Thread '" + getName() + "' terminated due to an exception: " + t.getMessage(), t)); } } /** * Equivalent to the run() method. * * @throws IOException Exceptions that prohibit correct completion of the work may be thrown by the thread. */ protected abstract void go() throws IOException; /** * Checks whether this thread is still alive. * * @return true, if the thread is alive, false otherwise. */ public boolean isRunning() { return this.alive; } /** * Forces an immediate shutdown of the thread. Looses any state and all buffers that the thread is currently * working on. This terminates cleanly for the JVM, but looses intermediate results. */ public void shutdown() { this.alive = false; this.interrupt(); } /** * Internally handles an exception and makes sure that this method returns without a problem. * * @param ioex * The exception to handle. */ protected final void internalHandleException(IOException ioex) { if (!isRunning()) { // discard any exception that occurs when after the thread is killed. return; } if (this.exceptionHandler != null) { try { this.exceptionHandler.handleException(ioex); } catch (Throwable t) {} } } /* (non-Javadoc) * @see java.lang.Thread.UncaughtExceptionHandler#uncaughtException(java.lang.Thread, java.lang.Throwable) */ @Override public void uncaughtException(Thread t, Throwable e) { internalHandleException(new IOException("Thread '" + t.getName() + "' terminated due to an uncaught exception: " + e.getMessage(), e)); } } /** * The thread that consumes the input data and puts it into a buffer that will be sorted. */ protected static class ReadingThread<E> extends ThreadBase<E> { /** The input channels to read from. */ private final MutableObjectIterator<E> reader; private final LargeRecordHandler<E> largeRecords; /** The fraction of the buffers that must be full before the spilling starts. */ private final long startSpillingBytes; /** The object into which the thread reads the data from the input. */ private final E readTarget; /** * Creates a new reading thread. * * @param exceptionHandler The exception handler to call for all exceptions. * @param reader The reader to pull the data from. * @param queues The queues used to pass buffers between the threads. * @param parentTask The task that started this thread. If non-null, it is used to register this thread. */ public ReadingThread(ExceptionHandler<IOException> exceptionHandler, MutableObjectIterator<E> reader, CircularQueues<E> queues, LargeRecordHandler<E> largeRecordsHandler, E readTarget, AbstractInvokable parentTask, long startSpillingBytes) { super(exceptionHandler, "SortMerger Reading Thread", queues, parentTask); // members this.reader = reader; this.readTarget = readTarget; this.startSpillingBytes = startSpillingBytes; this.largeRecords = largeRecordsHandler; } /** * The entry point for the thread. Gets a buffer for all threads and then loops as long as there is input * available. */ public void go() throws IOException { final MutableObjectIterator<E> reader = this.reader; E current = this.readTarget; E leftoverRecord = null; CircularElement<E> element = null; long bytesUntilSpilling = this.startSpillingBytes; boolean done = false; // check if we should directly spill if (bytesUntilSpilling < 1) { bytesUntilSpilling = 0; // add the spilling marker this.queues.sort.add(UnilateralSortMerger.<E>spillingMarker()); } // now loop until all channels have no more input data while (!done && isRunning()) { // grab the next buffer while (element == null) { try { element = this.queues.empty.take(); } catch (InterruptedException iex) { throw new IOException(iex); } } // get the new buffer and check it final InMemorySorter<E> buffer = element.buffer; if (!buffer.isEmpty()) { throw new IOException("New buffer is not empty."); } if (LOG.isDebugEnabled()) { LOG.debug("Retrieved empty read buffer " + element.id + "."); } // write the last leftover pair, if we have one if (leftoverRecord != null) { if (!buffer.write(leftoverRecord)) { // did not fit in a fresh buffer, must be large... if (this.largeRecords != null) { if (LOG.isDebugEnabled()) { LOG.debug("Large record did not fit into a fresh sort buffer. Putting into large record store."); } this.largeRecords.addRecord(leftoverRecord); } else { throw new IOException("The record exceeds the maximum size of a sort buffer (current maximum: " + buffer.getCapacity() + " bytes)."); } buffer.reset(); } leftoverRecord = null; } // we have two distinct code paths, depending on whether the spilling // threshold will be crossed in the current buffer, or not. boolean available = true; if (bytesUntilSpilling > 0 && buffer.getCapacity() >= bytesUntilSpilling) { boolean fullBuffer = false; // spilling will be triggered while this buffer is filled // loop until the buffer is full or the reader is exhausted E newCurrent; while (isRunning() && (available = (newCurrent = reader.next(current)) != null)) { current = newCurrent; if (!buffer.write(current)) { leftoverRecord = current; fullBuffer = true; break; } // successfully added record if (bytesUntilSpilling - buffer.getOccupancy() <= 0) { bytesUntilSpilling = 0; // send the spilling marker final CircularElement<E> SPILLING_MARKER = spillingMarker(); this.queues.sort.add(SPILLING_MARKER); // we drop out of this loop and continue with the loop that // does not have the check break; } } if (fullBuffer) { // buffer is full. it may be that the last element would have crossed the // spilling threshold, so check it if (bytesUntilSpilling > 0) { bytesUntilSpilling -= buffer.getCapacity(); if (bytesUntilSpilling <= 0) { bytesUntilSpilling = 0; // send the spilling marker final CircularElement<E> SPILLING_MARKER = spillingMarker(); this.queues.sort.add(SPILLING_MARKER); } } // send the buffer if (LOG.isDebugEnabled()) { LOG.debug("Emitting full buffer from reader thread: " + element.id + "."); } this.queues.sort.add(element); element = null; continue; } } else if (bytesUntilSpilling > 0) { // this block must not be entered, if the last loop dropped out because // the input is exhausted. bytesUntilSpilling -= buffer.getCapacity(); if (bytesUntilSpilling <= 0) { bytesUntilSpilling = 0; // send the spilling marker final CircularElement<E> SPILLING_MARKER = spillingMarker(); this.queues.sort.add(SPILLING_MARKER); } } // no spilling will be triggered (any more) while this buffer is being processed // loop until the buffer is full or the reader is exhausted if (available) { E newCurrent; while (isRunning() && ((newCurrent = reader.next(current)) != null)) { current = newCurrent; if (!buffer.write(current)) { leftoverRecord = current; break; } } } // check whether the buffer is exhausted or the reader is if (leftoverRecord != null) { if (LOG.isDebugEnabled()) { LOG.debug("Emitting full buffer from reader thread: " + element.id + "."); } } else { done = true; if (LOG.isDebugEnabled()) { LOG.debug("Emitting final buffer from reader thread: " + element.id + "."); } } // we can use add to add the element because we have no capacity restriction if (!buffer.isEmpty()) { this.queues.sort.add(element); } else { buffer.reset(); this.queues.empty.add(element); } element = null; } // we read all there is to read, or we are no longer running if (!isRunning()) { return; } // add the sentinel to notify the receivers that the work is done // send the EOF marker final CircularElement<E> EOF_MARKER = endMarker(); this.queues.sort.add(EOF_MARKER); LOG.debug("Reading thread done."); } } /** * The thread that sorts filled buffers. */ protected static class SortingThread<E> extends ThreadBase<E> { private final IndexedSorter sorter; /** * Creates a new sorting thread. * * @param exceptionHandler The exception handler to call for all exceptions. * @param queues The queues used to pass buffers between the threads. * @param parentTask The task that started this thread. If non-null, it is used to register this thread. */ public SortingThread(ExceptionHandler<IOException> exceptionHandler, CircularQueues<E> queues, AbstractInvokable parentTask) { super(exceptionHandler, "SortMerger sorting thread", queues, parentTask); // members this.sorter = new QuickSort(); } /** * Entry point of the thread. */ public void go() throws IOException { boolean alive = true; // loop as long as the thread is marked alive while (isRunning() && alive) { CircularElement<E> element = null; try { element = this.queues.sort.take(); } catch (InterruptedException iex) { if (isRunning()) { if (LOG.isErrorEnabled()) { LOG.error( "Sorting thread was interrupted (without being shut down) while grabbing a buffer. " + "Retrying to grab buffer..."); } continue; } else { return; } } if (element != EOF_MARKER && element != SPILLING_MARKER) { if (element.buffer.size() == 0) { element.buffer.reset(); this.queues.empty.add(element); continue; } if (LOG.isDebugEnabled()) { LOG.debug("Sorting buffer " + element.id + "."); } this.sorter.sort(element.buffer); if (LOG.isDebugEnabled()) { LOG.debug("Sorted buffer " + element.id + "."); } } else if (element == EOF_MARKER) { if (LOG.isDebugEnabled()) { LOG.debug("Sorting thread done."); } alive = false; } this.queues.spill.add(element); } } } /** * The thread that handles the spilling of intermediate results and sets up the merging. It also merges the * channels until sufficiently few channels remain to perform the final streamed merge. */ protected class SpillingThread extends ThreadBase<E> { protected final MemoryManager memManager; // memory manager to release memory protected final IOManager ioManager; // I/O manager to create channels protected final TypeSerializer<E> serializer; // The serializer for the data type protected final TypeComparator<E> comparator; // The comparator that establishes the order relation. protected final List<MemorySegment> writeMemory; // memory segments for writing protected final List<MemorySegment> mergeReadMemory; // memory segments for sorting/reading protected final int maxFanIn; protected final int numWriteBuffersToCluster; /** * Creates the spilling thread. * * @param exceptionHandler The exception handler to call for all exceptions. * @param queues The queues used to pass buffers between the threads. * @param parentTask The task that started this thread. If non-null, it is used to register this thread. * @param memManager The memory manager used to allocate buffers for the readers and writers. * @param ioManager The I/I manager used to instantiate readers and writers from. * @param serializer * @param comparator * @param sortReadMemory * @param writeMemory * @param maxNumFileHandles */ public SpillingThread(ExceptionHandler<IOException> exceptionHandler, CircularQueues<E> queues, AbstractInvokable parentTask, MemoryManager memManager, IOManager ioManager, TypeSerializer<E> serializer, TypeComparator<E> comparator, List<MemorySegment> sortReadMemory, List<MemorySegment> writeMemory, int maxNumFileHandles) { super(exceptionHandler, "SortMerger spilling thread", queues, parentTask); this.memManager = memManager; this.ioManager = ioManager; this.serializer = serializer; this.comparator = comparator; this.mergeReadMemory = sortReadMemory; this.writeMemory = writeMemory; this.maxFanIn = maxNumFileHandles; this.numWriteBuffersToCluster = writeMemory.size() >= 4 ? writeMemory.size() / 2 : 1; } /** * Entry point of the thread. */ public void go() throws IOException { final Queue<CircularElement<E>> cache = new ArrayDeque<CircularElement<E>>(); CircularElement<E> element; boolean cacheOnly = false; // ------------------- In-Memory Cache ------------------------ // fill cache while (isRunning()) { // take next element from queue try { element = this.queues.spill.take(); } catch (InterruptedException iex) { throw new IOException("The spilling thread was interrupted."); } if (element == SPILLING_MARKER) { break; } else if (element == EOF_MARKER) { cacheOnly = true; break; } cache.add(element); } // check whether the thread was canceled if (!isRunning()) { return; } MutableObjectIterator<E> largeRecords = null; // check if we can stay in memory with the large record handler if (cacheOnly && largeRecordHandler != null && largeRecordHandler.hasData()) { List<MemorySegment> memoryForLargeRecordSorting = new ArrayList<MemorySegment>(); CircularElement<E> circElement; while ((circElement = this.queues.empty.poll()) != null) { circElement.buffer.dispose(); memoryForLargeRecordSorting.addAll(circElement.memory); } if (memoryForLargeRecordSorting.isEmpty()) { cacheOnly = false; LOG.debug("Going to disk-based merge because of large records."); } else { LOG.debug("Sorting large records, to add them to in-memory merge."); largeRecords = largeRecordHandler.finishWriteAndSortKeys(memoryForLargeRecordSorting); } } // ------------------- In-Memory Merge ------------------------ if (cacheOnly) { // operates on in-memory buffers only if (LOG.isDebugEnabled()) { LOG.debug("Initiating in memory merge."); } List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(cache.size() + 1); // iterate buffers and collect a set of iterators for (CircularElement<E> cached : cache) { // note: the yielded iterator only operates on the buffer heap (and disregards the stack) iterators.add(cached.buffer.getIterator()); } if (largeRecords != null) { iterators.add(largeRecords); } // release the remaining sort-buffers if (LOG.isDebugEnabled()) { LOG.debug("Releasing unused sort-buffer memory."); } disposeSortBuffers(true); // set lazy iterator setResultIterator(iterators.isEmpty() ? EmptyMutableObjectIterator.<E>get() : iterators.size() == 1 ? iterators.get(0) : new MergeIterator<E>(iterators, this.comparator)); return; } // ------------------- Spilling Phase ------------------------ final FileIOChannel.Enumerator enumerator = this.ioManager.createChannelEnumerator(); List<ChannelWithBlockCount> channelIDs = new ArrayList<ChannelWithBlockCount>(); // loop as long as the thread is marked alive and we do not see the final element while (isRunning()) { try { element = takeNext(this.queues.spill, cache); } catch (InterruptedException iex) { if (isRunning()) { LOG.error("Sorting thread was interrupted (without being shut down) while grabbing a buffer. " + "Retrying to grab buffer..."); continue; } else { return; } } // check if we are still running if (!isRunning()) { return; } // check if this is the end-of-work buffer if (element == EOF_MARKER) { break; } // open next channel FileIOChannel.ID channel = enumerator.next(); registerChannelToBeRemovedAtShudown(channel); // create writer final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel); registerOpenChannelToBeRemovedAtShudown(writer); final ChannelWriterOutputView output = new ChannelWriterOutputView(writer, this.writeMemory, this.memManager.getPageSize()); // write sort-buffer to channel if (LOG.isDebugEnabled()) { LOG.debug("Spilling buffer " + element.id + "."); } element.buffer.writeToOutput(output, largeRecordHandler); if (LOG.isDebugEnabled()) { LOG.debug("Spilled buffer " + element.id + "."); } output.close(); unregisterOpenChannelToBeRemovedAtShudown(writer); if (output.getBytesWritten() > 0) { channelIDs.add(new ChannelWithBlockCount(channel, output.getBlockCount())); } // pass empty sort-buffer to reading thread element.buffer.reset(); this.queues.empty.add(element); } // done with the spilling if (LOG.isDebugEnabled()) { LOG.debug("Spilling done."); LOG.debug("Releasing sort-buffer memory."); } // clear the sort buffers, but do not return the memory to the manager, as we use it for merging disposeSortBuffers(false); // ------------------- Merging Phase ------------------------ // make sure we have enough memory to merge and for large record handling List<MemorySegment> mergeReadMemory; if (largeRecordHandler != null && largeRecordHandler.hasData()) { List<MemorySegment> longRecMem; if (channelIDs.isEmpty()) { // only long records longRecMem = this.mergeReadMemory; mergeReadMemory = Collections.emptyList(); } else { int maxMergedStreams = Math.min(this.maxFanIn, channelIDs.size()); int pagesPerStream = Math.max(MIN_NUM_WRITE_BUFFERS, Math.min(MAX_NUM_WRITE_BUFFERS, this.mergeReadMemory.size() / 2 / maxMergedStreams)); int totalMergeReadMemory = maxMergedStreams * pagesPerStream; // grab the merge memory mergeReadMemory = new ArrayList<MemorySegment>(totalMergeReadMemory); for (int i = 0; i < totalMergeReadMemory; i++) { mergeReadMemory.add(this.mergeReadMemory.get(i)); } // the remainder of the memory goes to the long record sorter longRecMem = new ArrayList<MemorySegment>(); for (int i = totalMergeReadMemory; i < this.mergeReadMemory.size(); i++) { longRecMem.add(this.mergeReadMemory.get(i)); } } if (LOG.isDebugEnabled()) { LOG.debug("Sorting keys for large records."); } largeRecords = largeRecordHandler.finishWriteAndSortKeys(longRecMem); } else { mergeReadMemory = this.mergeReadMemory; } // merge channels until sufficient file handles are available while (isRunning() && channelIDs.size() > this.maxFanIn) { channelIDs = mergeChannelList(channelIDs, mergeReadMemory, this.writeMemory); } // from here on, we won't write again this.memManager.release(this.writeMemory); this.writeMemory.clear(); // check if we have spilled some data at all if (channelIDs.isEmpty()) { if (largeRecords == null) { setResultIterator(EmptyMutableObjectIterator.<E>get()); } else { setResultIterator(largeRecords); } } else { if (LOG.isDebugEnabled()) { LOG.debug("Beginning final merge."); } // allocate the memory for the final merging step List<List<MemorySegment>> readBuffers = new ArrayList<List<MemorySegment>>(channelIDs.size()); // allocate the read memory and register it to be released getSegmentsForReaders(readBuffers, mergeReadMemory, channelIDs.size()); // get the readers and register them to be released setResultIterator(getMergingIterator(channelIDs, readBuffers, new ArrayList<FileIOChannel>(channelIDs.size()), largeRecords)); } // done if (LOG.isDebugEnabled()) { LOG.debug("Spilling and merging thread done."); } } /** * Releases the memory that is registered for in-memory sorted run generation. */ protected final void disposeSortBuffers(boolean releaseMemory) { while (!this.queues.empty.isEmpty()) { try { CircularElement<E> element = this.queues.empty.take(); element.buffer.dispose(); if (releaseMemory) { this.memManager.release(element.memory); } } catch (InterruptedException iex) { if (isRunning()) { LOG.error("Spilling thread was interrupted (without being shut down) while collecting empty buffers to release them. " + "Retrying to collect buffers..."); } else { return; } } } } protected final CircularElement<E> takeNext(BlockingQueue<CircularElement<E>> queue, Queue<CircularElement<E>> cache) throws InterruptedException { return cache.isEmpty() ? queue.take() : cache.poll(); } // ------------------------------------------------------------------------ // Result Merging // ------------------------------------------------------------------------ /** * Returns an iterator that iterates over the merged result from all given channels. * * @param channelIDs The channels that are to be merged and returned. * @param inputSegments The buffers to be used for reading. The list contains for each channel one * list of input segments. The size of the <code>inputSegments</code> list must be equal to * that of the <code>channelIDs</code> list. * @return An iterator over the merged records of the input channels. * @throws IOException Thrown, if the readers encounter an I/O problem. */ protected final MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs, final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords) throws IOException { // create one iterator per channel id if (LOG.isDebugEnabled()) { LOG.debug("Performing merge of " + channelIDs.size() + " sorted streams."); } final List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(channelIDs.size() + 1); for (int i = 0; i < channelIDs.size(); i++) { final ChannelWithBlockCount channel = channelIDs.get(i); final List<MemorySegment> segsForChannel = inputSegments.get(i); // create a reader. if there are multiple segments for the reader, issue multiple together per I/O request final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel()); readerList.add(reader); registerOpenChannelToBeRemovedAtShudown(reader); unregisterChannelToBeRemovedAtShudown(channel.getChannel()); // wrap channel reader as a view, to get block spanning record deserialization final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, channel.getBlockCount(), false); iterators.add(new ChannelReaderInputViewIterator<E>(inView, null, this.serializer)); } if (largeRecords != null) { iterators.add(largeRecords); } return new MergeIterator<E>(iterators, this.comparator); } /** * Merges the given sorted runs to a smaller number of sorted runs. * * @param channelIDs The IDs of the sorted runs that need to be merged. * @param allReadBuffers * @param writeBuffers The buffers to be used by the writers. * @return A list of the IDs of the merged channels. * @throws IOException Thrown, if the readers or writers encountered an I/O problem. */ protected final List<ChannelWithBlockCount> mergeChannelList(final List<ChannelWithBlockCount> channelIDs, final List<MemorySegment> allReadBuffers, final List<MemorySegment> writeBuffers) throws IOException { // A channel list with length maxFanIn<sup>i</sup> can be merged to maxFanIn files in i-1 rounds where every merge // is a full merge with maxFanIn input channels. A partial round includes merges with fewer than maxFanIn // inputs. It is most efficient to perform the partial round first. final double scale = Math.ceil(Math.log(channelIDs.size()) / Math.log(this.maxFanIn)) - 1; final int numStart = channelIDs.size(); final int numEnd = (int) Math.pow(this.maxFanIn, scale); final int numMerges = (int) Math.ceil((numStart - numEnd) / (double) (this.maxFanIn - 1)); final int numNotMerged = numEnd - numMerges; final int numToMerge = numStart - numNotMerged; // unmerged channel IDs are copied directly to the result list final List<ChannelWithBlockCount> mergedChannelIDs = new ArrayList<ChannelWithBlockCount>(numEnd); mergedChannelIDs.addAll(channelIDs.subList(0, numNotMerged)); final int channelsToMergePerStep = (int) Math.ceil(numToMerge / (double) numMerges); // allocate the memory for the merging step final List<List<MemorySegment>> readBuffers = new ArrayList<List<MemorySegment>>(channelsToMergePerStep); getSegmentsForReaders(readBuffers, allReadBuffers, channelsToMergePerStep); final List<ChannelWithBlockCount> channelsToMergeThisStep = new ArrayList<ChannelWithBlockCount>(channelsToMergePerStep); int channelNum = numNotMerged; while (isRunning() && channelNum < channelIDs.size()) { channelsToMergeThisStep.clear(); for (int i = 0; i < channelsToMergePerStep && channelNum < channelIDs.size(); i++, channelNum++) { channelsToMergeThisStep.add(channelIDs.get(channelNum)); } mergedChannelIDs.add(mergeChannels(channelsToMergeThisStep, readBuffers, writeBuffers)); } return mergedChannelIDs; } /** * Merges the sorted runs described by the given Channel IDs into a single sorted run. The merging process * uses the given read and write buffers. * * @param channelIDs The IDs of the runs' channels. * @param readBuffers The buffers for the readers that read the sorted runs. * @param writeBuffers The buffers for the writer that writes the merged channel. * @return The ID and number of blocks of the channel that describes the merged run. */ protected ChannelWithBlockCount mergeChannels(List<ChannelWithBlockCount> channelIDs, List<List<MemorySegment>> readBuffers, List<MemorySegment> writeBuffers) throws IOException { // the list with the readers, to be closed at shutdown final List<FileIOChannel> channelAccesses = new ArrayList<FileIOChannel>(channelIDs.size()); // the list with the target iterators final MergeIterator<E> mergeIterator = getMergingIterator(channelIDs, readBuffers, channelAccesses, null); // create a new channel writer final FileIOChannel.ID mergedChannelID = this.ioManager.createChannel(); registerChannelToBeRemovedAtShudown(mergedChannelID); final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(mergedChannelID); registerOpenChannelToBeRemovedAtShudown(writer); final ChannelWriterOutputView output = new ChannelWriterOutputView(writer, writeBuffers, this.memManager.getPageSize()); // read the merged stream and write the data back if (objectReuseEnabled) { final TypeSerializer<E> serializer = this.serializer; E rec = serializer.createInstance(); while ((rec = mergeIterator.next(rec)) != null) { serializer.serialize(rec, output); } } else { E rec; while ((rec = mergeIterator.next()) != null) { serializer.serialize(rec, output); } } output.close(); final int numBlocksWritten = output.getBlockCount(); // register merged result to be removed at shutdown unregisterOpenChannelToBeRemovedAtShudown(writer); // remove the merged channel readers from the clear-at-shutdown list for (int i = 0; i < channelAccesses.size(); i++) { FileIOChannel access = channelAccesses.get(i); access.closeAndDelete(); unregisterOpenChannelToBeRemovedAtShudown(access); } return new ChannelWithBlockCount(mergedChannelID, numBlocksWritten); } /** * Divides the given collection of memory buffers among {@code numChannels} sublists. * * @param target The list into which the lists with buffers for the channels are put. * @param memory A list containing the memory buffers to be distributed. The buffers are not * removed from this list. * @param numChannels The number of channels for which to allocate buffers. Must not be zero. */ protected final void getSegmentsForReaders(List<List<MemorySegment>> target, List<MemorySegment> memory, int numChannels) { // determine the memory to use per channel and the number of buffers final int numBuffers = memory.size(); final int buffersPerChannelLowerBound = numBuffers / numChannels; final int numChannelsWithOneMore = numBuffers % numChannels; final Iterator<MemorySegment> segments = memory.iterator(); // collect memory for the channels that get one segment more for (int i = 0; i < numChannelsWithOneMore; i++) { final ArrayList<MemorySegment> segs = new ArrayList<MemorySegment>(buffersPerChannelLowerBound + 1); target.add(segs); for (int k = buffersPerChannelLowerBound; k >= 0; k--) { segs.add(segments.next()); } } // collect memory for the remaining channels for (int i = numChannelsWithOneMore; i < numChannels; i++) { final ArrayList<MemorySegment> segs = new ArrayList<MemorySegment>(buffersPerChannelLowerBound); target.add(segs); for (int k = buffersPerChannelLowerBound; k > 0; k--) { segs.add(segments.next()); } } } // ------------------------------------------------------------------------ // Cleanup of Temp Files and Allocated Memory // ------------------------------------------------------------------------ /** * Adds a channel to the list of channels that are to be removed at shutdown. * * @param channel The channel id. */ protected void registerChannelToBeRemovedAtShudown(FileIOChannel.ID channel) { UnilateralSortMerger.this.channelsToDeleteAtShutdown.add(channel); } /** * Removes a channel from the list of channels that are to be removed at shutdown. * * @param channel The channel id. */ protected void unregisterChannelToBeRemovedAtShudown(FileIOChannel.ID channel) { UnilateralSortMerger.this.channelsToDeleteAtShutdown.remove(channel); } /** * Adds a channel reader/writer to the list of channels that are to be removed at shutdown. * * @param channel The channel reader/writer. */ protected void registerOpenChannelToBeRemovedAtShudown(FileIOChannel channel) { UnilateralSortMerger.this.openChannels.add(channel); } /** * Removes a channel reader/writer from the list of channels that are to be removed at shutdown. * * @param channel The channel reader/writer. */ protected void unregisterOpenChannelToBeRemovedAtShudown(FileIOChannel channel) { UnilateralSortMerger.this.openChannels.remove(channel); } } protected static final class ChannelWithBlockCount { private final FileIOChannel.ID channel; private final int blockCount; public ChannelWithBlockCount(ID channel, int blockCount) { this.channel = channel; this.blockCount = blockCount; } public FileIOChannel.ID getChannel() { return channel; } public int getBlockCount() { return blockCount; } } }
package ru.job4j.array; import org.junit.Test; import static org.hamcrest.Matchers.is; import static org.junit.Assert.*; public class EndsWith1Test { @Test public void whenEndWithPrefixThenTrue() { char[] word = {'H', 'e', 'l', 'l', 'o'}; char[] post = {'l', 'o'}; boolean result = EndsWith1.endsWith1(word, post); assertThat(result, is(true)); } @Test public void whenNotEndWithPrefixThenFalse() { char[] word = {'H', 'e', 'l', 'l', 'o'}; char[] post = {'l', 'a'}; boolean result = EndsWith1.endsWith1(word, post); assertThat(result, is(false)); } }
/******************************************************************************* * Copyright (c) 2010 Haifeng Li * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package smile.interpolation; import java.util.Arrays; /** * Laplace interpolation to restore missing or unmeasured values on a 2-dimensional * evenly spaced regular grid. In some sense, Laplace interpolation * produces the smoothest possible interpolant, which are obtained by solving * a very sparse linear equations with biconjugate gradient method. * * @author Haifeng Li */ public class LaplaceInterpolation { /** * Laplace interpolation. * @param matrix on input, values of NaN are deemed to be missing data. * On output, they are refilled with the interpolated solution. * @return the estimated error. */ public static double interpolate(double[][] matrix) { return interpolate(matrix, 1.0E-6); } /** * Laplace interpolation. * @param matrix on input, values of NaN are deemed to be missing data. * On output, they are refilled with the interpolated solution. * @param tol the desired convergence tolerance. * @return the estimated error. */ public static double interpolate(double[][] matrix, double tol) { return interpolate(matrix, tol, 2 * Math.max(matrix.length, matrix[0].length)); } /** * Laplace interpolation. * @param matrix on input, values of NaN are deemed to be missing data. * On output, they are refilled with the interpolated solution. * @param tol the desired convergence tolerance. * @param maxIter the maximum number of allowed iterations. * @return the estimated error. */ public static double interpolate(double[][] matrix, double tol, int maxIter) { int nrows = matrix.length; int ncols = matrix[0].length; int n = nrows * ncols; double[] b = new double[n]; double[] y = new double[n]; boolean[] mask = new boolean[n]; double vl = 0.; for (int k = 0; k < n; k++) { int i = k / ncols; int j = k - i * ncols; if (!Double.isNaN(matrix[i][j])) { b[k] = y[k] = vl = matrix[i][j]; mask[k] = true; } else { b[k] = 0.; y[k] = vl; mask[k] = false; } } double err = solve(matrix, b, y, mask, tol, maxIter); for (int k = 0, i = 0; i < nrows; i++) { for (int j = 0; j < ncols; j++) { matrix[i][j] = y[k++]; } } return err; } /** * Solves A * x = b by iterative biconjugate gradient method. * @param b the right hand side of linear equations. * @param x on input, x should be set to an initial guess of the solution * (or all zeros). On output, x is reset to the improved solution. * @param tol the desired convergence tolerance. * @param maxIter the maximum number of allowed iterations. * @return the estimated error. */ private static double solve(double[][] matrix, double[] b, double[] x, boolean[] mask, double tol, int maxIter) { double err = 0.0; double ak, akden, bk, bkden = 1.0, bknum, bnrm; int j, n = b.length; double[] p = new double[n]; double[] pp = new double[n]; double[] r = new double[n]; double[] rr = new double[n]; double[] z = new double[n]; double[] zz = new double[n]; ax(matrix, x, r, mask); for (j = 0; j < n; j++) { r[j] = b[j] - r[j]; rr[j] = r[j]; } bnrm = snorm(b); asolve(r, z); for (int iter = 0; iter < maxIter; iter++) { asolve(rr, zz); for (bknum = 0.0, j = 0; j < n; j++) { bknum += z[j] * rr[j]; } if (iter == 1) { for (j = 0; j < n; j++) { p[j] = z[j]; pp[j] = zz[j]; } } else { bk = bknum / bkden; for (j = 0; j < n; j++) { p[j] = bk * p[j] + z[j]; pp[j] = bk * pp[j] + zz[j]; } } bkden = bknum; ax(matrix, p, z, mask); for (akden = 0.0, j = 0; j < n; j++) { akden += z[j] * pp[j]; } ak = bknum / akden; atx(matrix, pp, zz, mask); for (j = 0; j < n; j++) { x[j] += ak * p[j]; r[j] -= ak * z[j]; rr[j] -= ak * zz[j]; } asolve(r, z); err = snorm(r) / bnrm; if (err <= tol) { break; } } return err; } /** * Solve A' * x = b for some preconditioner matrix A', which is possibly * the trivial diagonal part of A. */ private static void asolve(double[] b, double[] x) { System.arraycopy(b, 0, x, 0, b.length); } /** * Returns A * x. */ private static void ax(double[][] matrix, double[] x, double[] r, boolean[] mask) { int nrows = matrix.length; int ncols = matrix[0].length; int n = r.length, jjt, it; Arrays.fill(r, 0.0); for (int k = 0; k < n; k++) { int i = k / ncols; int j = k - i * ncols; if (mask[k]) { r[k] += x[k]; } else if (i > 0 && i < nrows - 1 && j > 0 && j < ncols - 1) { r[k] = x[k] - 0.25 * (x[k - 1] + x[k + 1] + x[k + ncols] + x[k - ncols]); } else if (i > 0 && i < nrows - 1) { r[k] = x[k] - 0.5 * (x[k + ncols] + x[k - ncols]); } else if (j > 0 && j < ncols - 1) { r[k] = x[k] - 0.5 * (x[k + 1] + x[k - 1]); } else { jjt = i == 0 ? ncols : -ncols; it = j == 0 ? 1 : -1; r[k] = x[k] - 0.5 * (x[k + jjt] + x[k + it]); } } } /** * Returns A' * x. */ private static void atx(double[][] matrix, double[] x, double[] r, boolean[] mask) { int nrows = matrix.length; int ncols = matrix[0].length; int n = r.length, jjt, it; double del; Arrays.fill(r, 0.0); for (int k = 0; k < n; k++) { int i = k / ncols; int j = k - i * ncols; if (mask[k]) { r[k] += x[k]; } else if (i > 0 && i < nrows - 1 && j > 0 && j < ncols - 1) { r[k] += x[k]; del = -0.25 * x[k]; r[k - 1] += del; r[k + 1] += del; r[k - ncols] += del; r[k + ncols] += del; } else if (i > 0 && i < nrows - 1) { r[k] += x[k]; del = -0.5 * x[k]; r[k - ncols] += del; r[k + ncols] += del; } else if (j > 0 && j < ncols - 1) { r[k] += x[k]; del = -0.5 * x[k]; r[k - 1] += del; r[k + 1] += del; } else { jjt = i == 0 ? ncols : -ncols; it = j == 0 ? 1 : -1; r[k] += x[k]; del = -0.5 * x[k]; r[k + jjt] += del; r[k + it] += del; } } } /** * Compute squared root of L2 norms for a vector. */ private static double snorm(double[] sx) { int n = sx.length; double ans = 0.0; for (int i = 0; i < n; i++) { ans += sx[i] * sx[i]; } return Math.sqrt(ans); } }
package com.orientechnologies.orient.server.distributed; import com.orientechnologies.common.exception.OException; import com.orientechnologies.orient.core.config.OGlobalConfiguration; import com.orientechnologies.orient.core.db.*; import com.orientechnologies.orient.core.db.document.ODatabaseDocument; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; import com.orientechnologies.orient.core.record.OElement; import com.orientechnologies.orient.core.record.ORecord; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.sql.executor.OResult; import com.orientechnologies.orient.server.OServer; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.concurrent.CountDownLatch; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class SimpleLiveQueryDistributedIT { private OServer server0; private OServer server1; @Before public void before() throws Exception { OGlobalConfiguration.SERVER_BACKWARD_COMPATIBILITY.setValue(false); server0 = OServer.startFromClasspathConfig("orientdb-simple-dserver-config-0.xml"); server1 = OServer.startFromClasspathConfig("orientdb-simple-dserver-config-1.xml"); OrientDB remote = new OrientDB("remote:localhost", "root", "test", OrientDBConfig.defaultConfig()); remote.create("test", ODatabaseType.PLOCAL); ODatabaseSession session = remote.open("test", "admin", "admin"); session.createClass("test"); session.close(); remote.close(); } @Test public void testLiveQueryDifferentNode() throws InterruptedException { OrientDB remote1 = new OrientDB("remote:localhost", "root", "test", OrientDBConfig.defaultConfig()); ODatabaseSession session = remote1.open("test", "admin", "admin"); EventListener listener = new EventListener(); OLiveQueryMonitor monitor = session.live("select from test", listener); OrientDB remote2 = new OrientDB("remote:localhost:2425", "root", "test", OrientDBConfig.defaultConfig()); ODatabaseSession session2 = remote1.open("test", "admin", "admin"); OElement el = session2.save(session2.newElement("test")); el.setProperty("name", "name"); session2.save(el); session2.delete(el); session2.close(); session.activateOnCurrentThread(); monitor.unSubscribe(); session.close(); listener.latch.await(); assertEquals(1, listener.create); assertEquals(1, listener.delete); assertEquals(1, listener.update); remote1.close(); remote2.close(); } @After public void after() throws InterruptedException { OrientDB remote = new OrientDB("remote:localhost", "root", "test", OrientDBConfig.defaultConfig()); remote.drop("test"); remote.close(); server0.shutdown(); server1.shutdown(); ODatabaseDocumentTx.closeAll(); } private static class EventListener implements OLiveQueryResultListener { public int create = 0; public int update = 0; public int delete = 0; public CountDownLatch latch = new CountDownLatch(1); @Override public void onCreate(ODatabaseDocument database, OResult data) { create++; } @Override public void onUpdate(ODatabaseDocument database, OResult before, OResult after) { update++; } @Override public void onDelete(ODatabaseDocument database, OResult data) { delete++; } @Override public void onError(ODatabaseDocument database, OException exception) { } @Override public void onEnd(ODatabaseDocument database) { latch.countDown(); } } }
package net.neoremind.fountain.producer.dispatch.misc; import java.util.Iterator; /** * 只有一个元素的迭代器 * * @author hexiufeng */ public class SingleIterator implements Iterator<Object> { private int count = 1; private final Object message; /** * 构造器 * * @param message 消息 */ public SingleIterator(Object message) { this.message = message; } @Override public boolean hasNext() { return count > 0; } @Override public Object next() { count--; return message; } @Override public void remove() { // don't support } }
package com.github.nhojpatrick.config.files; import com.github.nhojpatrick.config.files.internal.FindUtilImpl; import java.io.File; import java.io.InputStream; import java.util.Optional; public interface FindUtil { static FindUtil getDefault() { return new FindUtilImpl(); } /** * Find {@link java.io.File} by searching list of {@code filePaths}. * * @param filePaths a list of potential files. * @return the {@link java.io.File} if found, otherwise throw {@link java.lang.RuntimeException}. * @throws RuntimeException if file not found. */ File findFile(String... filePaths); /** * Find optional {@link java.io.File} by searching list of {@code filePaths}. * * @param filePaths a list of potential files. * @return the {@link java.io.File} if found, otherwise {@link java.util.Optional#empty()}. */ Optional<File> findOptionalFile(String... filePaths); /** * Find {@link java.io.InputStream} by searching list of {@code filePaths}. * * @param filePaths a list of potential files. * @return the {@link java.io.InputStream} if found, otherwise throw {@link java.lang.RuntimeException}. * @throws RuntimeException if file not found. */ InputStream findInputStream(String... filePaths); /** * Find optional {@link java.io.InputStream} by searching list of {@code filePaths}. * * @param filePaths a list of potential files. * @return the {@link java.io.InputStream} if found, otherwise {@link java.util.Optional#empty()}. */ Optional<InputStream> findOptionalInputStream(String... filePaths); }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.vfs; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionInputMap; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.Artifact.SourceArtifact; import com.google.devtools.build.lib.actions.ArtifactPathResolver; import com.google.devtools.build.lib.actions.BuildFailedException; import com.google.devtools.build.lib.actions.EnvironmentalExecException; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.MetadataConsumer; import com.google.devtools.build.lib.actions.cache.MetadataHandler; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.util.AbruptExitException; import com.google.devtools.build.skyframe.SkyFunction.Environment; import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.UUID; import java.util.function.Function; import javax.annotation.Nullable; /** * An OutputService retains control over the Blaze output tree, and provides a higher level of * abstraction compared to the VFS layer. * * <p>Higher-level facilities include batch statting, cleaning the output tree, creating symlink * trees, and out-of-band insertion of metadata into the tree. */ public interface OutputService { /** Properties of the action file system implementation provided by this output service. */ enum ActionFileSystemType { /** Action file system is disabled */ DISABLED, /** * The action file system implementation does not take over the output base but complements the * file system by being able to stage remote outputs accessed as inputs by local actions, as * used by Bazel. */ STAGE_REMOTE_FILES, /** * The action file system implementation is fully featured in-memory file system implementation * and takes full control of the output base, as used by Blaze. */ IN_MEMORY_FILE_SYSTEM; public boolean inMemoryFileSystem() { return this == IN_MEMORY_FILE_SYSTEM; } public boolean isEnabled() { return this != DISABLED; } } /** * @return the name of filesystem, akin to what you might see in /proc/mounts */ String getFilesSystemName(); /** * Start the build. * * @param buildId the UUID build identifier * @param finalizeActions whether this build is finalizing actions so that the output service * can track output tree modifications * @return a ModifiedFileSet of changed output files. * @throws BuildFailedException if build preparation failed * @throws InterruptedException */ ModifiedFileSet startBuild(EventHandler eventHandler, UUID buildId, boolean finalizeActions) throws BuildFailedException, AbruptExitException, InterruptedException; /** * Finish the build. * * @param buildSuccessful iff build was successful * @throws BuildFailedException on failure */ void finalizeBuild(boolean buildSuccessful) throws BuildFailedException, AbruptExitException, InterruptedException; /** Notify the output service of a completed action. */ void finalizeAction(Action action, MetadataHandler metadataHandler) throws IOException, EnvironmentalExecException; /** * @return the BatchStat instance or null. */ BatchStat getBatchStatter(); /** * @return true iff createSymlinkTree() is available. */ boolean canCreateSymlinkTree(); /** * Creates the symlink tree * * @param inputPath the input manifest * @param outputPath the output manifest * @param filesetTree is true iff we're constructing a Fileset * @param symlinkTreeRoot the symlink tree root, relative to the execRoot * @throws ExecException on failure * @throws InterruptedException */ void createSymlinkTree(Path inputPath, Path outputPath, boolean filesetTree, PathFragment symlinkTreeRoot) throws ExecException, InterruptedException; /** * Cleans the entire output tree. * * @throws ExecException on failure * @throws InterruptedException */ void clean() throws ExecException, InterruptedException; /** @return true iff the file actually lives on a remote server */ boolean isRemoteFile(Artifact file); default ActionFileSystemType actionFileSystemType() { return ActionFileSystemType.DISABLED; } /** * @param sourceDelegate filesystem for reading source files (excludes output files) * @param execRootFragment absolute path fragment pointing to the execution root * @param relativeOutputPath execution root relative path to output * @param sourceRoots list of directories on the package path (from {@link * com.google.devtools.build.lib.pkgcache.PathPackageLocator}) * @param inputArtifactData information about required inputs to the action * @param outputArtifacts required outputs of the action * @param sourceArtifactFactory obtains source artifacts from source exec paths * @return an action-scoped filesystem if {@link #supportsActionFileSystem} is not {@code NONE} */ @Nullable default FileSystem createActionFileSystem( FileSystem sourceDelegate, PathFragment execRootFragment, String relativeOutputPath, ImmutableList<Root> sourceRoots, ActionInputMap inputArtifactData, Iterable<Artifact> outputArtifacts, Function<PathFragment, SourceArtifact> sourceArtifactFactory) { return null; } /** * Updates the context used by the filesystem returned by {@link #createActionFileSystem}. * * <p>Should be called as context changes throughout action execution. * * @param actionFileSystem must be a filesystem returned by {@link #createActionFileSystem}. * @param filesets The Fileset symlinks known for this action. */ default void updateActionFileSystemContext( FileSystem actionFileSystem, Environment env, MetadataConsumer consumer, ImmutableMap<Artifact, ImmutableList<FilesetOutputSymlink>> filesets) throws IOException {} default boolean supportsPathResolverForArtifactValues() { return false; } default ArtifactPathResolver createPathResolverForArtifactValues( PathFragment execRoot, String relativeOutputPath, FileSystem fileSystem, ImmutableList<Root> pathEntries, ActionInputMap actionInputMap, Map<Artifact, Collection<Artifact>> expandedArtifacts, Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesets) throws IOException { throw new IllegalStateException("Path resolver not supported by this class"); } }
/* * Copyright 2011-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not * use this file except in compliance with the License. A copy of the License is * located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.identitymanagement.model; import java.io.Serializable; /** * <p> * Contains information about an MFA device. * </p> * <p> * This data type is used as a response element in the <a>ListMFADevices</a> * action. * </p> */ public class MFADevice implements Serializable, Cloneable { /** * <p> * The user with whom the MFA device is associated. * </p> */ private String userName; /** * <p> * The serial number that uniquely identifies the MFA device. For virtual * MFA devices, the serial number is the device ARN. * </p> */ private String serialNumber; /** * <p> * The date when the MFA device was enabled for the user. * </p> */ private java.util.Date enableDate; /** * Default constructor for MFADevice object. Callers should use the setter * or fluent setter (with...) methods to initialize the object after * creating it. */ public MFADevice() { } /** * Constructs a new MFADevice object. Callers should use the setter or * fluent setter (with...) methods to initialize any additional object * members. * * @param userName * The user with whom the MFA device is associated. * @param serialNumber * The serial number that uniquely identifies the MFA device. For * virtual MFA devices, the serial number is the device ARN. * @param enableDate * The date when the MFA device was enabled for the user. */ public MFADevice(String userName, String serialNumber, java.util.Date enableDate) { setUserName(userName); setSerialNumber(serialNumber); setEnableDate(enableDate); } /** * <p> * The user with whom the MFA device is associated. * </p> * * @param userName * The user with whom the MFA device is associated. */ public void setUserName(String userName) { this.userName = userName; } /** * <p> * The user with whom the MFA device is associated. * </p> * * @return The user with whom the MFA device is associated. */ public String getUserName() { return this.userName; } /** * <p> * The user with whom the MFA device is associated. * </p> * * @param userName * The user with whom the MFA device is associated. * @return Returns a reference to this object so that method calls can be * chained together. */ public MFADevice withUserName(String userName) { setUserName(userName); return this; } /** * <p> * The serial number that uniquely identifies the MFA device. For virtual * MFA devices, the serial number is the device ARN. * </p> * * @param serialNumber * The serial number that uniquely identifies the MFA device. For * virtual MFA devices, the serial number is the device ARN. */ public void setSerialNumber(String serialNumber) { this.serialNumber = serialNumber; } /** * <p> * The serial number that uniquely identifies the MFA device. For virtual * MFA devices, the serial number is the device ARN. * </p> * * @return The serial number that uniquely identifies the MFA device. For * virtual MFA devices, the serial number is the device ARN. */ public String getSerialNumber() { return this.serialNumber; } /** * <p> * The serial number that uniquely identifies the MFA device. For virtual * MFA devices, the serial number is the device ARN. * </p> * * @param serialNumber * The serial number that uniquely identifies the MFA device. For * virtual MFA devices, the serial number is the device ARN. * @return Returns a reference to this object so that method calls can be * chained together. */ public MFADevice withSerialNumber(String serialNumber) { setSerialNumber(serialNumber); return this; } /** * <p> * The date when the MFA device was enabled for the user. * </p> * * @param enableDate * The date when the MFA device was enabled for the user. */ public void setEnableDate(java.util.Date enableDate) { this.enableDate = enableDate; } /** * <p> * The date when the MFA device was enabled for the user. * </p> * * @return The date when the MFA device was enabled for the user. */ public java.util.Date getEnableDate() { return this.enableDate; } /** * <p> * The date when the MFA device was enabled for the user. * </p> * * @param enableDate * The date when the MFA device was enabled for the user. * @return Returns a reference to this object so that method calls can be * chained together. */ public MFADevice withEnableDate(java.util.Date enableDate) { setEnableDate(enableDate); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getUserName() != null) sb.append("UserName: " + getUserName() + ","); if (getSerialNumber() != null) sb.append("SerialNumber: " + getSerialNumber() + ","); if (getEnableDate() != null) sb.append("EnableDate: " + getEnableDate()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof MFADevice == false) return false; MFADevice other = (MFADevice) obj; if (other.getUserName() == null ^ this.getUserName() == null) return false; if (other.getUserName() != null && other.getUserName().equals(this.getUserName()) == false) return false; if (other.getSerialNumber() == null ^ this.getSerialNumber() == null) return false; if (other.getSerialNumber() != null && other.getSerialNumber().equals(this.getSerialNumber()) == false) return false; if (other.getEnableDate() == null ^ this.getEnableDate() == null) return false; if (other.getEnableDate() != null && other.getEnableDate().equals(this.getEnableDate()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getUserName() == null) ? 0 : getUserName().hashCode()); hashCode = prime * hashCode + ((getSerialNumber() == null) ? 0 : getSerialNumber() .hashCode()); hashCode = prime * hashCode + ((getEnableDate() == null) ? 0 : getEnableDate().hashCode()); return hashCode; } @Override public MFADevice clone() { try { return (MFADevice) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
package handlers; import com.jfoenix.controls.JFXListCell; import com.jfoenix.controls.JFXSpinner; import handlers.Convenience; import handlers.HandleNet; import javafx.application.Platform; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.fxml.Initializable; import javafx.scene.Parent; import javafx.scene.control.ListCell; import javafx.scene.layout.AnchorPane; import models.Events; import java.io.IOException; import java.net.URL; import java.util.ResourceBundle; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; /** * Class which handles loss of internet connection, * displays a Pop Up with the respective message, * polls the server in a loop until connection is reestablished * and locks the UI in the meanwhile * * @author Gheorghe Mironica */ public class NoInternet implements Initializable { private FXMLLoader loader; @FXML private AnchorPane noInternetAnchorPane; @FXML private JFXSpinner spinner; @Override public void initialize(URL location, ResourceBundle resources) { pollServer(); } /** * Method which polls the server in the loop until connection is reestablished */ private synchronized void pollServer(){ Thread loop = new Thread(()-> { while(!HandleNet.hasNetConnection()){ try { Thread.sleep(1000); } catch (InterruptedException e) { e.printStackTrace(); } Convenience.getDialog().setOverlayClose(false); try { Thread.sleep(2000); } catch (InterruptedException e) {e.printStackTrace(); } } Convenience.getDialog().setOverlayClose(true); Convenience.closePreviousDialog(); }); if(!loop.isAlive()) loop.start(); } }
package com.vladmihalcea.book.hpjp.jdbc.transaction; import java.sql.Connection; /** * @author Vlad Mihalcea */ public class MySQLTableLockTest extends AbstractTableLockTest { @Override protected DataSourceProvider dataSourceProvider() { return new MySQLDataSourceProvider(); } @Override protected String lockEmployeeTableSql() { return "SELECT * FROM employee WHERE department_id = 1 FOR UPDATE"; } @Override protected void prepareConnection(Connection connection) { /*try { connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); } catch (SQLException e) { fail(e.getMessage()); }*/ executeStatement(connection, "SET GLOBAL innodb_lock_wait_timeout = 100"); } }
package enemies; import java.awt.*; import java.awt.geom.GeneralPath; import java.awt.geom.Rectangle2D; import java.util.Random; public abstract class EnemyObject implements Enemy { // enemy's properties private int x,y; private Color color; // enemy's x,y movement change (speed) private double deltaX; private double deltaY; // setting the enemy's properties public EnemyObject(int x, int y, Color color) { this.x = x; this.y = y; this.color = color; this.deltaX = 3.0; this.deltaY = -3.0; } @Override public int getX() { return this.x; } @Override public void setX(int newX) { this.x = newX; } @Override public int getY() { return this.y; } @Override public void setY(int newY) { this.y = newY; } public double getDeltaX() { return this.deltaX; } public void setDeltaX(double deltaX) { this.deltaX = deltaX; } public double getDeltaY() { return this.deltaY; } public void setDeltaY(double deltaY) { this.deltaY = deltaY; } public Color getColor() { return this.color; } @Override // changes the direction and the speed of the enemy on a random base public void changeDeltaX(double min, double max) { Random rand = new Random(); // if deltaX = 0 or the deltaY = 0, then moves the enemy horizontal or vertical double randNum = min + (max-min) * rand.nextDouble(); if (randNum > 2.0 || randNum < -2.0) { this.deltaX = randNum; } else { this.changeDeltaX(min,max); } } @Override public void changeDeltaY(double min, double max) { Random rand = new Random(); // if deltaX = 0 or the deltaY = 0, then moves the enemy horizontal or vertical double randNum = min + (max-min) * rand.nextDouble(); if (randNum > 2.0 || randNum < -2.0) { this.deltaY = randNum; } else { this.changeDeltaY(min, max); } } @Override public abstract Rectangle2D getBounds(); @Override public abstract void update(); @Override public abstract void intersect(GeneralPath path); @Override public abstract void drawObject(Graphics2D g2d); }
/* First created by JCasGen Wed Aug 19 22:32:28 CDT 2020 */ package org.apache.ctakes.typesystem.type.textsem; /* * #%L * cTAKES based concept mapper * %% * Copyright (C) 2010 - 2021 Department of Veterans Affairs * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.apache.uima.jcas.JCas; import org.apache.uima.jcas.JCasRegistry; import org.apache.uima.cas.impl.TypeImpl; import org.apache.uima.cas.Type; /** Quantitative results of a laboratory, with number and unit. * Updated by JCasGen Wed Aug 19 22:32:28 CDT 2020 * @generated */ public class LabValueModifier_Type extends Modifier_Type { /** @generated */ @SuppressWarnings ("hiding") public final static int typeIndexID = LabValueModifier.typeIndexID; /** @generated @modifiable */ @SuppressWarnings ("hiding") public final static boolean featOkTst = JCasRegistry.getFeatOkTst("org.apache.ctakes.typesystem.type.textsem.LabValueModifier"); /** initialize variables to correspond with Cas Type and Features * @generated * @param jcas JCas * @param casType Type */ public LabValueModifier_Type(JCas jcas, Type casType) { super(jcas, casType); casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator()); } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2014 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.ascan; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.commons.configuration.ConfigurationException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.parosproxy.paros.Constant; import org.zaproxy.zap.utils.ZapXmlConfiguration; public class PolicyManager { public static final String POLICY_EXTENSION = ".policy"; private static final String DEFAULT_POLICY_NAME = Constant.messages.getString("ascan.policymgr.default.name"); public static final String ILLEGAL_POLICY_NAME_CHRS = "/`?*\\<>|\":\t\n\r"; private List<String> allPolicyNames = null; private ExtensionActiveScan extension; private static final Logger logger = LogManager.getLogger(PolicyManager.class); public PolicyManager(ExtensionActiveScan extension) { this.extension = extension; } public void init() { // Force load getAllPolicyNames(); } public synchronized List<String> getAllPolicyNames() { if (allPolicyNames == null) { allPolicyNames = new ArrayList<>(); String[] files = Constant.getPoliciesDir().list(); if (files != null) { for (String file : files) { if (file.endsWith(POLICY_EXTENSION)) { logger.debug("Found policy file " + file); allPolicyNames.add(file.substring(0, file.lastIndexOf(POLICY_EXTENSION))); } } } if (allPolicyNames.size() == 0) { // No policies :( Create a default one ScanPolicy defaultPolicy = new ScanPolicy(); defaultPolicy.setName(DEFAULT_POLICY_NAME); // Load from the 'old' configs defaultPolicy .getPluginFactory() .loadAllPlugin(extension.getScannerParam().getConfig()); try { // Note this will add the name to allPolicyNames this.savePolicy(defaultPolicy); } catch (ConfigurationException e) { logger.debug( "Failed to create default scan policy in " + Constant.getPoliciesDir().getAbsolutePath(), e); } } Collections.sort(allPolicyNames); } return allPolicyNames; } public void savePolicy(ScanPolicy policy) throws ConfigurationException { this.savePolicy(policy, null); } public void savePolicy(ScanPolicy policy, String previousName) throws ConfigurationException { logger.debug("Save policy " + policy.getName()); File file = new File(Constant.getPoliciesDir(), policy.getName() + POLICY_EXTENSION); ZapXmlConfiguration conf = new ZapXmlConfiguration(); conf.setProperty("policy", policy.getName()); conf.setProperty("scanner.level", policy.getDefaultThreshold().name()); conf.setProperty("scanner.strength", policy.getDefaultStrength().name()); policy.getPluginFactory().saveTo(conf); conf.save(file); if (previousName != null && previousName.length() > 0) { allPolicyNames.remove(previousName); } if (!allPolicyNames.contains(policy.getName())) { allPolicyNames.add(policy.getName()); Collections.sort(allPolicyNames); } } /** * Tells whether or not a scan policy with the given {@code name} exists. * * @param name the name of the scan policy * @return {@code true} if the scan policy exists, {@code false} otherwise * @since 2.4.3 */ public static boolean policyExists(String name) { return (new File(Constant.getPoliciesDir(), name + POLICY_EXTENSION)).exists(); } public ScanPolicy getPolicy(String name) throws ConfigurationException { return this.loadPolicy(new File(Constant.getPoliciesDir(), name + POLICY_EXTENSION)); } public ScanPolicy loadPolicy(String name) throws ConfigurationException { return this.loadPolicy(new File(Constant.getPoliciesDir(), name + POLICY_EXTENSION)); } private ScanPolicy loadPolicy(File file) throws ConfigurationException { File policyFile; try { // Obtain the name of the file in correct case, for DEFAULT_POLICY_NAME it might not be // exactly the same if the file // system is case insensitive, thus not matching with the name read directly from the // file system (method // getAllPolicyNames()). policyFile = file.toPath().toRealPath().toFile(); } catch (IOException e) { throw new ConfigurationException( "Failed to obtain the real path of the policy file:", e); } ScanPolicy policy = new ScanPolicy(new ZapXmlConfiguration(policyFile)); if (!policyFile.getName().equals(policy.getName() + POLICY_EXTENSION)) { // The file name takes precedence in case theres another policy with the same name policy.setName( policyFile .getName() .substring(0, policyFile.getName().indexOf(POLICY_EXTENSION))); } return policy; } public void importPolicy(File file) throws ConfigurationException, IOException { logger.debug("Import policy from " + file.getAbsolutePath()); ScanPolicy policy = new ScanPolicy(new ZapXmlConfiguration(file)); String baseName = file.getName(); if (baseName.endsWith(POLICY_EXTENSION)) { // Stip off the extension for the 'friendly name' and if we need to prevent overwriting // an existing one baseName = baseName.substring(0, baseName.indexOf(POLICY_EXTENSION)); } String finalName = baseName; File newFile = new File(Constant.getPoliciesDir(), finalName + POLICY_EXTENSION); int i = 2; while (newFile.exists()) { finalName = baseName + i; newFile = new File(Constant.getPoliciesDir(), finalName + POLICY_EXTENSION); i++; } policy.setName(finalName); this.savePolicy(policy); } public void exportPolicy(ScanPolicy policy, File file) throws ConfigurationException { logger.debug("Export policy to " + file.getAbsolutePath()); ZapXmlConfiguration conf = new ZapXmlConfiguration(); conf.setProperty("policy", policy.getName()); conf.setProperty("scanner.level", policy.getDefaultThreshold().name()); conf.setProperty("scanner.strength", policy.getDefaultStrength().name()); policy.getPluginFactory().saveTo(conf); conf.save(file); } public ScanPolicy getTemplatePolicy() throws ConfigurationException { return new ScanPolicy(); } public void deletePolicy(String name) { logger.debug("Delete policy " + name); File file = new File(Constant.getPoliciesDir(), name + POLICY_EXTENSION); if (file.exists()) { file.delete(); } this.allPolicyNames.remove(name); } public ScanPolicy getDefaultScanPolicy() { try { String policyName = extension.getScannerParam().getDefaultPolicy(); if (policyExists(policyName)) { logger.debug("getDefaultScanPolicy: " + policyName); return this.loadPolicy(policyName); } // No good, try the default name policyName = DEFAULT_POLICY_NAME; if (policyExists(policyName)) { logger.debug("getDefaultScanPolicy (default name): " + policyName); return this.loadPolicy(policyName); } if (this.allPolicyNames.size() > 0) { // Still no joy, try the first logger.debug("getDefaultScanPolicy (first one): " + policyName); return this.loadPolicy(this.allPolicyNames.get(0)); } } catch (ConfigurationException e) { logger.error(e.getMessage(), e); } // Return a new 'blank' one logger.debug("getDefaultScanPolicy (new blank)"); return new ScanPolicy(); } public ScanPolicy getAttackScanPolicy() { try { String policyName = extension.getScannerParam().getAttackPolicy(); if (policyExists(policyName)) { return this.loadPolicy(policyName); } // No good, try the default name policyName = DEFAULT_POLICY_NAME; if (policyExists(policyName)) { return this.loadPolicy(policyName); } if (this.allPolicyNames.size() > 0) { // Still no joy, try the first return this.loadPolicy(this.allPolicyNames.get(0)); } } catch (ConfigurationException e) { logger.error(e.getMessage(), e); } // Return a new 'blank' one return new ScanPolicy(); } public boolean isLegalPolicyName(String str) { for (int i = 0; i < str.length(); i++) { if (ILLEGAL_POLICY_NAME_CHRS.indexOf(str.charAt(i)) >= 0) { return false; } } return true; } }
/* * Copyright 2017 Bridje Framework. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bridje.sql; /** * Delete from step. */ public interface DeleteFromStep extends DeleteWhereStep { /** * Creates a INNER JOIN statement for this query. * * @param table The table to join. * @param on The condition to join. * * @return The next step. */ DeleteFromStep innerJoin(TableExpr table, BooleanExpr<?, ?> on); /** * Creates a LEFT JOIN statement for this query. * * @param table The table to join. * @param on The condition to join. * * @return The next step. */ DeleteFromStep leftJoin(TableExpr table, BooleanExpr<?, ?> on); /** * Creates a RIGHT JOIN statement for this query. * * @param table The table to join. * @param on The condition to join. * * @return The next step. */ DeleteFromStep rightJoin(TableExpr table, BooleanExpr<?, ?> on); /** * Adds a WHERE statement to the query. * * @param condition The condition for the where statement. * * @return The next step. */ DeleteWhereStep where(BooleanExpr<?, ?> condition); }
/* * The MIT License (MIT) * * Copyright (c) 2015 baoyongzhang <baoyz94@gmail.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.xnfh.bjhospital.weight; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Rect; import android.graphics.RectF; import android.util.TypedValue; /** * Created by wangxuewei on 2017/10/11. */ public class SmartisanDrawable extends RefreshDrawable { RectF mBounds; float mWidth; float mHeight; float mCenterX; float mCenterY; float mPercent; final float mMaxAngle = (float) (180f * .85); final float mRadius = dp2px(12); final float mLineLength = (float) (Math.PI / 180 * mMaxAngle * mRadius); final float mLineWidth = dp2px(3); final float mArrowLength = (int) (mLineLength * .15); final float mArrowAngle = (float) (Math.PI / 180 * 25); final float mArrowXSpace = (int) (mArrowLength * Math.sin(mArrowAngle)); final float mArrowYSpace = (int) (mArrowLength * Math.cos(mArrowAngle)); final Paint mPaint = new Paint(); int mOffset; boolean mRunning; float mDegrees; public SmartisanDrawable(Context context, PullRefreshLayout layout) { super(context, layout); mPaint.setAntiAlias(true); mPaint.setStrokeJoin(Paint.Join.ROUND); mPaint.setStrokeCap(Paint.Cap.ROUND); mPaint.setStrokeWidth(mLineWidth); mPaint.setStyle(Paint.Style.STROKE); mPaint.setColor(Color.GRAY); } @Override protected void onBoundsChange(Rect bounds) { super.onBoundsChange(bounds); mHeight = getRefreshLayout().getFinalOffset(); mWidth = mHeight; mBounds = new RectF(bounds.width() / 2 - mWidth / 2, bounds.top - mHeight / 2, bounds.width() / 2 + mWidth / 2, bounds.top + mHeight / 2); mCenterX = mBounds.centerX(); mCenterY = mBounds.centerY(); } @Override public void setPercent(float percent) { mPercent = percent; invalidateSelf(); } @Override public void setColorSchemeColors(int[] colorSchemeColors) { if (colorSchemeColors != null && colorSchemeColors.length > 0) { mPaint.setColor(colorSchemeColors[0]); } } @Override public void offsetTopAndBottom(int offset) { mOffset += offset; invalidateSelf(); } @Override public void start() { mRunning = true; mDegrees = 0; invalidateSelf(); } @Override public void stop() { mRunning = false; } @Override public boolean isRunning() { return mRunning; } @Override public void draw(Canvas canvas) { canvas.save(); canvas.translate(0, mOffset / 2); canvas.clipRect(mBounds); if (mOffset > mHeight && !isRunning()) { canvas.rotate((mOffset - mHeight) / mHeight * 360, mCenterX, mCenterY); } if (isRunning()) { canvas.rotate(mDegrees, mCenterX, mCenterY); mDegrees = mDegrees < 360 ? mDegrees + 10 : 0; invalidateSelf(); } if (mPercent <= .5f) { float percent = mPercent / .5f; // left float leftX = mCenterX - mRadius; float leftY = mCenterY + mLineLength - mLineLength * percent; canvas.drawLine(leftX, leftY, leftX, leftY + mLineLength, mPaint); // left arrow canvas.drawLine(leftX, leftY, leftX - mArrowXSpace, leftY + mArrowYSpace, mPaint); // right float rightX = mCenterX + mRadius; float rightY = mCenterY - mLineLength + mLineLength * percent; canvas.drawLine(rightX, rightY, rightX, rightY - mLineLength, mPaint); // right arrow canvas.drawLine(rightX, rightY, rightX + mArrowXSpace, rightY - mArrowYSpace, mPaint); } else { float percent = (mPercent - .5f) / .5f; // left float leftX = mCenterX - mRadius; float leftY = mCenterY; canvas.drawLine(leftX, leftY, leftX, leftY + mLineLength - mLineLength * percent, mPaint); RectF oval = new RectF(mCenterX - mRadius, mCenterY - mRadius, mCenterX + mRadius, mCenterY + mRadius); canvas.drawArc(oval, 180, mMaxAngle * percent, false, mPaint); // right float rightX = mCenterX + mRadius; float rightY = mCenterY; canvas.drawLine(rightX, rightY, rightX, rightY - mLineLength + mLineLength * percent, mPaint); canvas.drawArc(oval, 0, mMaxAngle * percent, false, mPaint); // arrow canvas.save(); // canvas.translate(mCenterX, mCenterY); canvas.rotate(mMaxAngle * percent, mCenterX, mCenterY); // canvas.translate(-mCenterX, -mCenterY); // left arrow canvas.drawLine(leftX, leftY, leftX - mArrowXSpace, leftY + mArrowYSpace, mPaint); // right arrow canvas.drawLine(rightX, rightY, rightX + mArrowXSpace, rightY - mArrowYSpace, mPaint); canvas.restore(); } canvas.restore(); } private int dp2px(int dp) { return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp, getContext().getResources().getDisplayMetrics()); } }
package io.github.moulberry.notenoughupdates.core; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.ScaledResolution; import org.lwjgl.opengl.GL11; import java.util.LinkedList; public class GlScissorStack { private static class Bounds { int left; int top; int right; int bottom; public Bounds(int left, int top, int right, int bottom) { this.left = left; this.top = top; this.right = right; this.bottom = bottom; } public Bounds createSubBound(int left, int top, int right, int bottom) { left = Math.max(left, this.left); top = Math.max(top, this.top); right = Math.min(right, this.right); bottom = Math.min(bottom, this.bottom); if(top > bottom) { top = bottom; } if(left > right) { left = right; } return new Bounds(left, top, right, bottom); } public void set(ScaledResolution scaledResolution) { int height = Minecraft.getMinecraft().displayHeight; int scale = scaledResolution.getScaleFactor(); GL11.glScissor(left*scale, height-bottom*scale, (right-left)*scale, (bottom-top)*scale); } } private static LinkedList<Bounds> boundsStack = new LinkedList<>(); public static void push(int left, int top, int right, int bottom, ScaledResolution scaledResolution) { if(right < left) { int temp = right; right = left; left = temp; } if(bottom < top) { int temp = bottom; bottom = top; top = temp; } if(boundsStack.isEmpty()) { boundsStack.push(new Bounds(left, top, right, bottom)); } else { boundsStack.push(boundsStack.peek().createSubBound(left, top, right, bottom)); } if(!boundsStack.isEmpty()) { boundsStack.peek().set(scaledResolution); } GL11.glEnable(GL11.GL_SCISSOR_TEST); } public static void pop(ScaledResolution scaledResolution) { if(!boundsStack.isEmpty()) { boundsStack.pop(); } if(boundsStack.isEmpty()) { GL11.glDisable(GL11.GL_SCISSOR_TEST); } else { boundsStack.peek().set(scaledResolution); } } public static void clear() { boundsStack.clear(); GL11.glDisable(GL11.GL_SCISSOR_TEST); } }
/* * Copyright (c) 2006 SibInco Inc. All Rights Reserved. */ package ru.sibinco.scag.scmon.snap; import ru.sibinco.lib.backend.util.SnapBufferReader; import java.io.IOException; import java.io.DataOutputStream; import java.io.DataInputStream; /** * The <code>SnapAbstract</code> class represents * <p><p/> * Date: 11.01.2006 * Time: 12:41:20 * * @author &lt;a href="mailto:igor@sibinco.ru"&gt;Igor Klimenko&lt;/a&gt; */ public abstract class SnapAbstract { public static final int COUNTERS = 5; public static final int ACCEPTED_INDEX = 0; public static final int REJECTED_INDEX = 1; public static final int DELIVERED_INDEX = 2; public static final int GW_REJECTED_INDEX = 3; public static final int FAILED_INDEX = 4; static final int MAX_STR_LEN = 16; public abstract void init(SnapBufferReader in) throws IOException; public abstract void write(DataOutputStream out) throws IOException; public abstract void read(DataInputStream in) throws IOException; }
package com.pietrogon.springbootrestapi.entity; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import javax.persistence.*; import java.util.Set; @Entity @JsonIgnoreProperties({"users",}) public class Project { @GeneratedValue(strategy = GenerationType.AUTO) @Id @Column(name = "id") private Long id; @Column(name = "name") private String name; @ManyToMany @JoinTable( name = "user_in_project", joinColumns = @JoinColumn(name = "project_id"), inverseJoinColumns = @JoinColumn(name = "user_id")) @JsonIgnoreProperties("users") private Set<User> users; @OneToMany(mappedBy = "project") private Set<Appointments> appointments; public Set<User> getUsers() { return users; } public void setUsers(Set<User> users) { this.users = users; } public Set<Appointments> getAppointments() { return appointments; } public void setAppointments(Set<Appointments> appointments) { this.appointments = appointments; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } }
/*- * #%L * anchor-image * %% * Copyright (C) 2010 - 2020 Owen Feehan, ETH Zurich, University of Zurich, Hoffmann-La Roche * %% * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * #L% */ package org.anchoranalysis.math.histogram; import static java.lang.Math.toIntExact; import java.util.Arrays; import java.util.function.DoublePredicate; import java.util.function.IntFunction; import java.util.function.IntPredicate; import java.util.function.LongUnaryOperator; import lombok.Getter; import org.anchoranalysis.core.exception.OperationFailedException; import org.anchoranalysis.core.exception.friendly.AnchorImpossibleSituationException; import org.anchoranalysis.math.statistics.VarianceCalculatorLong; import org.apache.commons.lang.ArrayUtils; /** * A histogram of integer values. * * <p>The bin-size is always 1, so each bin corresponds to a discrete integer. * * <p>See <a href="https://en.wikipedia.org/wiki/Histogram">histogram on Wikipedia</a>. * * <p>This can be used to record a discrete probability distribution, and is typically used in the * Anchor software to record the distribution of image voxel intensity values. * * <p>Note that this is dense implementation and memory is allocated to store all values from {@code * minValue} to {@code maxValue} (inclusive). This can be a lot of memory for e.g. unsigned-short * value types. However, it allows for a maximally efficient incrementing through voxels in an * image, without intermediate structures. * * @author Owen Feehan */ public final class Histogram { /** Consumes a bin and corresponding count. */ @FunctionalInterface public interface BinConsumer { /** * Accepts a particular bin and corresponding count. * * @param bin the bin. * @param count the corresponding count. */ void accept(int bin, int count); } /** Minimum possible value in the histogram (inclusive). */ private int minValue; /** Maximum possible value in the histogram (inclusive). */ @Getter private int maxValue; private int[] counts; private long sumCount = 0; /** * Constructs with a maximum value, and assuming a minimum value of 0. * * @param maxValue maximum possible value in the histogram (inclusive). */ public Histogram(int maxValue) { this(0, maxValue); } /** * Constructs with a maximum value, and assuming a minimum value of 0. * * @param minValue minimum possible value in the histogram (inclusive). * @param maxValue maximum possible value in the histogram (inclusive). */ public Histogram(int minValue, int maxValue) { counts = new int[maxValue - minValue + 1]; sumCount = 0; this.maxValue = maxValue; this.minValue = minValue; } /** * Creates a deep-copy of the current object. * * @return a deep-copy. */ public Histogram duplicate() { Histogram out = new Histogram(minValue, maxValue); out.counts = ArrayUtils.clone(counts); out.sumCount = sumCount; return out; } /** Sets the count for all values to 0. */ public void reset() { sumCount = 0; for (int i = minValue; i <= maxValue; i++) { set(i, 0); } } /** * Sets the count for a particular value to 0. * * @param value the value whose count is zeroed. */ public void zeroValue(int value) { int index = index(value); sumCount -= counts[index]; counts[index] = 0; } /** * Moves all count for a particular value and adds it to the count for another. * * @param valueFrom the value whose count is moved, after which it's count is set to zero. * @param valueTo the value to which the count for {@code valueFrom} is added. */ public void transferCount(int valueFrom, int valueTo) { int indexFrom = index(valueFrom); incrementCount(valueTo, counts[indexFrom]); counts[indexFrom] = 0; } /** * Increments the count for a particular value by one. * * @param value the value whose count will be incremented by one. */ public void incrementValue(int value) { incrementCount(value, 1); sumCount++; } /** * Increments the count for a particular value. * * @param value the value whose count will be incremented. * @param increase how much to increase the count by. */ public void incrementValueBy(int value, int increase) { incrementCount(value, increase); sumCount += increase; } /** * Like {@link #incrementValueBy(int, int)} but accepts a {@code long} as the {@code increase} * argument. * * @param value the value whose count will be incremented. * @param increase how much to increase the count by. * @throws ArithmeticException if increase cannot be converted to an {@code int} safely. */ public void incrementValueBy(int value, long increase) { incrementValueBy(value, toIntExact(increase)); } /** * All values less than {@code threshold} are removed. * * @param threshold values greater or equal to this are kept in the histogram, lesser values are * removed. */ public void removeBelowThreshold(int threshold) { for (int bin = minValue; bin < threshold; bin++) { zeroValue(bin); } // Now chop off the unneeded values and set a new minimum chopBefore(index(threshold)); this.minValue = threshold; } /** * If no value exists in the histogram with a count greater than zero. * * @return true iff the histogram has zero-count for all values. */ public boolean isEmpty() { return sumCount == 0; } /** * The count corresponding to a particular value. * * @param value the value (the bin) to find a count for. * @return the corresponding count. */ public int getCount(int value) { return counts[index(value)]; } /** * The size of the range of values in the histogram. * * <p>This is equivalent to {@code (maxValue - minValue + 1)}. * * @return the number of values represented in the histogram. */ public int size() { return counts.length; } /** * Adds the counts from another histogram to the current object. * * <p>Both histograms must have identical minimum and maximum values, and therefore represent * the same range of values. * * @param other the histogram to add. * @throws OperationFailedException if the histograms do have identical minimum and maximum * values. */ public void addHistogram(Histogram other) throws OperationFailedException { if (this.getMaxValue() != other.getMaxValue()) { throw new OperationFailedException( "Cannot add histograms with different max-bin-values"); } if (this.minValue != other.minValue) { throw new OperationFailedException( "Cannot add histograms with different min-bin-values"); } for (int bin = minValue; bin <= getMaxValue(); bin++) { int otherCount = other.getCount(bin); incrementCount(bin, otherCount); sumCount += otherCount; } } /** * Calculates the <b>mean</b> of the histogram values, considering their frequency. * * <p>Specifically, this is the mean of {@code value * countFor(value)} across all values. * * @return the mean. * @throws OperationFailedException if the histogram has no values. */ public double mean() throws OperationFailedException { checkAtLeastOneItemExists(); long sum = 0; for (int bin = minValue; bin <= maxValue; bin++) { sum += getCountAsLong(bin) * bin; } return ((double) sum) / sumCount; } /** * Calculates the corresponding value for a particular <b>quantile</b> in the distribution of * values in the histogram. * * <p>See <a href="https://en.wikipedia.org/wiki/Quantile">Quantile on wikipedia</a>. * * <p>A quantile of 0.3, would return the minimal value, greater or equal to at least 30% of the * count. * * @param quantile the quantile, in the interval {@code [0, 1]}. * @return the mean. * @throws OperationFailedException if the histogram has no values, or the quantile is outside * acceptable bounds. */ public int quantile(double quantile) throws OperationFailedException { checkAtLeastOneItemExists(); if (quantile < 0 || quantile > 1) { throw new OperationFailedException( String.format("The quantile must be >= 0 and <= 1 but is %f", quantile)); } double threshold = quantile * sumCount; long sum = 0; for (int bin = minValue; bin <= maxValue; bin++) { sum += getCount(bin); if (sum > threshold) { return bin; } } return calculateMaximum(); } /** * Whether at least one value, greater or equal to {@code startMin} has non-zero count? * * @param threshold only values greater or equal to {@code threshold} are considered. Use 0 for * all values. * @return true iff at least one value in this range has a non-zero count, false if all values * in the range are zero. */ public boolean hasNonZeroCount(int threshold) { for (int bin = threshold; bin <= maxValue; bin++) { if (getCount(bin) > 0) { return true; } } return false; } /** * Calculates the <b>mode</b> of the histogram values. * * <p>The mode is the most frequently occurring item. * * @return the mode. * @throws OperationFailedException if the histogram has no values. */ public int calculateMode() throws OperationFailedException { checkAtLeastOneItemExists(); int maxIndex = -1; int maxCount = -1; for (int bin = minValue; bin <= maxValue; bin++) { int count = getCount(bin); if (count > maxCount) { maxCount = count; maxIndex = bin; } } return maxIndex; } /** * Calculates the <b>maximum value with non zero-count</b> among the histogram values. * * @return the maximal value with non-zero count. * @throws OperationFailedException if the histogram has no values. */ public int calculateMaximum() throws OperationFailedException { checkAtLeastOneItemExists(); for (int bin = maxValue; bin >= minValue; bin--) { if (getCount(bin) > 0) { return bin; } } throw new AnchorImpossibleSituationException(); } /** * Calculates the <b>minimum value with non zero-count</b> among the histogram values. * * @return the minimal value with non-zero count. * @throws OperationFailedException if the histogram has no values. */ public int calculateMinimum() throws OperationFailedException { checkAtLeastOneItemExists(); for (int bin = minValue; bin <= maxValue; bin++) { if (getCount(bin) > 0) { return bin; } } throw new AnchorImpossibleSituationException(); } /** * Calculates the <b>sum of all values</b> in the distribution considering their counts. * * <p>Specifically, the sum is {@code value * countFor(value)} across all values. * * @return the sum. */ public long calculateSum() { return calculateSumHelper(value -> value); } /** * Calculates the <b>sum of the squares of all values</b> in the distribution considering their * counts. * * <p>Specifically, the sum is {@code value^2 * countFor(value)} across all values. * * @return the sum of squares. */ public long calculateSumSquares() { return calculateSumHelper(value -> value * value); } /** * Calculates the <b>cube of the squares of all values</b> in the distribution considering their * counts. * * <p>Specifically, the sum is {@code value^3 * countFor(value)} across all values. * * @return the sum of cubes. */ public long calculateSumCubes() { return calculateSumHelper(value -> value * value * value); } /** * Calculates the <b>standard-deviation</b> of the distribution represented by the histogram. * * @return the standard-deviation. * @throws OperationFailedException if the histogram has no values. */ public double standardDeviation() throws OperationFailedException { checkAtLeastOneItemExists(); return Math.sqrt(variance()); } /** * Calculates the <b>variance</b> of the distribution represented by the histogram. * * @return the variance. * @throws OperationFailedException if the histogram has no values. */ public double variance() throws OperationFailedException { checkAtLeastOneItemExists(); return new VarianceCalculatorLong(calculateSum(), calculateSumSquares(), getTotalCount()) .variance(); } /** * Gets the total count of all values that match a predicate. * * @param predicate the predicate a value must match to be included in the count. * @return the sum of the counts corresponding to all values that match the predicate. */ public long countMatching(IntPredicate predicate) { long sum = 0; for (int bin = minValue; bin <= maxValue; bin++) { if (predicate.test(bin)) { sum += getCountAsLong(bin); } } return sum; } /** * Generates a new histogram containing only values that match a predicate. * * <p>This is an <i>immutable operation</i>. The existing histogram's values are unchanged. * * @param predicate a condition that must hold on the value for it to be included in the created * histogram. * @return a newly created {@link Histogram} containing values and corresponding counts from * this object, but only if they fulfill the predicate. */ public Histogram threshold(DoublePredicate predicate) { Histogram out = new Histogram(maxValue); out.sumCount = 0; for (int bin = minValue; bin <= maxValue; bin++) { if (predicate.test(bin)) { int count = getCount(bin); out.set(bin, count); out.sumCount += count; } } return out; } /** A string representation of what's in the histogram. */ @Override public String toString() { return concatenateForEachBin(value -> String.format("%d: %d%n", value, getCount(value))); } /** * The total count across values in the histogram. * * <p>This is pre-calculated, so calling this operation occurs no computational expense. * * @return the total count. */ public long getTotalCount() { return sumCount; } /** * Creates a {@link Histogram} reusing the bins in the current histogram, but with an upper * limit on the total count. * * <p>If more total count exists than {@code maxCount}, values are removed in <b>ascending * order</b>, until the count is under the limit. * * @param maxCount the maximum allowable total-count for the extracted histogram. * @return a newly created {@link Histogram} either a copy of the existing (if the total count * is less than {@code maxCount} or cropped as per above rules. */ public Histogram cropRemoveSmallerValues(long maxCount) { Histogram out = new Histogram(maxValue); long remaining = maxCount; // We keep taking pixels from the histogram until we have reached our quota for (int bin = getMaxValue(); bin >= minValue; bin--) { int count = getCount(bin); // Skip if there's nothing there if (count != 0) { remaining = extractBin(out, bin, count, remaining); if (remaining == 0) { break; } } } return out; } /** * Like {@link #cropRemoveSmallerValues(long)} but larger values are removed rather than smaller * values if the total count is too high. * * @param maxCount the maximum allowable total-count for the extracted histogram. * @return a newly created {@link Histogram} either a copy of the existing (if the total count * is less than {@code maxCount} or cropped as per above rules. */ public Histogram cropRemoveLargerValues(long maxCount) { Histogram out = new Histogram(maxValue); long remaining = maxCount; // We keep taking pixels from the histogram until we have reached our quota for (int bin = minValue; bin <= getMaxValue(); bin++) { int count = getCount(bin); // Skip if there's nothing there if (count != 0) { remaining = extractBin(out, bin, count, remaining); if (remaining == 0) { break; } } } return out; } /** * Calculates the mean of the values in the distribution, if each value is raised to a power. * * <p>Specifically, it calculates the mean of {@code countFor(value) * value^power} across all * values. * * @param power the power to raise each value to. * @return the calculated mean. * @throws OperationFailedException if the histogram has no values. */ public double mean(double power) throws OperationFailedException { checkAtLeastOneItemExists(); return mean(power, 0.0); } /** * Like {@link #mean(double)} but a value may be subtracted before raising to a power. * * <p>Specifically, it calculates the mean of {@code countFor(value) * (value - * subtractValue)^power} across all values. * * @param power the power to raise each value to (after subtraction). * @param subtractValue a value subtracted before raising to a power. * @return the calculated mean. * @throws OperationFailedException if the histogram has no values. */ public double mean(double power, double subtractValue) throws OperationFailedException { checkAtLeastOneItemExists(); double sum = 0; for (int bin = minValue; bin <= maxValue; bin++) { double binSubtracted = (bin - subtractValue); sum += getCountAsLong(bin) * Math.pow(binSubtracted, power); } return sum / sumCount; } /** * Calls {@code consumer} for every value, <i>increasing</i> from min to max. * * @param consumer called for every bin. */ public void iterateValues(BinConsumer consumer) { for (int bin = minValue; bin <= maxValue; bin++) { consumer.accept(bin, getCount(bin)); } } /** * Calls {@code consumer} for every value until a limit, <i>increasing</i> from min to {@code * limit}. * * @param limit the maximum-value to consume (inclusive). * @param consumer called for every bin. */ public void iterateValuesUntil(int limit, BinConsumer consumer) { for (int bin = minValue; bin <= limit; bin++) { consumer.accept(bin, getCount(bin)); } } /** Calculates the sum of {@code function(value) * count)} across all values. */ private long calculateSumHelper(LongUnaryOperator function) { long sum = 0; for (int bin = minValue; bin <= maxValue; bin++) { long add = getCountAsLong(bin) * function.applyAsLong(bin); sum += add; } return sum; } // The index in the array the value is stored at private int index(int value) { return value - minValue; } /** Assigns a count for a particular value. */ private void set(int value, int countToAssign) { counts[index(value)] = countToAssign; } /** Increments the count for a particular value. */ private void incrementCount(int value, int incrementBy) { counts[index(value)] += incrementBy; } private long getCountAsLong(int value) { return getCount(value); } private void chopBefore(int index) { counts = Arrays.copyOfRange(counts, index, counts.length); } private void checkAtLeastOneItemExists() throws OperationFailedException { if (isEmpty()) { throw new OperationFailedException( "There are no items in the histogram so this operation cannot occur"); } } /** * Places a particular bin in a destination histogram. * * <p>Either the whole bin is transferred or only some of the bin so that {@code remaining >= * 0}. * * @param destination the destination histogram. * @param bin the bin-value. * @param countForBin the count. * @param remaining the count remaining that can still be transferred. * @return an updated value for remaining after subtracting the transferred count. */ private static long extractBin( Histogram destination, int bin, int countForBin, long remaining) { // If there's more or just enough remaining than we have, we transfer the entire bin if (remaining >= countForBin) { destination.incrementValueBy(bin, countForBin); return remaining - countForBin; } else { // Otherwise partially transfer the bin destination.incrementValueBy(bin, remaining); return 0; } } /** * Builds a string that is a concatenation of strings generated for each bin in the histogram */ private String concatenateForEachBin(IntFunction<String> stringForBin) { StringBuilder builder = new StringBuilder(); for (int bin = minValue; bin <= maxValue; bin++) { builder.append(stringForBin.apply(bin)); } return builder.toString(); } }
/**Copyright 2015 Research Studios Austria Forschungsgesellschaft mBH * * This file is part of easyrec. * * easyrec is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * easyrec is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with easyrec. If not, see <http://www.gnu.org/licenses/>. */ package org.easyrec.model.web; import javax.xml.bind.annotation.XmlRootElement; /** * * @author Stephan */ @XmlRootElement public class JSONParams { private String apikey; private String tenantid; private String itemid; private String itemtype = "ITEM"; private String itemdescription; private String itemurl; private String itemimageurl; private String profile; private String key; private String value; private String path; public JSONParams() { } public String getApikey() { return apikey; } public void setApikey(String apikey) { this.apikey = apikey; } public String getTenantid() { return tenantid; } public void setTenantid(String tenantid) { this.tenantid = tenantid; } public String getItemid() { return itemid; } public void setItemid(String itemid) { this.itemid = itemid; } public String getItemtype() { return itemtype; } public void setItemtype(String itemtype) { this.itemtype = itemtype; } public String getProfile() { return profile; } public void setProfile(String profile) { this.profile = profile; } public String getItemdescription() { return itemdescription; } public void setItemdescription(String itemdescription) { this.itemdescription = itemdescription; } public String getItemurl() { return itemurl; } public void setItemurl(String itemurl) { this.itemurl = itemurl; } public String getItemimageurl() { return itemimageurl; } public void setItemimageurl(String itemimageurl) { this.itemimageurl = itemimageurl; } public String getKey() { return key; } public void setKey(String key) { this.key = key; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } }
/* * Copyright 2022 Linus Andera * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package me.linusdev.lapi.api.objects.user; import me.linusdev.data.Datable; import me.linusdev.data.so.SOData; import me.linusdev.lapi.api.communication.cdn.image.CDNImage; import me.linusdev.lapi.api.communication.cdn.image.CDNImageRetriever; import me.linusdev.lapi.api.communication.cdn.image.ImageQuery; import me.linusdev.lapi.api.communication.file.types.AbstractFileType; import me.linusdev.lapi.api.objects.HasLApi; import me.linusdev.lapi.api.lapiandqueue.LApi; import me.linusdev.lapi.api.communication.exceptions.InvalidDataException; import me.linusdev.lapi.api.objects.snowflake.Snowflake; import me.linusdev.lapi.api.objects.snowflake.SnowflakeAble; import me.linusdev.lapi.api.objects.user.abstracts.BasicUserInformation; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * <a style="margin-bottom:0;padding-bottom:0;font-size:20px;font-weight:'bold';" href="https://discord.com/developers/docs/resources/user#users-resource" target="_top">User Resource</a><br> * <p> * Users in Discord are generally considered the base entity. Users can spawn across the entire platform, be members of guilds, participate in text and voice chat, and much more. Users are separated by a distinction of "bot" vs "normal." Although they are similar, bot users are automated users that are "owned" by another user. Unlike normal users, bot users do not have a limitation on the number of Guilds they can be a part of. * </p> * <br> * <a style="margin-bottom:0;padding-bottom:0;font-size:15px;font-weight:'bold';" href="https://discord.com/developers/docs/resources/user#usernames-and-nicknames" target="_top">Usernames and Nicknames</a><br> * <p> * Discord enforces the following restrictions for usernames and nicknames: * </p> * <ol> * <li> * Names can contain most valid unicode characters. We limit some zero-width and non-rendering characters. * </li> * <li> * Usernames must be between 2 and 32 characters long. * </li> * <li> * Nicknames must be between 1 and 32 characters long. * </li> * <li> * Names are sanitized and trimmed of leading, trailing, and excessive internal whitespace. * </li> * </ol> * <p> * The following restrictions are additionally enforced for usernames: * </p> * <ol> * <li> * Names cannot contain the following substrings: '@', '#', ':', '```'. * </li> * <li> * Names cannot be: 'discordtag', 'everyone', 'here'. * </li> * </ol> * * @see <a href="https://discord.com/developers/docs/resources/user#users-resource" target="_top">User Resource</a> */ public class User implements BasicUserInformation, SnowflakeAble, Datable, HasLApi { public static final String ID_KEY = "id"; public static final String USERNAME_KEY = "username"; public static final String DISCRIMINATOR_KEY = "discriminator"; public static final String AVATAR_KEY = "avatar"; public static final String BOT_KEY = "bot"; public static final String SYSTEM_KEY = "system"; public static final String MFA_ENABLED_KEY = "mfa_enabled"; public static final String BANNER_KEY = "banner"; public static final String ACCENT_COLOR_KEY = "accent_color"; public static final String LOCALE_KEY = "locale"; public static final String VERIFIED_KEY = "verified"; public static final String EMAIL_KEY = "email"; public static final String FLAGS_KEY = "flags"; public static final String PREMIUM_TYPE_KEY = "premium_type"; public static final String PUBLIC_FLAGS_KEY = "public_flags"; private final @NotNull Snowflake id; private final @NotNull String username; private final @NotNull String discriminator; private final @Nullable String avatarHash; private final @Nullable Boolean bot; private final @Nullable Boolean system; private final @Nullable Boolean mfaEnabled; private final @Nullable String bannerHash; private final @Nullable Integer accentColor; private final @Nullable String locale; private final @Nullable Boolean verified; private final @Nullable String email; private final @Nullable Integer flagsAsInt; private final @NotNull UserFlag[] flags; private final @Nullable PremiumType premiumType; private final @Nullable Integer publicFlagsAsInt; private final @NotNull UserFlag[] publicFlags; private final @NotNull LApi lApi; /** * * @param lApi {@link LApi} * @param id the user's id * @param username the user's username, not unique across the platform * @param discriminator the user's 4-digit discord-tag * @param avatarHash the user's avatar hash * @param bot whether the user belongs to an OAuth2 application * @param system whether the user is an Official Discord System user (part of the urgent message system) * @param mfaEnabled whether the user has two factor enabled on their account * @param bannerHash the user's banner hash * @param accentColor the user's banner color encoded as an integer representation of hexadecimal color code * @param locale the user's chosen language option * @param verified whether the email on this account has been verified * @param email the user's email * @param flagsAsInt the flags as int on a user's account * @param flags the flags on a user's account * @param premiumType the type of Nitro subscription on a user's account * @param publicFlagsAsInt the public flags on a user's account * @param publicFlags the public flags on a user's account */ public User(@NotNull LApi lApi, @NotNull Snowflake id, @NotNull String username, @NotNull String discriminator, @Nullable String avatarHash, @Nullable Boolean bot, @Nullable Boolean system, @Nullable Boolean mfaEnabled, @Nullable String bannerHash, @Nullable Integer accentColor, @Nullable String locale, @Nullable Boolean verified, @Nullable String email, @Nullable Integer flagsAsInt, @NotNull UserFlag[] flags, @Nullable PremiumType premiumType, @Nullable Integer publicFlagsAsInt, @NotNull UserFlag[] publicFlags) { this.lApi = lApi; this.id = id; this.username = username; this.discriminator = discriminator; this.avatarHash = avatarHash; this.bot = bot; this.system = system; this.mfaEnabled = mfaEnabled; this.bannerHash = bannerHash; this.accentColor = accentColor; this.locale = locale; this.verified = verified; this.email = email; this.flagsAsInt = flagsAsInt; this.flags = flags; this.premiumType = premiumType; this.publicFlagsAsInt = publicFlagsAsInt; this.publicFlags = publicFlags; } /** * * @param lApi {@link LApi} * @param data {@link SOData} with required fields * @return {@link User} * @throws InvalidDataException if {@link #ID_KEY}, {@link #USERNAME_KEY}, {@link #DISCRIMINATOR_KEY} are missing or null */ @Contract("_, null -> null; _, !null -> !null") public static @Nullable User fromData(@NotNull LApi lApi, @Nullable SOData data) throws InvalidDataException { if(data == null) return null; String id = (String) data.get(ID_KEY); String username = (String) data.get(USERNAME_KEY); String discriminator = (String) data.get(DISCRIMINATOR_KEY); String avatarHash = (String) data.get(AVATAR_KEY); Boolean bot = (Boolean) data.get(BOT_KEY); Boolean system = (Boolean) data.get(SYSTEM_KEY); Boolean mfaEnabled = (Boolean) data.get(MFA_ENABLED_KEY); String bannerHash = (String) data.get(BANNER_KEY); Number accentColor = (Number) data.get(ACCENT_COLOR_KEY); String locale = (String) data.get(LOCALE_KEY); Boolean verified = (Boolean) data.get(VERIFIED_KEY); String email = (String) data.get(EMAIL_KEY); Number flags = (Number) data.get(FLAGS_KEY); Number premiumType = (Number) data.get(PREMIUM_TYPE_KEY); Number publicFlags = (Number) data.get(PUBLIC_FLAGS_KEY); if(id == null || username == null || discriminator == null){ InvalidDataException.throwException(data, null, User.class, new Object[]{id, username, discriminator}, new String[]{ID_KEY, USERNAME_KEY, DISCRIMINATOR_KEY}); return null; //this will never happen, because above method will throw an exception } return new User(lApi, Snowflake.fromString(id), username, discriminator, avatarHash, bot, system, mfaEnabled, bannerHash, accentColor == null ? null : accentColor.intValue(), locale, verified, email, flags == null ? null : flags.intValue(), UserFlag.getFlagsFromInt(flags == null ? null : flags.intValue()), premiumType == null ? null : PremiumType.fromValue(premiumType.intValue()), publicFlags == null ? null : publicFlags.intValue(), UserFlag.getFlagsFromInt(publicFlags == null ? null : publicFlags.intValue())); } /** * the user's username, not unique across the platform */ @Override public @NotNull String getUsername() { return username; } /** * the user's 4-digit discord-tag */ @Override public @NotNull String getDiscriminator() { return discriminator; } /** * the user's id as {@link Snowflake} * @see #getId() */ @Override public @NotNull Snowflake getIdAsSnowflake() { return id; } /** * the user's avatar hash */ @Override public @Nullable String getAvatarHash() { return avatarHash; } /** * whether the user belongs to an OAuth2 application */ public @Nullable Boolean getBot() { return bot; } /** * whether the user belongs to an OAuth2 application * @return {@code false} if {@link #getBot()} is {@code null}, otherwise {@link #getBot()} */ public boolean isBot(){ return !(bot == null) && bot; } /** * whether the user is an Official Discord System user (part of the urgent message system) */ public @Nullable Boolean getSystem() { return system; } /** * whether the user is an Official Discord System user (part of the urgent message system) * @return {@code false} if {@link #getSystem()} is {@code null}, otherwise {@link #getSystem()} */ public boolean isSystem(){ return !(system == null) && system; } /** * whether the user has two factor enabled on their account */ public @Nullable Boolean getMfaEnabled() { return mfaEnabled; } /** * the user's banner hash */ public @Nullable String getBannerHash() { return bannerHash; } /** * * @param desiredSize the desired file size, a power of 2 between {@value ImageQuery#SIZE_QUERY_PARAM_MIN} and {@value ImageQuery#SIZE_QUERY_PARAM_MAX} * @param fileType see {@link CDNImage#ofUserBanner(LApi, String, String, AbstractFileType) restrictions} and {@link me.linusdev.lapi.api.communication.file.types.FileType FileType} * @return {@link me.linusdev.lapi.api.lapiandqueue.Queueable Queueable} to retrieve the banner */ public @NotNull CDNImageRetriever getBanner(int desiredSize, @NotNull AbstractFileType fileType){ if(getBannerHash() == null) throw new IllegalArgumentException("This user object has no banner hash"); return new CDNImageRetriever(CDNImage.ofUserBanner(lApi, getId(), getBannerHash(), fileType), desiredSize, true); } /** * * @param fileType see {@link CDNImage#ofUserBanner(LApi, String, String, AbstractFileType) restrictions} and {@link me.linusdev.lapi.api.communication.file.types.FileType FileType} * @return {@link me.linusdev.lapi.api.lapiandqueue.Queueable Queueable} to retrieve the banner */ public @NotNull CDNImageRetriever getBanner(@NotNull AbstractFileType fileType){ if(getBannerHash() == null) throw new IllegalArgumentException("This user object has no banner hash"); return new CDNImageRetriever(CDNImage.ofUserBanner(lApi, getId(), getBannerHash(), fileType)); } /** * the user's banner color encoded as an integer representation of hexadecimal color code */ public @Nullable Integer getAccentColor() { return accentColor; } /** * the user's chosen language option */ public @Nullable String getLocale() { return locale; } /** * whether the email on this account has been verified */ public @Nullable Boolean getVerified() { return verified; } /** * the user's email */ public @Nullable String getEmail() { return email; } /** * the flags on a user's account */ public @Nullable Integer getFlagsAsInt() { return flagsAsInt; } /** * the flags on a user's account as {@link UserFlag UserFlag[]} */ public @NotNull UserFlag[] getFlags() { return flags; } /** * the type of Nitro subscription on a user's account * @see PremiumType */ public @Nullable PremiumType getPremiumType() { return premiumType; } /** * the public flags on a user's account */ public @Nullable Integer getPublicFlagsAsInt() { return publicFlagsAsInt; } /** * the public flags on a user's account as {@link UserFlag UserFlag[]} */ public @NotNull UserFlag[] getPublicFlags() { return publicFlags; } /** * * @return {@link SOData} for this {@link User} */ @Override public @NotNull SOData getData() { SOData data = SOData.newOrderedDataWithKnownSize(15); data.add(ID_KEY, id); data.add(USERNAME_KEY, username); data.add(DISCRIMINATOR_KEY, discriminator); data.add(AVATAR_KEY, avatarHash); if(bot != null) data.add(BOT_KEY, bot); if(system != null) data.add(SYSTEM_KEY, system); if(mfaEnabled != null) data.add(MFA_ENABLED_KEY, mfaEnabled); if(bannerHash != null) data.add(BANNER_KEY, bannerHash); if(accentColor != null) data.add(ACCENT_COLOR_KEY, accentColor); if(locale != null) data.add(LOCALE_KEY, locale); if(verified != null) data.add(VERIFIED_KEY, verified); if(email != null) data.add(EMAIL_KEY, email); if(flagsAsInt != null) data.add(FLAGS_KEY, flagsAsInt); if(premiumType != null) data.add(PREMIUM_TYPE_KEY, premiumType); if(publicFlagsAsInt != null) data.add(PUBLIC_FLAGS_KEY, publicFlagsAsInt); return data; } @Override public @NotNull LApi getLApi() { return lApi; } }