repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
Gaduo/hapi-fhir
hapi-fhir-structures-dstu2.1/src/test/java/ca/uhn/fhir/rest/server/UpdateDstu2_1Test.java
7116
package ca.uhn.fhir.rest.server; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import java.nio.charset.StandardCharsets; import java.util.concurrent.TimeUnit; import org.apache.commons.io.IOUtils; import org.apache.http.HttpResponse; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.hl7.fhir.dstu2016may.model.IdType; import org.hl7.fhir.dstu2016may.model.OperationOutcome; import org.hl7.fhir.dstu2016may.model.Patient; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.annotation.ConditionalUrlParam; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.util.PortUtil; import ca.uhn.fhir.util.TestUtil; public class UpdateDstu2_1Test { private static CloseableHttpClient ourClient; private static String ourConditionalUrl; private static FhirContext ourCtx = FhirContext.forDstu2_1(); private static IdType ourId; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UpdateDstu2_1Test.class); private static int ourPort; private static Server ourServer; @Before public void before() { ourConditionalUrl = null; ourId = null; } @Test public void testUpdateConditional() throws Exception { 
Patient patient = new Patient(); patient.setId("001"); patient.addIdentifier().setValue("002"); HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient?_id=001"); httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); CloseableHttpResponse status = ourClient.execute(httpPost); try { String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8); ourLog.info("Response was:\n{}", responseContent); assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals("Patient?_id=001",ourConditionalUrl); assertEquals(null, ourId); } finally { IOUtils.closeQuietly(status.getEntity().getContent()); } } @Test public void testUpdateMissingIdInBody() throws Exception { Patient patient = new Patient(); patient.addIdentifier().setValue("002"); HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/001"); httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); HttpResponse status = ourClient.execute(httpPost); String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info("Response was:\n{}", responseContent); assertEquals(400, status.getStatusLine().getStatusCode()); OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, responseContent); assertEquals("Can not update resource, resource body must contain an ID element for update (PUT) operation", oo.getIssue().get(0).getDiagnostics()); } @Test public void testUpdateNormal() throws Exception { Patient patient = new Patient(); patient.setId("001"); patient.addIdentifier().setValue("002"); HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/001"); httpPost.setEntity(new 
StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); CloseableHttpResponse status = ourClient.execute(httpPost); try { String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8); ourLog.info("Response was:\n{}", responseContent); assertEquals(200, status.getStatusLine().getStatusCode()); assertNull(ourConditionalUrl); assertEquals("Patient/001", ourId.getValue()); } finally { IOUtils.closeQuietly(status.getEntity().getContent()); } } @Test public void testUpdateWrongIdInBody() throws Exception { Patient patient = new Patient(); patient.setId("Patient/3/_history/4"); patient.addIdentifier().setValue("002"); HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/1/_history/2"); httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); HttpResponse status = ourClient.execute(httpPost); String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info("Response was:\n{}", responseContent); assertEquals(400, status.getStatusLine().getStatusCode()); assertThat(responseContent, containsString("Resource body ID of &quot;3&quot; does not match")); } @AfterClass public static void afterClassClearContext() throws Exception { ourServer.stop(); TestUtil.clearAllStaticFieldsForUnitTest(); } @BeforeClass public static void beforeClass() throws Exception { ourPort = PortUtil.findFreePort(); ourServer = new Server(ourPort); ServletHandler proxyHandler = new ServletHandler(); RestfulServer servlet = new RestfulServer(ourCtx); servlet.setResourceProviders(new PatientProvider()); ServletHolder servletHolder = new ServletHolder(servlet); proxyHandler.addServletWithMapping(servletHolder, "/*"); ourServer.setHandler(proxyHandler); ourServer.start(); 
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); HttpClientBuilder builder = HttpClientBuilder.create(); builder.setConnectionManager(connectionManager); ourClient = builder.build(); } public static class PatientProvider implements IResourceProvider { @Override public Class<Patient> getResourceType() { return Patient.class; } @Update() public MethodOutcome updatePatient(@IdParam IdType theId, @ResourceParam Patient thePatient, @ConditionalUrlParam String theConditionalUrl) { ourId = theId; ourConditionalUrl = theConditionalUrl; IdType id = theId != null ? theId.withVersion(thePatient.getIdentifier().get(0).getValue()) : new IdType("Patient/1"); OperationOutcome oo = new OperationOutcome(); oo.addIssue().setDiagnostics("OODETAILS"); if (id.getValueAsString().contains("CREATE")) { return new MethodOutcome(id, oo, true); } return new MethodOutcome(id, oo); } } }
apache-2.0
ThiagoGarciaAlves/intellij-community
platform/lang-impl/src/com/intellij/codeInsight/generation/CommentByLineCommentHandler.java
33028
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.generation; import com.intellij.codeInsight.CommentUtil; import com.intellij.codeInsight.actions.MultiCaretCodeInsightActionHandler; import com.intellij.featureStatistics.FeatureUsageTracker; import com.intellij.ide.highlighter.custom.SyntaxTable; import com.intellij.injected.editor.EditorWindow; import com.intellij.psi.impl.source.tree.injected.InjectedCaret; import com.intellij.lang.Commenter; import com.intellij.lang.Language; import com.intellij.lang.LanguageCommenters; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.actionSystem.ex.ActionManagerEx; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.ex.util.EditorUtil; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.impl.AbstractFileType; import com.intellij.openapi.fileTypes.impl.CustomSyntaxTableFileType; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.codeStyle.*; import com.intellij.psi.util.PsiUtilBase; import com.intellij.psi.util.PsiUtilCore; import 
com.intellij.util.DocumentUtil; import com.intellij.util.containers.IntArrayList; import com.intellij.util.text.CharArrayUtil; import gnu.trove.THashMap; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; public class CommentByLineCommentHandler extends MultiCaretCodeInsightActionHandler { private Project myProject; private CodeStyleManager myCodeStyleManager; private final List<Block> myBlocks = new ArrayList<>(); @Override // first pass - adjacent carets are grouped into blocks public void invoke(@NotNull Project project, @NotNull Editor editor, @NotNull Caret caret, @NotNull PsiFile file) { myProject = project; file = file.getViewProvider().getPsi(file.getViewProvider().getBaseLanguage()); PsiElement context = InjectedLanguageManager.getInstance(file.getProject()).getInjectionHost(file); if (context != null && (context.textContains('\'') || context.textContains('\"') || context.textContains('/'))) { String s = context.getText(); if (StringUtil.startsWith(s, "\"") || StringUtil.startsWith(s, "\'") || StringUtil.startsWith(s, "/")) { file = context.getContainingFile(); editor = editor instanceof EditorWindow ? ((EditorWindow)editor).getDelegate() : editor; caret = caret instanceof InjectedCaret ? 
((InjectedCaret)caret).getDelegate() : caret; } } Document document = editor.getDocument(); boolean hasSelection = caret.hasSelection(); int startOffset = caret.getSelectionStart(); int endOffset = caret.getSelectionEnd(); FoldRegion fold = editor.getFoldingModel().getCollapsedRegionAtOffset(startOffset); if (fold != null && fold.shouldNeverExpand() && fold.getStartOffset() == startOffset && fold.getEndOffset() == endOffset) { // Foldings that never expand are automatically selected, so the fact it is selected must not interfere with commenter's logic hasSelection = false; } if (document.getTextLength() == 0) return; while (true) { int firstLineStart = DocumentUtil.getLineStartOffset(startOffset, document); FoldRegion collapsedAt = editor.getFoldingModel().getCollapsedRegionAtOffset(firstLineStart - 1); if (collapsedAt == null) break; int regionStartOffset = collapsedAt.getStartOffset(); if (regionStartOffset >= startOffset) break; startOffset = regionStartOffset; } while (true) { int lastLineEnd = DocumentUtil.getLineEndOffset(endOffset, document); FoldRegion collapsedAt = editor.getFoldingModel().getCollapsedRegionAtOffset(lastLineEnd); if (collapsedAt == null) break; int regionEndOffset = collapsedAt.getEndOffset(); if (regionEndOffset <= endOffset) break; endOffset = regionEndOffset; } int startLine = document.getLineNumber(startOffset); int endLine = document.getLineNumber(endOffset); if (endLine > startLine && document.getLineStartOffset(endLine) == endOffset) { endLine--; } Block lastBlock = myBlocks.isEmpty() ? 
null : myBlocks.get(myBlocks.size() - 1); Block currentBlock; if (lastBlock == null || lastBlock.editor != editor || lastBlock.psiFile != file || startLine > (lastBlock.endLine + 1)) { currentBlock = new Block(); currentBlock.editor = editor; currentBlock.psiFile = file; currentBlock.startLine = startLine; myBlocks.add(currentBlock); } else { currentBlock = lastBlock; } currentBlock.carets.add(caret); currentBlock.endLine = endLine; boolean wholeLinesSelected = !hasSelection || startOffset == document.getLineStartOffset(document.getLineNumber(startOffset)) && endOffset == document.getLineEndOffset(document.getLineNumber(endOffset - 1)) + 1; boolean startingNewLineComment = !hasSelection && isLineEmpty(document, document.getLineNumber(startOffset)) && !Comparing.equal(IdeActions.ACTION_COMMENT_LINE, ActionManagerEx.getInstanceEx().getPrevPreformedActionId()); currentBlock.caretUpdate = startingNewLineComment ? CaretUpdate.PUT_AT_COMMENT_START : !hasSelection ? CaretUpdate.SHIFT_DOWN : wholeLinesSelected ? 
CaretUpdate.RESTORE_SELECTION : null; } @Override public void postInvoke() { FeatureUsageTracker.getInstance().triggerFeatureUsed("codeassists.comment.line"); myCodeStyleManager = CodeStyleManager.getInstance(myProject); CodeStyleSettings codeStyleSettings = CodeStyleSettingsManager.getSettings(myProject); // second pass - determining whether we need to comment or to uncomment boolean allLinesCommented = true; for (Block block : myBlocks) { int startLine = block.startLine; int endLine = block.endLine; Document document = block.editor.getDocument(); PsiFile psiFile = block.psiFile; block.startOffsets = new int[endLine - startLine + 1]; block.endOffsets = new int[endLine - startLine + 1]; block.commenters = new Commenter[endLine - startLine + 1]; block.commenterStateMap = new THashMap<>(); CharSequence chars = document.getCharsSequence(); boolean singleline = startLine == endLine; int offset = document.getLineStartOffset(startLine); offset = CharArrayUtil.shiftForward(chars, offset, " \t"); int endOffset = CharArrayUtil.shiftBackward(chars, document.getLineEndOffset(endLine), " \t\n"); block.blockSuitableCommenter = getBlockSuitableCommenter(psiFile, offset, endOffset); Language lineStartLanguage = getLineStartLanguage(block.editor, psiFile, startLine); CommonCodeStyleSettings languageSettings = codeStyleSettings.getCommonSettings(lineStartLanguage); block.commentWithIndent = !languageSettings.LINE_COMMENT_AT_FIRST_COLUMN; block.addSpace = languageSettings.LINE_COMMENT_ADD_SPACE; for (int line = startLine; line <= endLine; line++) { Commenter commenter = block.blockSuitableCommenter != null ? 
block.blockSuitableCommenter : findCommenter(block.editor, psiFile, line); if (commenter == null || commenter.getLineCommentPrefix() == null && (commenter.getBlockCommentPrefix() == null || commenter.getBlockCommentSuffix() == null)) { block.skip = true; break; } if (commenter instanceof SelfManagingCommenter && block.commenterStateMap.get(commenter) == null) { final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter; CommenterDataHolder state = selfManagingCommenter.createLineCommentingState(startLine, endLine, document, psiFile); if (state == null) state = SelfManagingCommenter.EMPTY_STATE; block.commenterStateMap.put(selfManagingCommenter, state); } block.commenters[line - startLine] = commenter; if (!isLineCommented(block, line, commenter) && (singleline || !isLineEmpty(document, line))) { allLinesCommented = false; if (commenter instanceof IndentedCommenter) { final Boolean value = ((IndentedCommenter)commenter).forceIndentedLineComment(); if (value != null) { block.commentWithIndent = value; } } break; } } } boolean moveCarets = true; for (Block block : myBlocks) { if (block.carets.size() > 1 && block.startLine != block.endLine) { moveCarets = false; break; } } // third pass - actual change Collections.reverse(myBlocks); for (Block block : myBlocks) { if (!block.skip) { if (!allLinesCommented) { if (!block.commentWithIndent) { doDefaultCommenting(block); } else { doIndentCommenting(block); } } else { for (int line = block.endLine; line >= block.startLine; line--) { uncommentLine(block, line, block.addSpace); } } } if (!moveCarets || block.caretUpdate == null) { continue; } Document document = block.editor.getDocument(); for (Caret caret : block.carets) { switch (block.caretUpdate) { case PUT_AT_COMMENT_START: final Commenter commenter = block.commenters[0]; if (commenter != null) { String prefix; if (commenter instanceof SelfManagingCommenter) { prefix = ((SelfManagingCommenter)commenter).getCommentPrefix(block.startLine, document, 
block.commenterStateMap.get((SelfManagingCommenter)commenter)); if (prefix == null) prefix = ""; // TODO } else { prefix = commenter.getLineCommentPrefix(); if (prefix == null) prefix = commenter.getBlockCommentPrefix(); } int lineStart = document.getLineStartOffset(block.startLine); lineStart = CharArrayUtil.shiftForward(document.getCharsSequence(), lineStart, " \t"); lineStart += prefix.length(); lineStart = CharArrayUtil.shiftForward(document.getCharsSequence(), lineStart, " \t"); if (lineStart > document.getTextLength()) lineStart = document.getTextLength(); caret.moveToOffset(lineStart); } break; case SHIFT_DOWN: // Don't tweak caret position if we're already located on the last document line. LogicalPosition position = caret.getLogicalPosition(); if (position.line < document.getLineCount() - 1) { int verticalShift = 1 + block.editor.getSoftWrapModel().getSoftWrapsForLine(position.line).size() - EditorUtil.getSoftWrapCountAfterLineStart(block.editor, position); caret.moveCaretRelatively(0, verticalShift, false, true); } break; case RESTORE_SELECTION: caret.setSelection(document.getLineStartOffset(document.getLineNumber(caret.getSelectionStart())), caret.getSelectionEnd()); } } } } private static Commenter getBlockSuitableCommenter(final PsiFile file, int offset, int endOffset) { final Language languageSuitableForCompleteFragment; if (offset >= endOffset) { // we are on empty line PsiElement element = file.findElementAt(offset); if (element != null) languageSuitableForCompleteFragment = element.getParent().getLanguage(); else languageSuitableForCompleteFragment = null; } else { languageSuitableForCompleteFragment = PsiUtilBase.reallyEvaluateLanguageInRange(offset, endOffset, file); } Commenter blockSuitableCommenter = languageSuitableForCompleteFragment == null ? 
LanguageCommenters.INSTANCE.forLanguage(file.getLanguage()) : null; if (blockSuitableCommenter == null && file.getFileType() instanceof CustomSyntaxTableFileType) { blockSuitableCommenter = new Commenter() { final SyntaxTable mySyntaxTable = ((CustomSyntaxTableFileType)file.getFileType()).getSyntaxTable(); @Override @Nullable public String getLineCommentPrefix() { return mySyntaxTable.getLineComment(); } @Override @Nullable public String getBlockCommentPrefix() { return mySyntaxTable.getStartComment(); } @Override @Nullable public String getBlockCommentSuffix() { return mySyntaxTable.getEndComment(); } @Override public String getCommentedBlockCommentPrefix() { return null; } @Override public String getCommentedBlockCommentSuffix() { return null; } }; } return blockSuitableCommenter; } private static boolean isLineEmpty(Document document, final int line) { final CharSequence chars = document.getCharsSequence(); int start = document.getLineStartOffset(line); int end = Math.min(document.getLineEndOffset(line), document.getTextLength() - 1); for (int i = start; i <= end; i++) { if (!Character.isWhitespace(chars.charAt(i))) return false; } return true; } private static boolean isLineCommented(Block block, final int line, final Commenter commenter) { boolean commented; int lineEndForBlockCommenting = -1; Document document = block.editor.getDocument(); int lineStart = document.getLineStartOffset(line); CharSequence chars = document.getCharsSequence(); lineStart = CharArrayUtil.shiftForward(chars, lineStart, " \t"); if (commenter instanceof SelfManagingCommenter) { final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter; commented = selfManagingCommenter.isLineCommented(line, lineStart, document, block.commenterStateMap.get(selfManagingCommenter)); } else { String prefix = commenter.getLineCommentPrefix(); if (prefix != null) { commented = CharArrayUtil.regionMatches(chars, lineStart, StringUtil.trimTrailing(prefix)); } else { prefix = 
commenter.getBlockCommentPrefix(); String suffix = commenter.getBlockCommentSuffix(); final int textLength = document.getTextLength(); lineEndForBlockCommenting = document.getLineEndOffset(line); if (lineEndForBlockCommenting == textLength) { final int shifted = CharArrayUtil.shiftBackward(chars, textLength - 1, " \t"); if (shifted < textLength - 1) lineEndForBlockCommenting = shifted; } else { lineEndForBlockCommenting = CharArrayUtil.shiftBackward(chars, lineEndForBlockCommenting, " \t"); } commented = lineStart == lineEndForBlockCommenting && block.startLine != block.endLine || CharArrayUtil.regionMatches(chars, lineStart, prefix) && CharArrayUtil.regionMatches(chars, lineEndForBlockCommenting - suffix.length(), suffix); } } if (commented) { block.startOffsets[line - block.startLine] = lineStart; block.endOffsets[line - block.startLine] = lineEndForBlockCommenting; } return commented; } @Nullable private static Commenter findCommenter(@NotNull Editor editor, @NotNull PsiFile file, final int line) { final FileType fileType = file.getFileType(); if (fileType instanceof AbstractFileType) { return ((AbstractFileType)fileType).getCommenter(); } final Language lineStartLanguage = getLineStartLanguage(editor, file, line); final Language lineEndLanguage = getLineEndLanguage(file, editor, line); return CommentByBlockCommentHandler.getCommenter(file, editor, lineStartLanguage, lineEndLanguage); } @NotNull private static Language getLineStartLanguage(@NotNull Editor editor, @NotNull PsiFile file, int line) { Document document = editor.getDocument(); int lineStartOffset = document.getLineStartOffset(line); lineStartOffset = Math.max(0, CharArrayUtil.shiftForward(document.getCharsSequence(), lineStartOffset, " \t")); return PsiUtilCore.getLanguageAtOffset(file, lineStartOffset); } @NotNull private static Language getLineEndLanguage(@NotNull PsiFile file, @NotNull Editor editor, int line) { Document document = editor.getDocument(); int lineEndOffset = 
document.getLineEndOffset(line) - 1; lineEndOffset = Math.max(0, CharArrayUtil.shiftBackward(document.getCharsSequence(), lineEndOffset < 0 ? 0 : lineEndOffset, " \t")); return PsiUtilCore.getLanguageAtOffset(file, lineEndOffset); } private Indent computeMinIndent(Editor editor, PsiFile psiFile, int line1, int line2, FileType fileType) { Document document = editor.getDocument(); Indent minIndent = CommentUtil.getMinLineIndent(myProject, document, line1, line2, fileType); if (line1 > 0) { int commentOffset = getCommentStart(editor, psiFile, line1 - 1); if (commentOffset >= 0) { int lineStart = document.getLineStartOffset(line1 - 1); String space = document.getCharsSequence().subSequence(lineStart, commentOffset).toString(); Indent indent = myCodeStyleManager.getIndent(space, fileType); minIndent = minIndent != null ? indent.min(minIndent) : indent; } } if (minIndent == null) { minIndent = myCodeStyleManager.zeroIndent(); } return minIndent; } private static int getCommentStart(Editor editor, PsiFile psiFile, int line) { int offset = editor.getDocument().getLineStartOffset(line); CharSequence chars = editor.getDocument().getCharsSequence(); offset = CharArrayUtil.shiftForward(chars, offset, " \t"); final Commenter commenter = findCommenter(editor, psiFile, line); if (commenter == null) return -1; String prefix = commenter.getLineCommentPrefix(); if (prefix == null) prefix = commenter.getBlockCommentPrefix(); if (prefix == null) return -1; return CharArrayUtil.regionMatches(chars, offset, prefix) ? 
offset : -1; } public void doDefaultCommenting(final Block block) { final Document document = block.editor.getDocument(); DocumentUtil.executeInBulk( document, block.endLine - block.startLine >= Registry.intValue("comment.by.line.bulk.lines.trigger"), () -> { for (int line = block.endLine; line >= block.startLine; line--) { int offset = document.getLineStartOffset(line); commentLine(block, line, offset); } }); } private void doIndentCommenting(final Block block) { final Document document = block.editor.getDocument(); final CharSequence chars = document.getCharsSequence(); final FileType fileType = block.psiFile.getFileType(); final Indent minIndent = computeMinIndent(block.editor, block.psiFile, block.startLine, block.endLine, fileType); DocumentUtil.executeInBulk( document, block.endLine - block.startLine > Registry.intValue("comment.by.line.bulk.lines.trigger"), () -> { for (int line = block.endLine; line >= block.startLine; line--) { int lineStart = document.getLineStartOffset(line); int offset = lineStart; final StringBuilder buffer = new StringBuilder(); while (true) { String space = buffer.toString(); Indent indent = myCodeStyleManager.getIndent(space, fileType); if (indent.isGreaterThan(minIndent) || indent.equals(minIndent)) break; char c = chars.charAt(offset); if (c != ' ' && c != '\t') { String newSpace = myCodeStyleManager.fillIndent(minIndent, fileType); document.replaceString(lineStart, offset, newSpace); offset = lineStart + newSpace.length(); break; } buffer.append(c); offset++; } commentLine(block, line, offset); } }); } private static void uncommentRange(Document document, int startOffset, int endOffset, @NotNull Commenter commenter) { final String commentedSuffix = commenter.getCommentedBlockCommentSuffix(); final String commentedPrefix = commenter.getCommentedBlockCommentPrefix(); final String prefix = commenter.getBlockCommentPrefix(); final String suffix = commenter.getBlockCommentSuffix(); if (prefix == null || suffix == null) { return; } if 
(endOffset >= suffix.length() && CharArrayUtil.regionMatches(document.getCharsSequence(), endOffset - suffix.length(), suffix)) { document.deleteString(endOffset - suffix.length(), endOffset); endOffset -= suffix.length(); } if (commentedPrefix != null && commentedSuffix != null) { CommentByBlockCommentHandler.commentNestedComments(document, new TextRange(startOffset, endOffset), commenter); } document.deleteString(startOffset, startOffset + prefix.length()); } private static void uncommentLine(Block block, int line, boolean removeSpace) { Document document = block.editor.getDocument(); Commenter commenter = block.commenters[line - block.startLine]; if (commenter == null) commenter = findCommenter(block.editor, block.psiFile, line); if (commenter == null) return; final int startOffset = block.startOffsets[line - block.startLine]; final int endOffset = block.endOffsets[line - block.startLine]; if (startOffset == endOffset) { return; } if (commenter instanceof SelfManagingCommenter) { final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter; selfManagingCommenter.uncommentLine(line, startOffset, document, block.commenterStateMap.get(selfManagingCommenter)); return; } RangeMarker marker = endOffset > startOffset ? 
block.editor.getDocument().createRangeMarker(startOffset, endOffset) : null; try { if (doUncommentLine(line, document, commenter, startOffset, endOffset, removeSpace)) return; if (marker != null) { CommentByBlockCommentHandler.processDocument(document, marker, commenter, false); } } finally { if (marker != null) { marker.dispose(); } } } private static boolean doUncommentLine(int line, Document document, Commenter commenter, int startOffset, int endOffset, boolean removeSpace) { String prefix = commenter.getLineCommentPrefix(); if (prefix != null) { if (removeSpace) prefix += ' '; CharSequence chars = document.getCharsSequence(); if (commenter instanceof CommenterWithLineSuffix) { CommenterWithLineSuffix commenterWithLineSuffix = (CommenterWithLineSuffix)commenter; String suffix = commenterWithLineSuffix.getLineCommentSuffix(); int theEnd = endOffset > 0 ? endOffset : document.getLineEndOffset(line); while (theEnd > startOffset && Character.isWhitespace(chars.charAt(theEnd - 1))) { theEnd--; } String lineText = document.getText(new TextRange(startOffset, theEnd)); if (lineText.indexOf(suffix) != -1) { int start = startOffset + lineText.indexOf(suffix); document.deleteString(start, start + suffix.length()); } } boolean matchesTrimmed = false; boolean commented = CharArrayUtil.regionMatches(chars, startOffset, prefix) || (matchesTrimmed = prefix.endsWith(" ") && CharArrayUtil.regionMatches(chars, startOffset, prefix.trim())); assert commented; int charsToDelete = matchesTrimmed ? 
prefix.trim().length() : prefix.length(); document.deleteString(startOffset, startOffset + charsToDelete); // delete whitespace on line if that's all that left after uncommenting int lineStartOffset = document.getLineStartOffset(line); int lineEndOffset = document.getLineEndOffset(line); if (CharArrayUtil.isEmptyOrSpaces(chars, lineStartOffset, lineEndOffset)) document.deleteString(lineStartOffset, lineEndOffset); return true; } String text = document.getCharsSequence().subSequence(startOffset, endOffset).toString(); prefix = commenter.getBlockCommentPrefix(); final String suffix = commenter.getBlockCommentSuffix(); if (prefix == null || suffix == null) { return true; } IntArrayList prefixes = new IntArrayList(); IntArrayList suffixes = new IntArrayList(); for (int position = 0; position < text.length(); ) { int prefixPos = text.indexOf(prefix, position); if (prefixPos == -1) { break; } prefixes.add(prefixPos); position = prefixPos + prefix.length(); int suffixPos = text.indexOf(suffix, position); if (suffixPos == -1) { suffixPos = text.length() - suffix.length(); } suffixes.add(suffixPos); position = suffixPos + suffix.length(); } assert prefixes.size() == suffixes.size(); for (int i = prefixes.size() - 1; i >= 0; i--) { uncommentRange(document, startOffset + prefixes.get(i), Math.min(startOffset + suffixes.get(i) + suffix.length(), endOffset), commenter); } return false; } private static void commentLine(Block block, int line, int offset) { Commenter commenter = block.blockSuitableCommenter; Document document = block.editor.getDocument(); if (commenter == null) commenter = findCommenter(block.editor, block.psiFile, line); if (commenter == null) return; if (commenter instanceof SelfManagingCommenter) { final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter; selfManagingCommenter.commentLine(line, offset, document, block.commenterStateMap.get(selfManagingCommenter)); return; } int endOffset = document.getLineEndOffset(line); 
RangeMarker marker = document.createRangeMarker(offset, endOffset); marker.setGreedyToLeft(true); marker.setGreedyToRight(true); try { if (doCommentLine(block, line, offset, endOffset, commenter, document)) return; CommentByBlockCommentHandler.processDocument(document, marker, commenter, true); } finally { marker.dispose(); } } private static boolean doCommentLine(Block block, int line, int offset, int endOffset, Commenter commenter, Document document) { String prefix = commenter.getLineCommentPrefix(); int shiftedStartOffset = CharArrayUtil.shiftForward(document.getCharsSequence(), offset, " \t"); if (prefix != null) { if (commenter instanceof CommenterWithLineSuffix) { endOffset = CharArrayUtil.shiftBackward(document.getCharsSequence(), endOffset, " \t"); String lineSuffix = ((CommenterWithLineSuffix)commenter).getLineCommentSuffix(); if (!CharArrayUtil.regionMatches(document.getCharsSequence(), shiftedStartOffset, prefix)) { if (!CharArrayUtil.regionMatches(document.getCharsSequence(), endOffset - lineSuffix.length(), lineSuffix)) { document.insertString(endOffset, lineSuffix); } document.insertString(offset, prefix); } } else { if (block.addSpace && shiftedStartOffset < document.getTextLength() && document.getCharsSequence().charAt(shiftedStartOffset) != '\n') { prefix += ' '; } document.insertString(offset, prefix); } } else { prefix = commenter.getBlockCommentPrefix(); String suffix = commenter.getBlockCommentSuffix(); if (prefix == null || suffix == null) return true; if (endOffset == offset && block.startLine != block.endLine) return true; final int textLength = document.getTextLength(); final CharSequence chars = document.getCharsSequence(); offset = CharArrayUtil.shiftForward(chars, offset, " \t"); if (endOffset == textLength) { final int shifted = CharArrayUtil.shiftBackward(chars, textLength - 1, " \t") + 1; if (shifted < textLength) endOffset = shifted; } else { endOffset = CharArrayUtil.shiftBackward(chars, endOffset, " \t"); } if (endOffset < offset 
|| offset == textLength - 1 && line != document.getLineCount() - 1) { return true; } final String text = chars.subSequence(offset, endOffset).toString(); final IntArrayList prefixes = new IntArrayList(); final IntArrayList suffixes = new IntArrayList(); final String commentedSuffix = commenter.getCommentedBlockCommentSuffix(); final String commentedPrefix = commenter.getCommentedBlockCommentPrefix(); for (int position = 0; position < text.length(); ) { int nearestPrefix = text.indexOf(prefix, position); if (nearestPrefix == -1) { nearestPrefix = text.length(); } int nearestSuffix = text.indexOf(suffix, position); if (nearestSuffix == -1) { nearestSuffix = text.length(); } if (Math.min(nearestPrefix, nearestSuffix) == text.length()) { break; } if (nearestPrefix < nearestSuffix) { prefixes.add(nearestPrefix); position = nearestPrefix + prefix.length(); } else { suffixes.add(nearestSuffix); position = nearestSuffix + suffix.length(); } } if (!(commentedSuffix == null && !suffixes.isEmpty() && offset + suffixes.get(suffixes.size() - 1) + suffix.length() >= endOffset)) { document.insertString(endOffset, suffix); } int nearestPrefix = prefixes.size() - 1; int nearestSuffix = suffixes.size() - 1; while (nearestPrefix >= 0 || nearestSuffix >= 0) { if (nearestSuffix == -1 || nearestPrefix != -1 && prefixes.get(nearestPrefix) > suffixes.get(nearestSuffix)) { final int position = prefixes.get(nearestPrefix); nearestPrefix--; if (commentedPrefix != null) { document.replaceString(offset + position, offset + position + prefix.length(), commentedPrefix); } else if (position != 0) { document.insertString(offset + position, suffix); } } else { final int position = suffixes.get(nearestSuffix); nearestSuffix--; if (commentedSuffix != null) { document.replaceString(offset + position, offset + position + suffix.length(), commentedSuffix); } else if (offset + position + suffix.length() < endOffset) { document.insertString(offset + position + suffix.length(), prefix); } } } if 
(!(commentedPrefix == null && !prefixes.isEmpty() && prefixes.get(0) == 0)) { document.insertString(offset, prefix); } } return false; } private static class Block { private Editor editor; private PsiFile psiFile; private List<Caret> carets = new ArrayList<>(); private int startLine; private int endLine; private int[] startOffsets; private int[] endOffsets; private Commenter blockSuitableCommenter; private Commenter[] commenters; private Map<SelfManagingCommenter, CommenterDataHolder> commenterStateMap; private boolean commentWithIndent; private CaretUpdate caretUpdate; private boolean skip; private boolean addSpace; } private enum CaretUpdate { PUT_AT_COMMENT_START, SHIFT_DOWN, RESTORE_SELECTION } }
apache-2.0
dkhwangbo/druid
extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchMergeAggregatorFactory.java
2524
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.query.aggregation.datasketches.quantiles; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.yahoo.sketches.quantiles.DoublesSketch; import org.apache.druid.query.aggregation.Aggregator; import org.apache.druid.query.aggregation.AggregatorUtil; import org.apache.druid.query.aggregation.BufferAggregator; import org.apache.druid.segment.ColumnSelectorFactory; import org.apache.druid.segment.ColumnValueSelector; import org.apache.druid.segment.NilColumnValueSelector; public class DoublesSketchMergeAggregatorFactory extends DoublesSketchAggregatorFactory { @JsonCreator public DoublesSketchMergeAggregatorFactory( @JsonProperty("name") final String name, @JsonProperty("k") final Integer k) { super(name, name, k, AggregatorUtil.QUANTILES_DOUBLES_SKETCH_MERGE_CACHE_TYPE_ID); } @Override public Aggregator factorize(final ColumnSelectorFactory metricFactory) { final ColumnValueSelector<DoublesSketch> selector = metricFactory.makeColumnValueSelector(getFieldName()); if (selector instanceof NilColumnValueSelector) { return new DoublesSketchNoOpAggregator(); } return new DoublesSketchMergeAggregator(selector, getK()); 
} @Override public BufferAggregator factorizeBuffered(final ColumnSelectorFactory metricFactory) { final ColumnValueSelector<DoublesSketch> selector = metricFactory.makeColumnValueSelector(getFieldName()); if (selector instanceof NilColumnValueSelector) { return new DoublesSketchNoOpBufferAggregator(); } return new DoublesSketchMergeBufferAggregator(selector, getK(), getMaxIntermediateSizeWithNulls()); } }
apache-2.0
mr253727942/DSC
src/test/java/net/floodlightcontroller/core/test/MockFloodlightProvider.java
13271
/** * Copyright 2011, Big Switch Networks, Inc. * Originally created by David Erickson, Stanford University * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. **/ package net.floodlightcontroller.core.test; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import net.dsc.cluster.HAListenerTypeMarker; import net.dsc.cluster.HARole; import net.dsc.cluster.RoleInfo; import net.dsc.cluster.model.ControllerModel; import net.dsc.hazelcast.listener.IHAListener; import net.floodlightcontroller.core.FloodlightContext; import net.floodlightcontroller.core.IFloodlightProviderService; import net.floodlightcontroller.core.IInfoProvider; import net.floodlightcontroller.core.IListener.Command; import net.floodlightcontroller.core.IOFMessageListener; import net.floodlightcontroller.core.IOFSwitch; import net.floodlightcontroller.core.internal.Controller.IUpdate; import net.floodlightcontroller.core.internal.Controller.ModuleLoaderState; import net.floodlightcontroller.core.internal.RoleManager; import 
net.floodlightcontroller.core.module.FloodlightModuleContext; import net.floodlightcontroller.core.module.FloodlightModuleException; import net.floodlightcontroller.core.module.IFloodlightModule; import net.floodlightcontroller.core.module.IFloodlightService; import net.floodlightcontroller.core.util.ListenerDispatcher; import net.floodlightcontroller.packet.Ethernet; import org.jboss.netty.util.Timer; import org.projectfloodlight.openflow.protocol.OFMessage; import org.projectfloodlight.openflow.protocol.OFPacketIn; import org.projectfloodlight.openflow.protocol.OFType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author David Erickson (daviderickson@cs.stanford.edu) */ public class MockFloodlightProvider implements IFloodlightModule, IFloodlightProviderService { private final static Logger log = LoggerFactory.getLogger(MockFloodlightProvider.class); protected ConcurrentMap<OFType, ListenerDispatcher<OFType,IOFMessageListener>> listeners; protected ListenerDispatcher<HAListenerTypeMarker, IHAListener> haListeners; private HARole role; private final String openFlowHostname = "127.0.0.1"; private final int openFlowPort = 6653; private final boolean useAsyncUpdates; private volatile ExecutorService executorService; private volatile Future<?> mostRecentUpdateFuture; /** * */ public MockFloodlightProvider(boolean useAsyncUpdates) { listeners = new ConcurrentHashMap<OFType, ListenerDispatcher<OFType, IOFMessageListener>>(); haListeners = new ListenerDispatcher<HAListenerTypeMarker, IHAListener>(); role = null; this.useAsyncUpdates = useAsyncUpdates; } public MockFloodlightProvider() { this(false); } @Override public synchronized void addOFMessageListener(OFType type, IOFMessageListener listener) { ListenerDispatcher<OFType, IOFMessageListener> ldd = listeners.get(type); if (ldd == null) { ldd = new ListenerDispatcher<OFType, IOFMessageListener>(); listeners.put(type, ldd); } ldd.addListener(type, listener); } @Override public synchronized void 
removeOFMessageListener(OFType type, IOFMessageListener listener) { ListenerDispatcher<OFType, IOFMessageListener> ldd = listeners.get(type); if (ldd != null) { ldd.removeListener(listener); } } /** * @return the listeners */ @Override public Map<OFType, List<IOFMessageListener>> getListeners() { Map<OFType, List<IOFMessageListener>> lers = new HashMap<OFType, List<IOFMessageListener>>(); for(Entry<OFType, ListenerDispatcher<OFType, IOFMessageListener>> e : listeners.entrySet()) { lers.put(e.getKey(), e.getValue().getOrderedListeners()); } return Collections.unmodifiableMap(lers); } public void clearListeners() { this.listeners.clear(); } public void dispatchMessage(IOFSwitch sw, OFMessage msg) { dispatchMessage(sw, msg, new FloodlightContext()); } public void dispatchMessage(IOFSwitch sw, OFMessage msg, FloodlightContext bc) { List<IOFMessageListener> theListeners = listeners.get(msg.getType()).getOrderedListeners(); if (theListeners != null) { Command result = Command.CONTINUE; Iterator<IOFMessageListener> it = theListeners.iterator(); if (OFType.PACKET_IN.equals(msg.getType())) { OFPacketIn pi = (OFPacketIn)msg; Ethernet eth = new Ethernet(); eth.deserialize(pi.getData(), 0, pi.getData().length); IFloodlightProviderService.bcStore.put(bc, IFloodlightProviderService.CONTEXT_PI_PAYLOAD, eth); } while (it.hasNext() && !Command.STOP.equals(result)) { result = it.next().receive(sw, msg, bc); } } } @Override public void handleOutgoingMessage(IOFSwitch sw, OFMessage m) { FloodlightContext bc = new FloodlightContext(); List<IOFMessageListener> msgListeners = null; if (listeners.containsKey(m.getType())) { msgListeners = listeners.get(m.getType()).getOrderedListeners(); } if (msgListeners != null) { for (IOFMessageListener listener : msgListeners) { if (Command.STOP.equals(listener.receive(sw, m, bc))) { break; } } } } public void handleOutgoingMessages(IOFSwitch sw, List<OFMessage> msglist, FloodlightContext bc) { for (OFMessage m:msglist) { handleOutgoingMessage(sw, 
m); } } @Override public void run() { logListeners(); if (useAsyncUpdates) executorService = Executors.newSingleThreadExecutor(); } public void shutdown() { if (executorService != null) { executorService.shutdownNow(); executorService = null; mostRecentUpdateFuture = null; } } @Override public Collection<Class<? extends IFloodlightService>> getModuleServices() { Collection<Class<? extends IFloodlightService>> services = new ArrayList<Class<? extends IFloodlightService>>(1); services.add(IFloodlightProviderService.class); return services; } @Override public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() { Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? extends IFloodlightService>, IFloodlightService>(); m.put(IFloodlightProviderService.class, this); return m; } @Override public Collection<Class<? extends IFloodlightService>> getModuleDependencies() { return null; } @Override public void init(FloodlightModuleContext context) throws FloodlightModuleException { // do nothing. } @Override public void startUp(FloodlightModuleContext context) { // do nothing. } @Override public void addInfoProvider(String type, IInfoProvider provider) { // do nothing. } @Override public void removeInfoProvider(String type, IInfoProvider provider) { // do nothing. 
} @Override public Map<String, Object> getControllerInfo(String type) { // mock up something Map<String, Object> summary = new HashMap<String, Object>(); summary.put("test-summary-1", 2); summary.put("test-summary-2", 5); return summary; } @Override public void addUpdateToQueue(final IUpdate update) { if (useAsyncUpdates) { mostRecentUpdateFuture = executorService.submit(new Runnable() { @Override public void run() { update.dispatch(); } }); } else { update.dispatch(); } } public void waitForUpdates(long timeout, TimeUnit unit) throws InterruptedException { long timeoutNanos = unit.toNanos(timeout); long start = System.nanoTime(); for (;;) { Future<?> future = mostRecentUpdateFuture; if ((future == null) || future.isDone()) break; Thread.sleep(100); long now = System.nanoTime(); if (now > start + timeoutNanos) { fail("Timeout waiting for update tasks to complete"); } } } @Override public void addHAListener(IHAListener listener) { haListeners.addListener(null,listener); } @Override public void removeHAListener(IHAListener listener) { haListeners.removeListener(listener); } @Override public HARole getRole() { /* DISABLE THIS CHECK FOR NOW. 
OTHER UNIT TESTS NEED TO BE UPDATED * FIRST if (this.role == null) throw new IllegalStateException("You need to call setRole on " + "MockFloodlightProvider before calling startUp on " + "other modules"); */ return this.role; } @Override public void setRole(HARole role, String roleChangeDescription) { this.role = role; } /** * Dispatches a new role change notification * @param oldRole * @param newRole */ public void transitionToActive() { IUpdate update = new IUpdate() { @Override public void dispatch() { for (IHAListener rl : haListeners.getOrderedListeners()) { rl.transitionToActive(); } } }; addUpdateToQueue(update); } @Override public Map<String, String> getControllerNodeIPs() { return null; } @Override public long getSystemStartTime() { return 0; } private void logListeners() { for (Map.Entry<OFType, ListenerDispatcher<OFType, IOFMessageListener>> entry : listeners.entrySet()) { OFType type = entry.getKey(); ListenerDispatcher<OFType, IOFMessageListener> ldd = entry.getValue(); StringBuffer sb = new StringBuffer(); sb.append("OFListeners for "); sb.append(type); sb.append(": "); for (IOFMessageListener l : ldd.getOrderedListeners()) { sb.append(l.getName()); sb.append(","); } log.debug(sb.toString()); } } @Override public RoleInfo getRoleInfo() { // TODO Auto-generated method stub return null; } @Override public Map<String, Long> getMemory() { Map<String, Long> m = new HashMap<String, Long>(); m.put("total", 1000000000L); m.put("free", 20000000L); return m; } @Override public Long getUptime() { return 1000000L; } @Override public String getOFHostname() { return openFlowHostname; } @Override public int getOFPort() { return openFlowPort; } @Override public void handleMessage(IOFSwitch sw, OFMessage m, FloodlightContext bContext) { // do nothing } @Override public Timer getTimer() { return null; } @Override public RoleManager getRoleManager() { return null; } @Override public ModuleLoaderState getModuleLoaderState() { return null; } @Override public 
ControllerModel getControllerModel() { return null; } @Override public Set<String> getUplinkPortPrefixSet() { return null; } @Override public int getWorkerThreads() { return 0; } }
apache-2.0
apache/empire-db
empire-db/src/main/java/org/apache/empire/dbms/sqlserver/MSSqlDBModelParser.java
2057
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.empire.dbms.sqlserver; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import org.apache.empire.commons.StringUtils; import org.apache.empire.db.validation.DBModelParser; /** * MSSqlDBModelChecker * DataModel checker implementation for Microsoft SQLServer * @author doebele */ public class MSSqlDBModelParser extends DBModelParser { /** * create a MSSqlDBModelChecker * @param db the database * @param catalog the catalog */ public MSSqlDBModelParser(String catalog, String schema) { super(catalog, StringUtils.coalesce(schema, "DBO")); } /** * collects all column information at once */ @Override protected int collectColumns(DatabaseMetaData dbMeta) throws SQLException { return super.collectColumns(dbMeta, null); } @Override protected boolean isIdentityColumn(ResultSet rs) { try { int i = rs.findColumn("TYPE_NAME"); return rs.getString(i).matches(".*(?i:identity).*"); } catch(SQLException e) { log.warn("Missing column TYPE_NAME. Unable to detect Identity column"); return false; } } }
apache-2.0
apache/sis
core/sis-utility/src/main/java/org/apache/sis/math/MathFunctions.java
54703
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.math; import java.util.Arrays; import org.apache.sis.util.Static; import org.apache.sis.util.ArraysExt; import org.apache.sis.util.ArgumentChecks; import org.apache.sis.util.resources.Errors; import org.apache.sis.internal.util.DoubleDouble; import static java.lang.Math.abs; import static java.lang.Math.sqrt; import static java.lang.Float.intBitsToFloat; import static java.lang.Float.floatToIntBits; import static java.lang.Float.floatToRawIntBits; import static java.lang.Double.longBitsToDouble; import static java.lang.Double.doubleToLongBits; import static java.lang.Double.doubleToRawLongBits; import static org.apache.sis.internal.jdk9.JDK9.multiplyFull; import static org.apache.sis.internal.util.Numerics.SIGN_BIT_MASK; import static org.apache.sis.internal.util.Numerics.SIGNIFICAND_SIZE; /** * Simple mathematical functions in addition to the ones provided in {@link Math}. * Some methods in this class are very similar to the standard {@link Math} methods * or could be implemented with straightforward formulas. 
* However the methods in this class put an emphasis on: * * <ul> * <li>Rounding errors: * {@link #magnitude(double[]) magnitude}, * {@link #pow10(double) pow10}.</li> * <li>Distinguishing positive zero from negative zero: * {@link #isPositive(double) isPositive}, * {@link #isNegative(double) isNegative}, * {@link #isSameSign(double, double) isSameSign}, * {@link #xorSign(double, double) xorSign}.</li> * <li>Distinguishing the different kinds of NaN numbers: * {@link #toNanFloat(int) toNanFloat}, * {@link #toNanOrdinal(float) toNanOrdinal}.</li> * </ul> * * Some additional functions not found in {@code Math} are: * {@link #atanh(double) atanh}, * {@link #nextPrimeNumber(int) nextPrimeNumber}. * * @author Martin Desruisseaux (MPO, IRD, Geomatys) * @author Johann Sorel (Geomatys) * @version 1.2 * * @see DecimalFunctions * @see org.apache.sis.util.Numbers * * @since 0.3 * @module */ public final class MathFunctions extends Static { /** * The square root of 2, which is approximated by {@value}. * * @see Math#sqrt(double) */ public static final double SQRT_2 = 1.4142135623730951; /** * The logarithm of 2 in base 10, which is approximated by {@value}. * This constant is useful for converting a power of 2 to a power of 10 as below: * * {@preformat java * double exp10 = exp2 * LOG10_2; * } * * @see Math#log10(double) * @see #getExponent(double) * * @since 0.4 */ public static final double LOG10_2 = 0.3010299956639812; /** * The minimal ordinal value for {@code NaN} numbers created by {@link #toNanFloat(int)}. * The current value is {@value}. * * @since 1.0 */ public static final int MIN_NAN_ORDINAL = -0x200000; /** * The maximal ordinal value for {@code NaN} numbers created by {@link #toNanFloat(int)}. * The current value is {@value}. * * @since 1.0 */ public static final int MAX_NAN_ORDINAL = 0x1FFFFF; /** * The beginning of ranges of quiet NaN values. * The range is selected in way to restrict ourselves to <cite>quiet</cite> NaN values. 
* The following is an adaptation of evaluator's comments for bug #4471414 * (http://developer.java.sun.com/developer/bugParade/bugs/4471414.html): * * <blockquote> * There are actually two types of NaNs, signaling NaNs and quiet NaNs. Java doesn't support the features necessary * to reliably distinguish the two. However, the relevant point is that copying a signaling NaN may (or may not, at * the implementers discretion) yield a quiet NaN — a NaN with a different bit pattern (IEEE 754 6.2). Therefore, on * IEEE 754 compliant platforms it may be impossible to find a signaling NaN stored in an array since a signaling NaN * passed as an argument to binarySearch may get replaced by a quiet NaN. * </blockquote> * * The relevant thresholds are: * <ul> * <li>{@code 7F800000}: positive infinity.</li> * <li>{@code 7F800001}: first signaling NaN in the range of positive values.</li> * <li>{@code 7FBFFFFF}: last signaling NaN.</li> * <li>{@code 7FC00000}: first quiet NaN. Also the standard {@link Double#NaN} value.</li> * <li>{@code 7FFFFFFF}: last quiet NaN.</li> * <li>{@code FF800000}: negative infinity.</li> * <li>{@code FF800001}: first signaling NaN in the range of negative values.</li> * <li>{@code FFBFFFFF}: last signaling NaN.</li> * <li>{@code FFC00000}: first quiet NaN in the range of negative values.</li> * <li>{@code FFFFFFFF}: last quiet NaN.</li> * </ul> * * @see #toNanFloat(int) * @see #toNanOrdinal(float) */ static final int POSITIVE_NAN = 0x7FC00000, NEGATIVE_NAN = 0xFFC00000; /** * The highest prime number supported by the {@link #nextPrimeNumber(int)} method. * In the current implementation, this value is {@value}. However this limit may * change in any future Apache SIS version. * * <div class="note"><b>Note:</b> * The current value is the highest prime number representable as an unsigned 16 bits integer. 
* This is enough for current needs because 16 bits prime numbers are sufficient for finding * the divisors of any 32 bits integers.</div> * * @see #nextPrimeNumber(int) */ public static final int HIGHEST_SUPPORTED_PRIME_NUMBER = 65521; /** * Maximal length needed for the {@link #primes} array in order to store prime numbers * from 2 to 32749 (15 bits) or {@value #HIGHEST_SUPPORTED_PRIME_NUMBER} (16 bits). * * @see #primeNumberAt(int) */ static final int PRIMES_LENGTH_15_BITS = 3512, PRIMES_LENGTH_16_BITS = 6542; /** * The sequence of prime numbers computed so far. Will be expanded as needed. * We limit ourself to 16 bits numbers because they are sufficient for computing * divisors of any 32 bits number. * * @see #primeNumberAt(int) */ @SuppressWarnings("VolatileArrayField") // Because we will not modify array content. private static volatile short[] primes = new short[] {2, 3}; /** * Do not allow instantiation of this class. */ private MathFunctions() { } /** * Computes the averages of two signed integers without overflow. The calculation is performed with * {@code long} arithmetic before to convert the result to the {@code double} floating point number. * This function may be more accurate than the classical (x+y)/2 formula when <var>x</var> and/or * <var>y</var> are very large, because it will avoid the lost of last digits before averaging. * If exactly one of <var>x</var> and <var>y</var> is odd, the result will contain the 0.5 fraction digit. * * <div class="note"><b>Source:</b> this function is adapted from * <a href="http://aggregate.org/MAGIC/#Average%20of%20Integers">The Aggregate Magic Algorithms</a> * from University of Kentucky.</div> * * @param x the first value to average. * @param y the second value to average. * @return average of given values without integer overflow. 
* * @since 1.1 */ public static double average(final long x, final long y) { final long xor = (x ^ y); double c = (x & y) + (xor >> 1); // Really need >> 1, not /2 (they differ with negative numbers). if ((xor & 1) != 0) c += 0.5; return c; } /** * Truncates the given value toward zero. Invoking this method is equivalent to invoking * {@link Math#floor(double)} if the value is positive, or {@link Math#ceil(double)} if * the value is negative. * * @param value the value to truncate. * @return the largest in magnitude (further from zero) integer value which is equals * or less in magnitude than the given value. */ public static double truncate(final double value) { return (doubleToRawLongBits(value) & SIGN_BIT_MASK) == 0 ? Math.floor(value) : Math.ceil(value); } /** * Returns the magnitude of the given vector. This is defined by: * * {@preformat math * sqrt(vector[0]² + vector[1]² + … + vector[length-1]²) * } * * If the given vector contains a NaN value, then the result is NaN. * * <h4>Implementation note</h4> * In the special case where only one element is different than zero, this method * returns directly the {@linkplain Math#abs(double) absolute value} of that element * without computing {@code sqrt(v²)}, in order to avoid rounding error. This special case * has been implemented because this method is often invoked for computing the length of * offset vectors, * typically aligned with the axes of a {@linkplain org.opengis.referencing.cs.CartesianCS * Cartesian coordinate system}. * * @param vector the vector for which to compute the magnitude. * @return the magnitude of the given vector as a positive number, of NaN. * * @see Math#hypot(double, double) */ public static double magnitude(final double... vector) { int i = vector.length; // If every elements in the array are zero, returns zero. double v1; do if (i == 0) return 0; while ((v1 = vector[--i]) == 0); // We have found a non-zero element. If it is the only one, returns its absolute value. 
double v2; do if (i == 0) return abs(v1); while ((v2 = vector[--i]) == 0); // If there is exactly 2 elements, use Math.hypot which is more robust than our algorithm. double v3; do if (i == 0) return Math.hypot(v1, v2); while ((v3 = vector[--i]) == 0); // Usual magnitude computation, but using double-double arithmetic. final DoubleDouble sum = new DoubleDouble(); final DoubleDouble dot = new DoubleDouble(); sum.setToProduct(v1, v1); dot.setToProduct(v2, v2); sum.add(dot); dot.setToProduct(v3, v3); sum.add(dot); while (i != 0) { v1 = vector[--i]; dot.setToProduct(v1, v1); sum.add(dot); } sum.sqrt(); return sum.doubleValue(); } /** * Returns the unbiased exponent used in the representation of a {@code double}, with correction for * sub-normal numbers. This method is related to {@link Math#getExponent(double)} in the following ways: * * <ul> * <li>For NaN and all values equal or greater than {@link Double#MIN_NORMAL} in magnitude (including * infinities), this method returns results that are identical to {@code Math.getExponent(double)}.</li> * <li>For values smaller than {@link Double#MIN_NORMAL} in magnitude (including zero), the correction * for sub-normal numbers results in return values smaller than what {@code Math.getExponent(double)} * would return.</li> * </ul> * * Special cases: * <ul> * <li>If the argument is NaN or infinite, then the result is {@link Double#MAX_EXPONENT} + 1.</li> * <li>If the argument is {@link Double#MAX_VALUE}, then the result is {@value java.lang.Double#MAX_EXPONENT}.</li> * <li>If the argument is {@link Double#MIN_NORMAL}, then the result is {@value java.lang.Double#MIN_EXPONENT}.</li> * <li>If the argument is {@link Double#MIN_VALUE}, then the result is -1074.</li> * <li>If the argument is zero, then the result is -1075.</li> * </ul> * * <h4>Identities</h4> * For any <var>p</var> values in the [-1075 … 1024] range and <var>value</var> = 2<sup>p</sup>: * <ul> * <li><code>getExponent(Math.scalb(1.0, p)) == p</code></li> * 
<li><code>Math.scalb(1.0, getExponent(value)) == value</code></li> * <li><code>Math.floor({@linkplain #LOG10_2} * getExponent(value)) == Math.floor(Math.log10(value))</code></li> * </ul> * * @param value the value for which to get the exponent. * @return the unbiased exponent, corrected for sub-normal numbers if needed. * Values will be in the [-1075 … 1024] range, inclusive. * * @see Math#getExponent(double) * @see Math#scalb(double, int) * * @since 0.4 */ public static int getExponent(final double value) { final long bits = doubleToRawLongBits(value); int exponent = (int) ((bits >>> SIGNIFICAND_SIZE) & 0x7FFL); if (exponent == 0) { /* * Number is sub-normal: there is no implicit 1 bit before the significand. * We need to search for the position of the first real 1 bit, and fix the * exponent accordingly. Note that numberOfLeadingZeros(…) is relative to * 64 bits while the significand size is only 52 bits. The last term below * is for fixing this difference. */ exponent -= Long.numberOfLeadingZeros(bits & ((1L << SIGNIFICAND_SIZE) - 1)) - (Long.SIZE - SIGNIFICAND_SIZE); } return exponent - Double.MAX_EXPONENT; } /** * Computes the result of {@code base} argument raised to the power given by {@code exponent} argument. * This method computes the same value than {@link Math#pow(double, double)} but using only integer arithmetic. * The result must be representable as a 64 bits integers ({@code long} primitive type), * otherwise an {@link ArithmeticException} is thrown. The result is guaranteed exact, * in contrast to results represented as {@code double} floating point values * which may be approximate for magnitudes greater than 2<sup>52</sup>. * This method may also be faster. 
* * <div class="note"><b>Implementation note:</b> this method uses * <a href="https://en.wikipedia.org/wiki/Exponentiation_by_squaring">exponentiation by squaring</a> technic.</div> * * The type of the {@code base} argument is {@code long} for convenience, since this method is used in contexts * where relatively large integers are handled. However any value greater than the capacity of {@code int} type * is guaranteed to fail with {@link ArithmeticException} unless {@code exponent} is 0 or 1. * Likewise any {@code exponent} value greater than 62 is guaranteed to fail unless {@code base} is 0 or 1. * * @param base the value to raise to an exponent. * @param exponent the exponent, as zero or positive number. * @return the value <var>base</var><sup><var>exponent</var></sup> as a 64 bits integer. * @throws ArithmeticException if the given exponent is negative, or if the result overflow integer arithmetic. * * @see Math#pow(double, double) * * @since 1.0 */ public static long pow(long base, int exponent) { long result = 1; if (exponent >= 1) { if ((exponent & 1) != 0) { result = base; } while ((exponent >>>= 1) != 0) { base = Math.multiplyExact(base, base); if ((exponent & 1) != 0) { result = Math.multiplyExact(result, base); } } } else if (exponent < 0) { throw new ArithmeticException(Errors.format(Errors.Keys.NegativeArgument_2, "exponent", exponent)); } return result; } /** * Computes 10 raised to the power of <var>x</var>. This method is faster and slightly more accurate * than invoking <code>{@linkplain Math#pow(double, double) Math.pow}(10, x)</code>. * * <div class="note"><b>Note:</b> * This method has been defined because the standard {@code Math.pow(10, x)} method does not always return * the closest IEEE floating point representation. Slight departures (1 or 2 ULP) are often allowed in math * functions for performance reasons. 
The most accurate calculations are usually not necessary, but the base * 10 is a special case since it is used for scaling axes or formatting human-readable output.</div> * * Special cases: * <ul> * <li>If <var>x</var> is equals or lower than -324, then the result is 0.</li> * <li>If <var>x</var> is equals or greater than 309, then the result is {@linkplain Double#POSITIVE_INFINITY positive infinity}.</li> * <li>If <var>x</var> is in the [0 … 18] range inclusive, then the result is exact.</li> * <li>For all other <var>x</var> values, the result is the closest IEEE 754 approximation.</li> * </ul> * * @param x the exponent. * @return 10 raised to the given exponent. * * @see #pow10(double) * @see #LOG10_2 * @see DecimalFunctions */ public static double pow10(final int x) { return DecimalFunctions.pow10(x); } /** * Computes 10 raised to the power of <var>x</var>. Invoking this method is equivalent to invoking * <code>{@linkplain Math#pow(double, double) Math.pow}(10, x)</code>, but is slightly more accurate * in the special case where the given argument is an integer. * * @param x the exponent. * @return 10 raised to the given exponent. * * @see #pow10(int) * @see Math#pow(double, double) * @see Math#log10(double) */ public static double pow10(final double x) { final int ix = (int) x; if (ix == x) { return DecimalFunctions.pow10(ix); } else { return Math.pow(10, x); } } /** * Returns the inverse hyperbolic sine of the given value. * This is the inverse of the {@link Math#sinh(double)} method. * * @param x the value for which to compute the inverse hyperbolic sine. * @return the inverse hyperbolic sine of the given value. * * @see Math#sinh(double) * * @since 0.6 */ public static double asinh(final double x) { return Math.log(x + sqrt(x*x + 1)); } /** * Returns the inverse hyperbolic cosine of the given value. * This is the inverse of the {@link Math#cosh(double)} method. * * @param x the value for which to compute the inverse hyperbolic cosine. 
* @return the inverse hyperbolic cosine of the given value. * * @see Math#cosh(double) * * @since 0.6 */ public static double acosh(final double x) { return Math.log(x + sqrt(x*x - 1)); } /** * Returns the inverse hyperbolic tangent of the given value. * This is the inverse of the {@link Math#tanh(double)} method. * The range of input values shall be in the [-1 … 1]. * Special cases: * * <ul> * <li>For <var>x</var> = NaN, this method returns a {@linkplain Double#isNaN(double) NaN} value.</li> * <li>For <var>x</var> = -1, this method returns {@linkplain Double#NEGATIVE_INFINITY negative infinity}.</li> * <li>For <var>x</var> = +1, this method returns {@linkplain Double#POSITIVE_INFINITY positive infinity}.</li> * </ul> * * @param x the value for which to compute the inverse hyperbolic tangent. * @return the inverse hyperbolic tangent of the given value. * * @see Math#tanh(double) */ public static double atanh(final double x) { /* * The classical formulas is log((1+x)/(1-x))/2, but the following is more * accurate if the (1+x)/(1-x) ratio is close to 1, i.e. if x is close to 0. * This is often the case in Apache SIS since x is often a value close to the * Earth excentricity, which is a small value (0 would be a perfect sphere). */ return 0.5 * Math.log1p(2*x / (1-x)); } /** * Returns {@code true} if the given value is positive, <em>excluding</em> negative zero. * Special cases: * * <ul> * <li>If the value is {@code +0.0}, returns {@code true}</li> * <li>If the value is {@code -0.0}, returns <b>{@code false}</b></li> * <li>If the value is {@link Double#isNaN(double) NaN}, returns {@code false}</li> * </ul> * * As seen from the above cases, this method distinguishes positive zero from negative zero. * The handling of zero values is the difference between invoking {@code isPositive(double)} * and testing if (<var>value</var> {@literal >= 0}). * * @param value the value to test. * @return {@code true} if the given value is positive, excluding negative zero. 
* * @see #isPositiveZero(double) * @see #isNegative(double) */ public static boolean isPositive(final double value) { return (doubleToRawLongBits(value) & SIGN_BIT_MASK) == 0 && !Double.isNaN(value); } /** * Returns {@code true} if the given value is the positive zero ({@code +0.0}). * This method returns {@code false} for the negative zero ({@code -0.0}). * This method is equivalent to the following code, but potentially faster: * * {@preformat java * return (value == 0) && isPositive(value); * } * * @param value the value to test. * @return {@code true} if the given value is +0.0 (not -0.0). * * @see #isPositive(double) * @see #isNegativeZero(double) * * @since 0.4 */ public static boolean isPositiveZero(final double value) { return doubleToRawLongBits(value) == 0L; } /** * Returns {@code true} if the given value is negative, <em>including</em> negative zero. * Special cases: * * <ul> * <li>If the value is {@code +0.0}, returns {@code false}</li> * <li>If the value is {@code -0.0}, returns <b>{@code true}</b></li> * <li>If the value is {@link Double#isNaN(double) NaN}, returns {@code false}</li> * </ul> * * As seen from the above cases, this method distinguishes positive zero from negative zero. * The handling of zero values is the difference between invoking {@code isNegative(double)} * and testing if (<var>value</var> {@literal < 0}). * * @param value the value to test. * @return {@code true} if the given value is negative, including negative zero. * * @see #isNegativeZero(double) * @see #isPositive(double) */ public static boolean isNegative(final double value) { return (doubleToRawLongBits(value) & SIGN_BIT_MASK) != 0 && !Double.isNaN(value); } /** * Returns {@code true} if the given value is the negative zero ({@code -0.0}). * This method returns {@code false} for the positive zero ({@code +0.0}). 
* This method is equivalent to the following code, but potentially faster: * * {@preformat java * return (value == 0) && isNegative(value); * } * * @param value the value to test. * @return {@code true} if the given value is -0.0 (not +0.0). * * @see #isNegative(double) * @see #isPositiveZero(double) * * @since 0.4 */ public static boolean isNegativeZero(final double value) { return doubleToRawLongBits(value) == SIGN_BIT_MASK; } /** * Returns {@code true} if the given values have the same sign, differentiating positive * and negative zeros. * Special cases: * * <ul> * <li>{@code +0.0} and {@code -0.0} are considered to have opposite sign</li> * <li>If any value is {@link Double#isNaN(double) NaN}, returns {@code false}</li> * </ul> * * @param v1 the first value. * @param v2 the second value, to compare the sign with the first value. * @return {@code true} if the given values are not NaN and have the same sign. * * @see Math#signum(double) */ public static boolean isSameSign(final double v1, final double v2) { return !Double.isNaN(v1) && !Double.isNaN(v2) && ((doubleToRawLongBits(v1) ^ doubleToRawLongBits(v2)) & SIGN_BIT_MASK) == 0; } /** * Returns the first floating-point argument with the sign reversed if the second floating-point * argument is negative. This method is similar to <code>{@linkplain Math#copySign(double,double) * Math.copySign}(value, sign)</code> except that the sign is combined with an <cite>exclusive * or</cite> operation instead of being copied. * * <p>This method makes no guarantee about whether {@code NaN} values are handled as positive * or negative numbers. This is the same policy than {@link Math#copySign(double, double)}.</p> * * @param value the parameter providing the value that may need a sign change. * @param sign the parameter providing the sign to <cite>xor</cite> with the value. * @return the provided value with its sign reversed if the {@code sign} parameter is negative. 
* * @see Math#copySign(double, double) */ public static double xorSign(final double value, final double sign) { return longBitsToDouble(doubleToRawLongBits(value) ^ (doubleToRawLongBits(sign) & SIGN_BIT_MASK)); } /** * Returns {@code true} if the given values are {@linkplain Float#equals(Object) equal} * or if their difference is not greater than the given threshold. More specifically: * * <ul> * <li>If both values are {@linkplain Float#POSITIVE_INFINITY positive infinity}, or * if both values are {@linkplain Float#NEGATIVE_INFINITY negative infinity}, * then this method returns {@code true}.</li> * <li>If both values {@linkplain Float#isNaN(float) are NaN}, then this method returns {@code true}. * Note that this method does not differentiate the various NaN values.</li> * <li>Otherwise, this method returns the result of the {@code abs(v1 - v2) <= ε} comparison.</li> * </ul> * * @param v1 the first value to compare. * @param v2 the second value to compare. * @param ε the tolerance threshold, which must be positive. * @return {@code true} if both values are equal given the tolerance threshold. */ public static boolean epsilonEqual(final float v1, final float v2, final float ε) { return (abs(v1 - v2) <= ε) || floatToIntBits(v1) == floatToIntBits(v2); } /** * Returns {@code true} if the given values are {@linkplain Double#equals(Object) equal} * or if their difference is not greater than the given threshold. More specifically: * * <ul> * <li>If both values are {@linkplain Double#POSITIVE_INFINITY positive infinity}, or * if both values are {@linkplain Double#NEGATIVE_INFINITY negative infinity}, * then this method returns {@code true}.</li> * <li>If both values {@linkplain Double#isNaN(double) are NaN}, then this method returns {@code true}. * Note that this method does not differentiate the various NaN values.</li> * <li>Otherwise, this method returns the result of the {@code abs(v1 - v2) <= ε} comparison.</li> * </ul> * * @param v1 the first value to compare. 
* @param v2 the second value to compare. * @param ε the tolerance threshold, which must be positive. * @return {@code true} if both values are equal given the tolerance threshold. */ public static boolean epsilonEqual(final double v1, final double v2, final double ε) { return (abs(v1 - v2) <= ε) || doubleToLongBits(v1) == doubleToLongBits(v2); } /** * Returns a {@linkplain Float#isNaN(float) NaN} number for the specified ordinal value. * Valid NaN numbers in Java can have bit fields in the ranges listed below: * * <ul> * <li>[{@code 0x7F800001} … {@code 0x7FFFFFFF}], with * {@code 0x7FC00000} as the bit fields of the standard {@link Float#NaN} value</li> * <li>[{@code 0xFF800001} … {@code 0xFFFFFFFF}]</li> * </ul> * * Some of those bits, named the <cite>payload</cite>, can be used for storing custom information. * This method maps some of the payload values to each ordinal value. * * <p>This method guarantees that {@code toNanFloat(0)} returns the standard {@link Float#NaN} value. * For all other {@code ordinal} values, the relationship to the payload values is implementation dependent * and may change in any future version of the SIS library. The current implementation restricts the * range of allowed ordinal values to a smaller one than the range of all possible values.</p> * * @param ordinal the NaN ordinal value, from {@value #MIN_NAN_ORDINAL} to {@value #MAX_NAN_ORDINAL} inclusive. * @return one of the legal {@linkplain Float#isNaN(float) NaN} values as a float. * @throws IllegalArgumentException if the specified ordinal is out of range. * * @see Float#intBitsToFloat(int) */ public static float toNanFloat(final int ordinal) throws IllegalArgumentException { ArgumentChecks.ensureBetween("ordinal", MIN_NAN_ORDINAL, MAX_NAN_ORDINAL, ordinal); int bits = (ordinal >= 0) ? ordinal : ~ordinal; bits = (bits + POSITIVE_NAN) | (ordinal & Integer.MIN_VALUE); assert Integer.compareUnsigned(bits, ordinal >= 0 ? 
POSITIVE_NAN : NEGATIVE_NAN) >= 0 : ordinal; final float value = intBitsToFloat(bits); assert Float.isNaN(value) && toNanOrdinal(value) == ordinal : ordinal; return value; } /** * Returns the ordinal value of the given NaN number. * This method is the converse of {@link #toNanFloat(int)}. * * <p>If the given float is the standard {@link Float#NaN} value, then this method returns 0. * For all other values, the relationship between the float payload and the returned ordinal * is implementation dependent and may change in any future Apache SIS version.</p> * * @param value the value from which to get the NaN ordinal value. * @return the NaN ordinal value of the given floating point value. * @throws IllegalArgumentException if the given value is not a NaN value, * or does not use a supported bits pattern. */ public static int toNanOrdinal(final float value) throws IllegalArgumentException { final int bits = floatToRawIntBits(value); int ordinal = (bits & Integer.MAX_VALUE) - POSITIVE_NAN; if (bits < 0) ordinal = ~ordinal; if (ordinal >= MIN_NAN_ORDINAL && ordinal <= MAX_NAN_ORDINAL) { return ordinal; } final short resourceKey; final Object obj; if (Float.isNaN(value)) { resourceKey = Errors.Keys.IllegalBitsPattern_1; obj = Integer.toHexString(bits); } else { resourceKey = Errors.Keys.IllegalArgumentValue_2; obj = new Object[] {"value", value}; } throw new IllegalArgumentException(Errors.format(resourceKey, obj)); } /** * Converts two long bits values containing a IEEE 754 quadruple precision floating point number * to a double precision floating point number. About 17 decimal digits of precision may be lost * due to the {@code double} type having only half the capacity of quadruple precision type. 
* * <p>Some quadruple precision values can not be represented in double precision and are mapped * to {@code double} values as below:</p> * <ul> * <li>Values having a magnitude less than {@link Double#MIN_VALUE} are mapped to * positive or negative zero.</li> * <li>Values having a magnitude greater than {@link Double#MAX_VALUE} are mapped to * {@link Double#POSITIVE_INFINITY} or {@link Double#NEGATIVE_INFINITY}.</li> * <li>All NaN values are currently collapsed to the single "canonical" {@link Double#NaN} value * (this policy may be revisited in future SIS version).</li> * </ul> * * @param l0 upper part of the quadruple precision floating point number. * @param l1 lower part of the quadruple precision floating point number. * @return double precision approximation. * * @see <a href="https://en.wikipedia.org/wiki/Quadruple-precision_floating-point_format">Quadruple-precision floating-point format on Wikipedia</a> * * @since 0.7 */ public static double quadrupleToDouble(long l0, long l1) { // Build double long sig = (l0 & 0x8000000000000000L); long exp = (l0 & 0x7FFF000000000000L) >> 48; l0 = (l0 & 0x0000FFFFFFFFFFFFL); if (exp == 0) { /* * Subnormal number. * Since we convert them to double precision, subnormal numbers can not be represented * as they are smaller than Double.MIN_VALUE. We map them to zero preserving the sign. */ return Double.longBitsToDouble(sig); } if (exp == 0x7FFF) { /* * NaN of infinite number. * Mantissa with all bits at 0 is used for infinite. * This is the only special number that we can preserve. */ if (l0 == 0 && l1 == 0) { return Double.longBitsToDouble(sig | 0x7FF0000000000000L); } /* * Other NaN values might have a meaning (e.g. NaN(1) = forest, NaN(2) = lake, etc.) * See above toNanFloat(int) and toNaNOrdinal(float) methods. When truncating the value we * might change the meaning, which could cause several issues later. 
Therefor we conservatively * collapse all NaNs to the default NaN for now (this may be revisited in a future SIS version). */ return Double.NaN; } exp -= (16383 - 1023); // Change from 15 bias to 11 bias. // Check cases where mantissa excess what double can support. if (exp < 0) return Double.NEGATIVE_INFINITY; if (exp > 2046) return Double.POSITIVE_INFINITY; return Double.longBitsToDouble(sig | (exp << 52) | (l0 << 4) | (l1 >>> 60)); } /** * Returns the <var>i</var><sup>th</sup> prime number. * This method returns (2, 3, 5, 7, 11, …) for index (0, 1, 2, 3, 4, …). * * @param index the prime number index, starting at index 0 for prime number 2. * @return the prime number at the specified index. * @throws IndexOutOfBoundsException if the specified index is too large. * * @see java.math.BigInteger#isProbablePrime(int) */ static int primeNumberAt(final int index) throws IndexOutOfBoundsException { ArgumentChecks.ensureValidIndex(PRIMES_LENGTH_16_BITS, index); short[] primes = MathFunctions.primes; if (index >= primes.length) { synchronized (MathFunctions.class) { primes = MathFunctions.primes; if (index >= primes.length) { int i = primes.length; int n = Short.toUnsignedInt(primes[i - 1]); // Compute by block of 16 values, for reducing the amount of array resize. primes = Arrays.copyOf(primes, Math.min((index | 0xF) + 1, PRIMES_LENGTH_16_BITS)); do { testNextNumber: while (true) { // Simulate a "goto" statement (usually not recommanded...) final int stopAt = (int) sqrt(n += 2); int prime; int j = 0; do { prime = Short.toUnsignedInt(primes[++j]); if (n % prime == 0) { continue testNextNumber; } } while (prime <= stopAt); primes[i] = (short) n; break; } } while (++i < primes.length); MathFunctions.primes = primes; } } } return Short.toUnsignedInt(primes[index]); } /** * Returns the first prime number equals or greater than the given value. * Current implementation accepts only values in the * [2 … {@value #HIGHEST_SUPPORTED_PRIME_NUMBER}] range. 
* * @param number the number for which to find the next prime. * @return the given number if it is a prime number, or the next prime number otherwise. * @throws IllegalArgumentException if the given value is outside the supported range. * * @see java.math.BigInteger#isProbablePrime(int) */ public static int nextPrimeNumber(final int number) throws IllegalArgumentException { ArgumentChecks.ensureBetween("number", 2, HIGHEST_SUPPORTED_PRIME_NUMBER, number); final short[] primes = MathFunctions.primes; int lower = 0; int upper = Math.min(PRIMES_LENGTH_15_BITS, primes.length); if (number > Short.MAX_VALUE) { lower = upper; upper = primes.length; } int i = Arrays.binarySearch(primes, lower, upper, (short) number); if (i < 0) { i = ~i; if (i >= primes.length) { int p; do p = primeNumberAt(i++); while (p < number); return p; } } return Short.toUnsignedInt(primes[i]); } /** * Returns the divisors of the specified number as positive integers. For any value other * than {@code O} (which returns an empty array), the first element in the returned array * is always {@code 1} and the last element is always the absolute value of {@code number}. * * @param number the number for which to compute the divisors. * @return the divisors in strictly increasing order. */ public static int[] divisors(int number) { if (number == 0) { return ArraysExt.EMPTY_INT; } number = abs(number); int[] divisors = new int[16]; divisors[0] = 1; int count = 1; /* * Searches for the first divisors among the prime numbers. We stop the search at the * square root of `n` because every values above that point can be inferred from the * values before that point, i.e. if n=p₁⋅p₂ and p₂ is greater than `sqrt`, than p₁ * must be lower than `sqrt`. */ for (int p,i=0; multiplyFull(p=primeNumberAt(i), p) <= number; i++) { if (number % p == 0) { if (count == divisors.length) { divisors = Arrays.copyOf(divisors, count*2); } divisors[count++] = p; } } /* * Completes the divisors past `sqrt`. 
The numbers added here may or may not be prime * numbers. Side note: checking that they are prime numbers would be costly, but this * algorithm doesn't need that. */ int source = count; if (count*2 > divisors.length) { divisors = Arrays.copyOf(divisors, count*2); } int d1 = divisors[--source]; int d2 = number / d1; if (d1 != d2) { divisors[count++] = d2; } while (--source >= 0) { divisors[count++] = number / divisors[source]; } /* * Checks the products of divisors found so far. For example if 2 and 3 are divisors, * checks if 6 is a divisor as well. The products found will themself be used for * computing new products. */ for (int i=1; i<count; i++) { d1 = divisors[i]; for (int j=i; j<count; j++) { final long m = multiplyFull(d1, divisors[j]); if (m > number) break; d2 = (int) m; if (number % d2 == 0) { int p = Arrays.binarySearch(divisors, j, count, d2); if (p < 0) { p = ~p; // tild (~) operator, not minus if (count == divisors.length) { divisors = Arrays.copyOf(divisors, count*2); } System.arraycopy(divisors, p, divisors, p+1, count-p); divisors[p] = d2; count++; } } } } divisors = ArraysExt.resize(divisors, count); assert ArraysExt.isSorted(divisors, true); return divisors; } /** * Returns the positive divisors which are common to all the specified numbers. * The returned array always starts with value 1, unless the given value is 0 * in which case this method returns an empty array. * * @param numbers the numbers for which to compute the divisors, in any order. * @return the divisors common to all the given numbers, in strictly increasing order. */ public static int[] commonDivisors(final int... numbers) { if (numbers.length == 0) { return ArraysExt.EMPTY_INT; } /* * Get the smallest value. We will compute the divisors only for this value, * since we know that any value greater that the minimal value can not be a * common divisor. 
*/ int minValue = Integer.MAX_VALUE; for (int i=0; i<numbers.length; i++) { final int n = abs(numbers[i]); if (n <= minValue) { minValue = n; } } int[] divisors = divisors(minValue); /* * Tests if the divisors we just found are also divisors of all other numbers. * Removes those which are not. */ int count = divisors.length; for (int i=0; i<numbers.length; i++) { final int n = abs(numbers[i]); if (n != minValue) { for (int j=count; --j>0;) { // Do not test j==0, since divisors[0] == 1. if (n % divisors[j] != 0) { System.arraycopy(divisors, j+1, divisors, j, --count - j); } } } } return ArraysExt.resize(divisors, count); } /** * Returns the real (non-complex) roots of a polynomial equation having the given coefficients. * This method returns the <var>x</var> values for which <var>y</var>=0 in the following equation: * * <blockquote><var>y</var> = * <var>c<sub>0</sub></var> + * <var>c<sub>1</sub></var>⋅<var>x</var> + * <var>c<sub>2</sub></var>⋅<var>x</var><sup>2</sup> + * <var>c<sub>3</sub></var>⋅<var>x</var><sup>3</sup> + … + * <var>c<sub>n</sub></var>⋅<var>x</var><sup>n</sup> * </blockquote> * * Current implementation can resolve polynomials described by a maximum of 5 coefficients, ignoring * leading and trailing zeros. They correspond to linear, quadratic, cubic and quartic polynomials. * * @param coefficients the <var>c<sub>0</sub></var>, <var>c<sub>1</sub></var>, <var>c<sub>2</sub></var>, … * <var>c<sub>n</sub></var> coefficients, in that order. * @return the non-complex roots, or an empty array if none. * @throws UnsupportedOperationException if given arguments contain more non-zero coefficients than this method can handle. * * @see java.awt.geom.QuadCurve2D#solveQuadratic(double[]) * @see java.awt.geom.CubicCurve2D#solveCubic(double[]) * * @since 1.0 */ public static double[] polynomialRoots(final double... coefficients) { int upper = coefficients.length; while (upper > 0) { double a = coefficients[--upper]; // Coefficient of the term with highest exponent. 
if (a == 0) { continue; // Search the last non-zero coefficient. } double c; // Coefficient of the term with lowest exponent. int lower = 0; while ((c = coefficients[lower]) == 0) lower++; switch (upper - lower) { /* * c = 0 * * Can not compute x. We could return an arbitrary value if c = 0, but we rather return * an empty array for keeping the number of root equals to the highest exponent. */ case 0: { break; } /* * ax + c = 0 → x = -c/a */ case 1: { final double x = -c / a; if (Double.isNaN(x)) break; return new double[] {x}; } /* * ax² + bx + c = 0 → x = (-b ± √[b² - 4ac]) / (2a) * * Above equation is numerically unstable. More stable algorithm is given * by Numerical Recipes §5.6 (quadratic equation), which is applied below. */ case 2: { final double b = coefficients[lower + 1]; final double q = -0.5 * (b + Math.copySign(sqrt(b*b - 4*a*c), b)); final double x1 = q/a; final double x2 = c/q; if (Double.isNaN(x1) && Double.isNaN(x2)) break; return (x1 != x2) ? new double[] {x1, x2} : new double[] {x1}; } /* * x³ + ax² + bx + c = 0 * * Numerical Recipes §5.6 (cubic equation) is applied below. * Solution usually have either 1 or 3 roots. */ case 3: { return refineRoots(coefficients, solveCubic( coefficients[lower + 2] / a, coefficients[lower + 1] / a, c / a, false)); } /* * x⁴ + ax³ + bx² + cx + d = 0 * * https://dlmf.nist.gov/1.11 in "Quartic equations" section. * This algorithm reduces the equation to a cubic equation. 
*/ case 4: { double b,d; d = c / a; c = coefficients[lower + 1] / a; b = coefficients[lower + 2] / a; a = coefficients[lower + 3] / a; final double a2 = a*a; final double p = -3./8 * (a2) + b; final double q = 1./8 * (a2*a) - 1./2 * (a*b) + c; final double r = -3./256 * (a2*a2) + 1./16 * (a2*b) - 1./4 * (a*c) + d; final double[] roots = solveCubic(-2*p, p*p-4*r, q*q, true); if (roots.length != 4) break; for (int i=0; i<3; i++) { roots[i] = sqrt(-roots[i]); } if (isPositive(q)) { for (int i=0; i<3; i++) { roots[i] = -roots[i]; } } final double α = roots[0], β = roots[1], γ = roots[2]; final double s = α + β + γ; if (Double.isNaN(s)) break; roots[0] = s/2 - (a /= 4); roots[1] = (+α - β - γ)/2 - a; roots[2] = (-α + β - γ)/2 - a; roots[3] = (-α - β + γ)/2 - a; return refineRoots(coefficients, removeDuplicated(roots)); } default: { throw new UnsupportedOperationException(); } } break; } return ArraysExt.EMPTY_DOUBLE; } /** * Solves cubic equation x³ + ax² + bx + c = 0. The solution before simplification has either 1 or 3 roots. * The {@code quartic} argument specifies whether this cubic equation is used as a step for solving a quartic equation: * * <ul> * <li>If {@code true}, then we are interested only in the 3 roots solution and we do not check for duplicated values. * The length of returned array is 4 for allowing the caller to reuse the same array.</li> * <li>If {@code false}, then this method may simplify the 3 roots to 2 roots if two of them are equal, * or may return the 1 root solution. The length of returned array is the number of roots.</li> * </ul> */ private static double[] solveCubic(double a, double b, double c, final boolean quartic) { final double Q = (a*a - 3*b) / 9; // Q from Numerical Recipes 5.6.10. final double R = (a*(a*a - 4.5*b) + 13.5*c) / 27; // R from Numerical Recipes 5.6.10. final double Q3 = Q*Q*Q; final double R2 = R*R; a /= 3; // Last term of Numerical Recipes 5.6.12, 17 and 18. 
if (R2 < Q3) { /* * Numerical Recipes 5.6.11 and 5.6.12 uses acos(R/sqrt(Q³)). It is possible to rewrite as * atan2(sqrt(Q3 - R2), R) using the cos(θ) = 1/√[1 + tan²θ] trigonometric identity, but * this substitution seems to decrease accuracy instead of increasing it in our tests. */ b = Math.acos(R/sqrt(Q3)) / 3; // θ from Numerical recipes 5.6.11, then b = θ/3. c = -2 * sqrt(Q); // First part of Numerical Recipes 5.6.12. double[] roots = new double[quartic ? 4 : 3]; roots[2] = c*Math.cos(b - 2*Math.PI/3) - a; // TODO: try Math.fma with JDK9. roots[1] = c*Math.cos(b + 2*Math.PI/3) - a; roots[0] = c*Math.cos(b) - a; if (!quartic) { roots = removeDuplicated(roots); } return roots; } if (!quartic) { b = -Math.copySign(Math.cbrt(abs(R) + sqrt(R2 - Q3)), R); // A from Numerical Recipes 5.6.15. final double x = (b == 0 ? 0 : b + Q/b) - a; if (!Double.isNaN(x)) { return new double[] {x}; } } return ArraysExt.EMPTY_DOUBLE; } /** * Remove duplicated values in the given array. This method is okay only for very small arrays (3 or 4 elements). * Duplicated values should be very rare and occur mostly as a consequence of rounding errors while computing the * roots of polynomial equations. Because if the algebraic solution has less roots than what we would expect from * the largest exponent (for example ax² + bx = 0 has only one root instead of two), then {@link #polynomialRoots} * should have reduced the equation to a lower degrees (ax + b = 0 in above example), in which case there is no * duplicated roots to remove. */ private static double[] removeDuplicated(double[] roots) { int i = 1; next: while (i < roots.length) { for (int j=i; --j >= 0;) { if (roots[j] == roots[i]) { roots = ArraysExt.remove(roots, i, 1); continue next; } } i++; } return roots; } /** * Tries to improves accuracy of polynomial roots by applying small displacements * to the <var>x</var> values using ∂y/∂x derivative around those values. 
* * <div class="note"><b>Purpose:</b> * this refinement is significant in a {@link org.apache.sis.referencing.GeodesicsOnEllipsoid} * test checking the value of an μ(x²,y²) function.</div> * * @param coefficients the user-specified coefficients. * @param roots the roots. This array will be modified in place. * @return {@code roots}. */ private static double[] refineRoots(final double[] coefficients, final double[] roots) { for (int i=0; i < roots.length; i++) { double ymin = Double.POSITIVE_INFINITY; double x = roots[i]; double dx; do { double px = 1; // Power of x: 1, x¹, x², x³, … double dy = 0; // First derivative of polynomial at x. double y = coefficients[0]; // Value of polynomial at x. double ey = 0, edy = 0; // Error terms for Kahan summation algorithm. for (int j=1; j<coefficients.length; j++) { final double c = coefficients[j]; double s; s = c * (px * i) + edy; edy = s + (dy - (dy += s)); // Kahan summation of dy. s = c * (px *= x) + ey; ey = s + ( y - ( y += s)); // Kahan summation of y. } if (!(ymin > (ymin = abs(y)))) break; // If result not better than previous result, stop. roots[i] = x; dx = y/dy; } while (x != (x -= dx) && Double.isFinite(x)); } return roots; } }
apache-2.0
jonvestal/open-kilda
src-java/isllatency-topology/isllatency-storm-topology/src/main/java/org/openkilda/wfm/topology/isllatency/model/LatencyRecord.java
771
/* Copyright 2019 Telstra Open Source
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.openkilda.wfm.topology.isllatency.model;

import lombok.Value;

/**
 * Immutable pairing of an ISL latency measurement with the moment it was taken.
 * Lombok's {@code @Value} makes both fields private final and generates the
 * constructor, getters, equals/hashCode and toString.
 */
@Value
public class LatencyRecord {
    // Measured latency value. Units are not stated here — presumably nanoseconds,
    // as produced by the ISL latency topology; TODO confirm with the producer.
    long latency;
    // Time the measurement was taken. Presumably epoch milliseconds — TODO confirm.
    long timestamp;
}
apache-2.0
Sage-Bionetworks/SynapseWebClient
src/main/java/org/sagebionetworks/web/client/widget/asynch/UserProfileAsyncHandler.java
279
package org.sagebionetworks.web.client.widget.asynch;

import org.sagebionetworks.repo.model.UserProfile;

import com.google.gwt.user.client.rpc.AsyncCallback;

/**
 * Asynchronously resolves a Synapse user id to its {@link UserProfile}.
 * Implementations may batch or cache lookups — not visible from this interface; verify in the impl.
 */
public interface UserProfileAsyncHandler {
    /**
     * Fetches the profile for the given user.
     *
     * @param userId   id of the user whose profile is requested
     * @param callback invoked with the {@link UserProfile} on success, or the failure cause
     */
    void getUserProfile(String userId, AsyncCallback<UserProfile> callback);
}
apache-2.0
adufilie/flex-falcon
compiler/src/org/apache/flex/compiler/asdoc/IMetadataParserASDocDelegate.java
2718
/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package org.apache.flex.compiler.asdoc;

import antlr.Token;

import org.apache.flex.compiler.internal.parsing.as.MetadataParser;
import org.apache.flex.compiler.tree.as.IDocumentableDefinitionNode;

/**
 * Interface used by the {@link MetadataParser} to record information about
 * ASDoc comments encountered while parsing meta-data.
 * <p>
 * Implementations of this interface that record ASDoc data are stateful and
 * cannot be shared between {@link MetadataParser} instances.
 */
public interface IMetadataParserASDocDelegate
{
    /**
     * Called by the {@link MetadataParser} whenever a {@link Token} containing an ASDoc
     * comment is encountered.
     *
     * @param asDocToken A {@link Token} containing an ASDoc comment.
     */
    void setCurrentASDocToken(Token asDocToken);

    /**
     * Called by the {@link MetadataParser} after an {@link IDocumentableDefinitionNode} has been constructed
     * and fully parsed.
     *
     * @param definitionNode {@link IDocumentableDefinitionNode} that has been parsed.
     * @return An {@link IASDocComment} that should be attached to the {@link IDocumentableDefinitionNode}.
     */
    IASDocComment afterDefinition(IDocumentableDefinitionNode definitionNode);

    /**
     * Called by the {@link MetadataParser} after parsing a meta-data tag
     * that should prevent the current ASDoc comment from attaching to any
     * subsequent meta-data tag.
     *
     * @param metaDataTagName The name of the meta-data tag that prevents the
     * current ASDoc comment from attaching to any subsequent meta-data tag.
     */
    void clearMetadataComment(String metaDataTagName);

    /**
     * Called by the {@link MetadataParser} any time a meta-data tag is
     * parsed.
     *
     * @param metaDataEndOffset The end offset of the meta-data tag that has
     * been parsed.
     */
    void afterMetadata(int metaDataEndOffset);
}
apache-2.0
aaudiber/alluxio
core/server/common/src/main/java/alluxio/underfs/AbstractUfsManager.java
6542
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.underfs;

import alluxio.AlluxioURI;
import alluxio.Configuration;
import alluxio.PropertyKey;
import alluxio.exception.status.NotFoundException;
import alluxio.exception.status.UnavailableException;
import alluxio.util.IdUtils;

import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.io.Closer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Basic implementation of {@link UfsManager}. Caches {@link UnderFileSystem} instances keyed by
 * (scheme, authority, mount-specific properties) and maps mount ids to {@link UfsClient}s.
 */
public abstract class AbstractUfsManager implements UfsManager {
  private static final Logger LOG = LoggerFactory.getLogger(AbstractUfsManager.class);

  // Serializes UFS creation on a cache miss in getOrAdd (double-checked locking around the map).
  private final Object mLock = new Object();

  /**
   * The key of the UFS cache. Scheme and authority are lower-cased so keys are
   * case-insensitive; an empty property map is normalized to null so it compares
   * equal to an absent one.
   */
  public static class Key {
    private final String mScheme;
    private final String mAuthority;
    private final Map<String, String> mProperties;

    Key(AlluxioURI uri, Map<String, String> properties) {
      mScheme = uri.getScheme() == null ? "" : uri.getScheme().toLowerCase();
      // NOTE(review): assumes uri.getAuthority() is never null — confirm for URIs
      // that carry no authority component.
      mAuthority = uri.getAuthority().toString().toLowerCase();
      // Normalize "no properties": null and empty map must produce equal keys.
      mProperties = (properties == null || properties.isEmpty()) ? null : properties;
    }

    @Override
    public int hashCode() {
      return Objects.hashCode(mScheme, mAuthority, mProperties);
    }

    @Override
    public boolean equals(Object object) {
      if (object == this) {
        return true;
      }
      if (!(object instanceof Key)) {
        return false;
      }
      Key that = (Key) object;
      return Objects.equal(mAuthority, that.mAuthority) && Objects
          .equal(mProperties, that.mProperties) && Objects.equal(mScheme, that.mScheme);
    }

    @Override
    public String toString() {
      return Objects.toStringHelper(this)
          .add("authority", mAuthority)
          .add("scheme", mScheme)
          .add("properties", mProperties)
          .toString();
    }
  }

  // TODO(binfan): Add refcount to the UFS instance. Once the refcount goes to zero,
  // we could close this UFS instance.
  /**
   * Maps from key to {@link UnderFileSystem} instances. This map keeps the entire set of UFS
   * instances, each keyed by their unique combination of Uri and conf information. This map
   * helps efficiently identify if a UFS instance in request should be created or can be reused.
   */
  protected final ConcurrentHashMap<Key, UnderFileSystem> mUnderFileSystemMap =
      new ConcurrentHashMap<>();
  /**
   * Maps from mount id to {@link UfsClient} instances. This map helps efficiently retrieve
   * existing UFS info given its mount id.
   */
  private final ConcurrentHashMap<Long, UfsClient> mMountIdToUfsInfoMap =
      new ConcurrentHashMap<>();

  // Lazily initialized by getRoot(); guarded by synchronized (this) there.
  private UfsClient mRootUfsClient;
  // Collects every created UFS instance so close() can release them all at once.
  protected final Closer mCloser;

  AbstractUfsManager() {
    mCloser = Closer.create();
  }

  /**
   * Return a UFS instance if it already exists in the cache, otherwise, creates a new instance and
   * return this.
   *
   * @param ufsUri the UFS path
   * @param ufsConf the UFS configuration
   * @return the UFS instance
   */
  private UnderFileSystem getOrAdd(AlluxioURI ufsUri, UnderFileSystemConfiguration ufsConf) {
    Key key = new Key(ufsUri, ufsConf.getMountSpecificConf());
    UnderFileSystem cachedFs = mUnderFileSystemMap.get(key);
    if (cachedFs != null) {
      return cachedFs;
    }
    // On cache miss, synchronize the creation to ensure ufs is only created once
    synchronized (mLock) {
      // Re-check under the lock: another thread may have created it in the meantime.
      cachedFs = mUnderFileSystemMap.get(key);
      if (cachedFs != null) {
        return cachedFs;
      }
      UnderFileSystem fs = UnderFileSystem.Factory.create(ufsUri.toString(), ufsConf);
      mUnderFileSystemMap.putIfAbsent(key, fs);
      // Register so the instance is released when this manager is closed.
      mCloser.register(fs);
      return fs;
    }
  }

  @Override
  public void addMount(long mountId, final AlluxioURI ufsUri,
      final UnderFileSystemConfiguration ufsConf) {
    Preconditions.checkArgument(mountId != IdUtils.INVALID_MOUNT_ID, "mountId");
    Preconditions.checkNotNull(ufsUri, "ufsUri");
    Preconditions.checkNotNull(ufsConf, "ufsConf");
    // UFS creation is deferred: the supplier only invokes getOrAdd on first use.
    mMountIdToUfsInfoMap.put(mountId, new UfsClient(() -> getOrAdd(ufsUri, ufsConf), ufsUri));
  }

  @Override
  public void removeMount(long mountId) {
    Preconditions.checkArgument(mountId != IdUtils.INVALID_MOUNT_ID, "mountId");
    // TODO(binfan): check the refcount of this ufs in mUnderFileSystemMap and remove it if this is
    // no more used. Currently, it is possibly used by out mount too.
    mMountIdToUfsInfoMap.remove(mountId);
  }

  @Override
  public UfsClient get(long mountId) throws NotFoundException, UnavailableException {
    UfsClient ufsClient = mMountIdToUfsInfoMap.get(mountId);
    if (ufsClient == null) {
      throw new NotFoundException(
          String.format("Mount Id %d not found in cached mount points", mountId));
    }
    return ufsClient;
  }

  @Override
  public UfsClient getRoot() {
    // Lazily mounts the root UFS from configuration on first access.
    synchronized (this) {
      if (mRootUfsClient == null) {
        String rootUri = Configuration.get(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS);
        boolean rootReadOnly =
            Configuration.getBoolean(PropertyKey.MASTER_MOUNT_TABLE_ROOT_READONLY);
        boolean rootShared = Configuration.getBoolean(PropertyKey.MASTER_MOUNT_TABLE_ROOT_SHARED);
        Map<String, String> rootConf =
            Configuration.getNestedProperties(PropertyKey.MASTER_MOUNT_TABLE_ROOT_OPTION);
        addMount(IdUtils.ROOT_MOUNT_ID, new AlluxioURI(rootUri),
            UnderFileSystemConfiguration.defaults().setReadOnly(rootReadOnly).setShared(rootShared)
                .setMountSpecificConf(rootConf));
        try {
          mRootUfsClient = get(IdUtils.ROOT_MOUNT_ID);
        } catch (NotFoundException | UnavailableException e) {
          // addMount above just registered ROOT_MOUNT_ID, so get cannot fail here.
          throw new RuntimeException("We should never reach here", e);
        }
      }
      return mRootUfsClient;
    }
  }

  @Override
  public void close() throws IOException {
    // Releases every UFS instance registered in getOrAdd.
    mCloser.close();
  }
}
apache-2.0
crazycode/weixin-java-tools
weixin-java-mp/src/main/java/me/chanjar/weixin/mp/api/impl/WxMpDeviceServiceImpl.java
3269
package me.chanjar.weixin.mp.api.impl; import me.chanjar.weixin.common.exception.WxErrorException; import me.chanjar.weixin.mp.api.WxMpDeviceService; import me.chanjar.weixin.mp.api.WxMpService; import me.chanjar.weixin.mp.bean.device.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by keungtung on 10/12/2016. */ public class WxMpDeviceServiceImpl implements WxMpDeviceService { private static final String API_URL_PREFIX = "https://api.weixin.qq.com/device"; private static Logger log = LoggerFactory.getLogger(WxMpMenuServiceImpl.class); private WxMpService wxMpService; public WxMpDeviceServiceImpl(WxMpService wxMpService) { this.wxMpService = wxMpService; } @Override public TransMsgResp transMsg(WxDeviceMsg msg) throws WxErrorException { String url = API_URL_PREFIX + "/transmsg"; String response = this.wxMpService.post(url, msg.toJson()); return TransMsgResp.fromJson(response); } @Override public WxDeviceQrCodeResult getQrCode(String productId) throws WxErrorException { String url = API_URL_PREFIX + "/getqrcode"; String response = this.wxMpService.get(url, "product_id=" + productId); return WxDeviceQrCodeResult.fromJson(response); } @Override public WxDeviceAuthorizeResult authorize(WxDeviceAuthorize wxDeviceAuthorize) throws WxErrorException { String url = API_URL_PREFIX + "/authorize_device"; String response = this.wxMpService.post(url, wxDeviceAuthorize.toJson()); return WxDeviceAuthorizeResult.fromJson(response); } @Override public WxDeviceBindResult bind(WxDeviceBind wxDeviceBind) throws WxErrorException { String url = API_URL_PREFIX + "/bind"; String response = this.wxMpService.post(url, wxDeviceBind.toJson()); return WxDeviceBindResult.fromJson(response); } @Override public WxDeviceBindResult compelBind(WxDeviceBind wxDeviceBind) throws WxErrorException { String url = API_URL_PREFIX + "/compel_bind"; String response = this.wxMpService.post(url, wxDeviceBind.toJson()); return WxDeviceBindResult.fromJson(response); } @Override public 
WxDeviceBindResult unbind(WxDeviceBind wxDeviceBind) throws WxErrorException { String url = API_URL_PREFIX + "/unbind?"; String response = this.wxMpService.post(url, wxDeviceBind.toJson()); return WxDeviceBindResult.fromJson(response); } @Override public WxDeviceBindResult compelUnbind(WxDeviceBind wxDeviceBind) throws WxErrorException { String url = API_URL_PREFIX + "/compel_unbind?"; String response = this.wxMpService.post(url, wxDeviceBind.toJson()); return WxDeviceBindResult.fromJson(response); } @Override public WxDeviceOpenIdResult getOpenId(String deviceType, String deviceId) throws WxErrorException { String url = API_URL_PREFIX + "/get_openid"; String response = this.wxMpService.get(url, "device_type=" + deviceType + "&device_id=" + deviceId); return WxDeviceOpenIdResult.fromJson(response); } @Override public WxDeviceBindDeviceResult getBindDevice(String openId) throws WxErrorException { String url = API_URL_PREFIX + "/get_bind_device"; String response = this.wxMpService.get(url, "openid=" + openId); return WxDeviceBindDeviceResult.fromJson(response); } }
apache-2.0
fschueler/incubator-systemml
src/main/java/org/apache/sysml/api/mlcontext/Matrix.java
5551
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.api.mlcontext; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.rdd.RDD; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.sysml.runtime.controlprogram.caching.MatrixObject; import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext; import org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtils; /** * Matrix encapsulates a SystemML matrix. It allows for easy conversion to * various other formats, such as RDDs, JavaRDDs, DataFrames, * BinaryBlockMatrices, and double[][]s. After script execution, it offers a * convenient format for obtaining SystemML matrix data in Scala tuples. * */ public class Matrix { private MatrixObject matrixObject; private SparkExecutionContext sparkExecutionContext; public Matrix(MatrixObject matrixObject, SparkExecutionContext sparkExecutionContext) { this.matrixObject = matrixObject; this.sparkExecutionContext = sparkExecutionContext; } /** * Obtain the matrix as a SystemML MatrixObject. 
* * @return the matrix as a SystemML MatrixObject */ public MatrixObject toMatrixObject() { return matrixObject; } /** * Obtain the matrix as a two-dimensional double array * * @return the matrix as a two-dimensional double array */ public double[][] to2DDoubleArray() { return MLContextConversionUtil.matrixObjectTo2DDoubleArray(matrixObject); } /** * Obtain the matrix as a {@code JavaRDD<String>} in IJV format * * @return the matrix as a {@code JavaRDD<String>} in IJV format */ public JavaRDD<String> toJavaRDDStringIJV() { return MLContextConversionUtil.matrixObjectToJavaRDDStringIJV(matrixObject); } /** * Obtain the matrix as a {@code JavaRDD<String>} in CSV format * * @return the matrix as a {@code JavaRDD<String>} in CSV format */ public JavaRDD<String> toJavaRDDStringCSV() { return MLContextConversionUtil.matrixObjectToJavaRDDStringCSV(matrixObject); } /** * Obtain the matrix as a {@code RDD<String>} in CSV format * * @return the matrix as a {@code RDD<String>} in CSV format */ public RDD<String> toRDDStringCSV() { return MLContextConversionUtil.matrixObjectToRDDStringCSV(matrixObject); } /** * Obtain the matrix as a {@code RDD<String>} in IJV format * * @return the matrix as a {@code RDD<String>} in IJV format */ public RDD<String> toRDDStringIJV() { return MLContextConversionUtil.matrixObjectToRDDStringIJV(matrixObject); } /** * Obtain the matrix as a {@code DataFrame} of doubles with an ID column * * @return the matrix as a {@code DataFrame} of doubles with an ID column */ public Dataset<Row> toDF() { return MLContextConversionUtil.matrixObjectToDataFrame(matrixObject, sparkExecutionContext, false); } /** * Obtain the matrix as a {@code DataFrame} of doubles with an ID column * * @return the matrix as a {@code DataFrame} of doubles with an ID column */ public Dataset<Row> toDFDoubleWithIDColumn() { return MLContextConversionUtil.matrixObjectToDataFrame(matrixObject, sparkExecutionContext, false); } /** * Obtain the matrix as a {@code DataFrame} of doubles 
with no ID column * * @return the matrix as a {@code DataFrame} of doubles with no ID column */ public Dataset<Row> toDFDoubleNoIDColumn() { Dataset<Row> df = MLContextConversionUtil.matrixObjectToDataFrame(matrixObject, sparkExecutionContext, false); return df.drop(RDDConverterUtils.DF_ID_COLUMN); } /** * Obtain the matrix as a {@code DataFrame} of vectors with an ID column * * @return the matrix as a {@code DataFrame} of vectors with an ID column */ public Dataset<Row> toDFVectorWithIDColumn() { return MLContextConversionUtil.matrixObjectToDataFrame(matrixObject, sparkExecutionContext, true); } /** * Obtain the matrix as a {@code DataFrame} of vectors with no ID column * * @return the matrix as a {@code DataFrame} of vectors with no ID column */ public Dataset<Row> toDFVectorNoIDColumn() { Dataset<Row> df = MLContextConversionUtil.matrixObjectToDataFrame(matrixObject, sparkExecutionContext, true); return df.drop(RDDConverterUtils.DF_ID_COLUMN); } /** * Obtain the matrix as a {@code BinaryBlockMatrix} * * @return the matrix as a {@code BinaryBlockMatrix} */ public BinaryBlockMatrix toBinaryBlockMatrix() { return MLContextConversionUtil.matrixObjectToBinaryBlockMatrix(matrixObject, sparkExecutionContext); } /** * Obtain the matrix metadata * * @return the matrix metadata */ public MatrixMetadata getMatrixMetadata() { return new MatrixMetadata(matrixObject.getMatrixCharacteristics()); } @Override public String toString() { return matrixObject.toString(); } }
apache-2.0
apache/geronimo
plugins/clustering/geronimo-farm/src/main/java/org/apache/geronimo/farm/config/NodeInfo.java
1110
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.geronimo.farm.config;

import java.io.IOException;

import org.apache.geronimo.kernel.Kernel;

/**
 * Describes a single node in a Geronimo farm: its identity, how to reach it
 * over JMX, and a factory for a {@link Kernel} connected to it.
 *
 * @version $Rev:$ $Date:$
 */
public interface NodeInfo {

    /**
     * @return the name identifying this node — presumably unique within the farm; verify with implementations
     */
    String getName();

    /**
     * @return the JMX connector details used to reach this node
     */
    ExtendedJMXConnectorInfo getConnectorInfo();

    /**
     * Creates a {@link Kernel} for this node.
     *
     * @return a new kernel instance
     * @throws IOException if the node cannot be reached
     */
    Kernel newKernel() throws IOException;
}
apache-2.0
rankinc/Resteasy
resteasy-jaxrs/src/main/java/org/jboss/resteasy/spi/MethodNotAllowedException.java
1136
package org.jboss.resteasy.spi;

import javax.ws.rs.core.Response;

/**
 * Thrown by RESTEasy when HTTP Method Not Allowed (405) is encountered.
 * JAX-RS now has this exception.
 * <p>
 * Constructors that do not take a {@link Response} force the status code to 405;
 * the {@code Response}-taking constructors use whatever status the given response carries.
 *
 * @deprecated Replaced by javax.ws.rs.NotAllowedException in jaxrs-api module
 *             (<a href="https://jcp.org/en/jsr/detail?id=339">JSR 339</a>).
 *
 * @see javax.ws.rs.NotAllowedException
 */
@Deprecated
public class MethodNotAllowedException extends LoggableFailure
{
   /** @param s detail message; status is fixed at 405 */
   public MethodNotAllowedException(String s)
   {
      super(s, 405);
   }

   /** @param s detail message; @param response response returned to the client */
   public MethodNotAllowedException(String s, Response response)
   {
      super(s, response);
   }

   /** @param s detail message; @param throwable cause; @param response response returned to the client */
   public MethodNotAllowedException(String s, Throwable throwable, Response response)
   {
      super(s, throwable, response);
   }

   /** @param s detail message; @param throwable cause; status is fixed at 405 */
   public MethodNotAllowedException(String s, Throwable throwable)
   {
      super(s, throwable, 405);
   }

   /** @param throwable cause; status is fixed at 405 */
   public MethodNotAllowedException(Throwable throwable)
   {
      super(throwable, 405);
   }

   /** @param throwable cause; @param response response returned to the client */
   public MethodNotAllowedException(Throwable throwable, Response response)
   {
      super(throwable, response);
   }
}
apache-2.0
mariuszs/assertj-core
src/main/java/org/assertj/core/api/AbstractPathAssert.java
46021
/** * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2015 the original author or authors. */ package org.assertj.core.api; import static java.lang.String.format; import java.nio.charset.Charset; import java.nio.file.ClosedFileSystemException; import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Path; import java.nio.file.ProviderMismatchException; import java.nio.file.spi.FileSystemProvider; import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.Paths; import org.assertj.core.util.VisibleForTesting; /** * Assertions for {@link Path} objects * * <p> * Note that some assertions have two versions: a normal one and a "raw" one (for instance, {@code hasParent()} and * {@code hasParentRaw()}. The difference is that normal assertions will {@link Path#toRealPath(LinkOption...) * canonicalize} or {@link Path#normalize() normalize} the tested path and, where applicable, the path argument, before * performing the actual test. Canonicalization includes normalization. * </p> * * <p> * Canonicalization may lead to an I/O error if a path does not exist, in which case the given assertions will fail with * a {@link PathsException}. Also note that {@link Files#isSymbolicLink(Path) symbolic links} will be followed if the * filesystem supports them. 
Finally, if a path is not {@link Path#isAbsolute() absolute}, canonicalization will * resolve the path against the process' current working directory. * </p> * * <p> * These assertions are filesystem independent. You may use them on {@code Path} instances issued from the default * filesystem (ie, instances you get when using {@link java.nio.file.Paths#get(String, String...)}) or from other * filesystems. For more information, see the {@link FileSystem javadoc for {@code FileSystem} . * </p> * * <p> * Furthermore: * </p> * * <ul> * <li>Unless otherwise noted, assertions which accept arguments will not accept {@code null} arguments; if a null * argument is passed, these assertions will throw a {@link NullPointerException}.</li> * <li>It is the caller's responsibility to ensure that paths used in assertions are issued from valid filesystems which * are not {@link FileSystem#close() closed}. If a filesystems is closed, assertions will throw a * {@link ClosedFileSystemException}.</li> * <li>Some assertions take another {@link Path} as an argument. If this path is not issued from the same * {@link FileSystemProvider provider} as the tested path, assertions will throw a {@link ProviderMismatchException}.</li> * <li>Some assertions may need to perform I/O on the path's underlying filesystem; if an I/O error occurs when * accessing the filesystem, these assertions will throw a {@link PathsException}.</li> * </ul> * * @param <S> self type * * @see Path * @see java.nio.file.Paths#get(String, String...) * @see FileSystem * @see FileSystem#getPath(String, String...) 
* @see FileSystems#getDefault() * @see Files */ public abstract class AbstractPathAssert<S extends AbstractPathAssert<S>> extends AbstractAssert<S, Path> { @VisibleForTesting protected Paths paths = Paths.instance(); @VisibleForTesting Charset charset = Charset.defaultCharset(); protected AbstractPathAssert(final Path actual, final Class<?> selfType) { super(actual, selfType); } /** * Verifies that the content of the actual {@code Path} is the same as the given one (both paths must be a readable * files). The default charset is used to read each files. * * <p> * Examples: * </p> * * <pre><code class="java"> * // use the default charset * Path xFile = Files.write(Paths.get("xfile.txt"), "The Truth Is Out There".getBytes()); * Path xFileClone = Files.write(Paths.get("xfile-clone.txt"), "The Truth Is Out There".getBytes()); * Path xFileFrench = Files.write(Paths.get("xfile-french.txt"), "La Vérité Est Ailleurs".getBytes()); * * // The following assertion succeeds (default charset is used): * assertThat(xFile).hasSameContentAs(xFileClone); * * // The following assertion fails: * assertThat(xFile).hasSameContentAs(xFileFrench); * </code></pre> * * @param expected the given {@code Path} to compare the actual {@code Path} to. * @return {@code this} assertion object. * @throws NullPointerException if the given {@code Path} is {@code null}. * @throws AssertionError if the actual or given {@code Path} is not an existing readable file. * @throws AssertionError if the actual {@code Path} is {@code null}. * @throws AssertionError if the content of the actual {@code Path} is not equal to the content of the given one. * @throws PathsException if an I/O error occurs. */ public S hasSameContentAs(Path expected) { paths.assertHasSameContentAs(info, actual, expected); return myself; } /** * Verifies that the binary content of the actual {@code Path} is <b>exactly</b> equal to the given one. 
* * <p> * Examples: * </p> * * <pre><code class="java"> * // using the default charset, the following assertion succeeds: * Path xFile = Files.write(Paths.get("xfile.txt"), "The Truth Is Out There".getBytes()); * assertThat(xFile).hasBinaryContent("The Truth Is Out There".getBytes()); * * // using a specific charset * Charset turkishCharset = Charset.forName("windows-1254"); * Path xFileTurkish = Files.write(Paths.get("xfile.turk"), Collections.singleton("Gerçek Başka bir yerde mi"), turkishCharset); * * // The following assertion succeeds: * String expectedContent = "Gerçek Başka bir yerde mi" + System.lineSeparator(); * byte[] binaryContent = expectedContent.getBytes(turkishCharset.name()); * assertThat(xFileTurkish).hasBinaryContent(binaryContent); * * // The following assertion fails ... unless you are in Turkey ;-): * assertThat(xFileTurkish).hasBinaryContent("Gerçek Başka bir yerde mi".getBytes()); * </code></pre> * * @param expected the expected binary content to compare the actual {@code File}'s content to. * @return {@code this} assertion object. * @throws NullPointerException if the given content is {@code null}. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} is not an existing file. * @throws RuntimeIOException if an I/O error occurs. * @throws AssertionError if the content of the actual {@code File} is not equal to the given binary content. */ public S hasBinaryContent(byte[] expected) { paths.assertHasBinaryContent(info, actual, expected); return myself; } /** * Specifies the name of the charset to use for text-based assertions on the path's contents (path must be a readable * file). 
* * <p> * Examples: * </p> * * <pre><code class="java"> * Charset turkishCharset = Charset.forName("windows-1254"); * Path xFileTurkish = Files.write(Paths.get("xfile.turk"), Collections.singleton("Gerçek Başka bir yerde mi"), turkishCharset); * * // The following assertion succeeds: * assertThat(xFileTurkish).usingCharset("windows-1254").hasContent("Gerçek Başka bir yerde mi"); * </code></pre> * * @param charsetName the name of the charset to use. * @return {@code this} assertion object. * @throws IllegalArgumentException if the given encoding is not supported on this platform. */ public S usingCharset(String charsetName) { if (!Charset.isSupported(charsetName)) throw new IllegalArgumentException(format("Charset:<'%s'> is not supported on this system", charsetName)); return usingCharset(Charset.forName(charsetName)); } /** * Specifies the charset to use for text-based assertions on the path's contents (path must be a readable file). * * <p> * Examples: * </p> * * <pre><code class="java"> * Charset turkishCharset = Charset.forName("windows-1254"); * Path xFileTurkish = Files.write(Paths.get("xfile.turk"), Collections.singleton("Gerçek Başka bir yerde mi"), turkishCharset); * * // The following assertion succeeds: * assertThat(xFileTurkish).usingCharset(turkishCharset).hasContent("Gerçek Başka bir yerde mi"); * </code></pre> * * @param charset the charset to use. * @return {@code this} assertion object. * @throws NullPointerException if the given charset is {@code null}. 
*/ public S usingCharset(Charset charset) { if (charset == null) throw new NullPointerException("The charset should not be null"); this.charset = charset; return myself; } /** * Verifies that the text content of the actual {@code Path} (which must be a readable file) is <b>exactly</b> equal * to the given one.<br/> * The charset to use when reading the file should be provided with {@link #usingCharset(Charset)} or * {@link #usingCharset(String)} prior to calling this method; if not, the platform's default charset (as returned by * {@link Charset#defaultCharset()}) will be used. * * <p> * Examples: * </p> * * <pre><code class="java"> * // use the default charset * Path xFile = Files.write(Paths.get("xfile.txt"), "The Truth Is Out There".getBytes()); * * // The following assertion succeeds (default charset is used): * assertThat(xFile).hasContent("The Truth Is Out There"); * * // The following assertion fails: * assertThat(xFile).hasContent("La Vérité Est Ailleurs"); * * // using a specific charset * Charset turkishCharset = Charset.forName("windows-1254"); * * Path xFileTurkish = Files.write(Paths.get("xfile.turk"), Collections.singleton("Gerçek Başka bir yerde mi"), turkishCharset); * * // The following assertion succeeds: * assertThat(xFileTurkish).usingCharset(turkishCharset).hasContent("Gerçek Başka bir yerde mi"); * * // The following assertion fails ... unless you are in Turkey ;-): * assertThat(xFileTurkish).hasContent("Gerçek Başka bir yerde mi"); * </code></pre> * * @param expected the expected text content to compare the actual {@code File}'s content to. * @return {@code this} assertion object. * @throws NullPointerException if the given content is {@code null}. * @throws RuntimeIOException if an I/O error occurs. * @throws AssertionError if the actual {@code Path} is {@code null}. * @throws AssertionError if the actual {@code Path} is not a {@link Files#isReadable(Path) readable} file. 
* @throws AssertionError if the content of the actual {@code File} is not equal to the given content. */ public S hasContent(String expected) { paths.assertHasContent(info, actual, expected, charset); return myself; } /** * Assert that the tested {@link Path} is a readable file, it checks that the file exists (according to * {@link Files#exists(Path, LinkOption...)}) and that it is readable(according to {@link Files#isReadable(Path)}). * * <p> * Examples: * </p> * * <pre><code class="java"> * // Create a file and set permissions to be readable by all. * Path readableFile = Paths.get("readableFile"); * Set&lt;PosixFilePermission&gt; perms = PosixFilePermissions.fromString("r--r--r--"); * Files.createFile(readableFile, PosixFilePermissions.asFileAttribute(perms)); * * final Path symlinkToReadableFile = FileSystems.getDefault().getPath("symlinkToReadableFile"); * Files.createSymbolicLink(symlinkToReadableFile, readableFile); * * // Create a file and set permissions not to be readable. * Path nonReadableFile = Paths.get("nonReadableFile"); * Set&lt;PosixFilePermission&gt; notReadablePerms = PosixFilePermissions.fromString("-wx------"); * Files.createFile(nonReadableFile, PosixFilePermissions.asFileAttribute(notReadablePerms)); * * final Path nonExistentPath = FileSystems.getDefault().getPath("nonexistent"); * * // The following assertions succeed: * assertThat(readableFile).isReadable(); * assertThat(symlinkToReadableFile).isReadable(); * * // The following assertions fail: * assertThat(nonReadableFile).isReadable(); * assertThat(nonExistentPath).isReadable(); * </code></pre> * * @return self * * @see Files#isReadable(Path) */ public S isReadable() { paths.assertIsReadable(info, actual); return myself; } /** * Assert that the tested {@link Path} is a writable file, it checks that the file exists (according to * {@link Files#exists(Path, LinkOption...)}) and that it is writable(according to {@link Files#isWritable(Path)}). 
* * <p> * Examples: * </p> * * <pre><code class="java"> * // Create a file and set permissions to be writable by all. * Path writableFile = Paths.get("writableFile"); * Set&lt;PosixFilePermission&gt; perms = PosixFilePermissions.fromString("rw-rw-rw-"); * Files.createFile(writableFile, PosixFilePermissions.asFileAttribute(perms)); * * final Path symlinkToWritableFile = FileSystems.getDefault().getPath("symlinkToWritableFile"); * Files.createSymbolicLink(symlinkToWritableFile, writableFile); * * // Create a file and set permissions not to be writable. * Path nonWritableFile = Paths.get("nonWritableFile"); * perms = PosixFilePermissions.fromString("r--r--r--"); * Files.createFile(nonWritableFile, PosixFilePermissions.asFileAttribute(perms)); * * final Path nonExistentPath = FileSystems.getDefault().getPath("nonexistent"); * * // The following assertions succeed: * assertThat(writableFile).isWritable(); * assertThat(symlinkToWritableFile).isWritable(); * * // The following assertions fail: * assertThat(nonWritableFile).isWritable(); * assertThat(nonExistentPath).isWritable(); * </code></pre> * * @return self * * @see Files#isWritable(Path) */ public S isWritable() { paths.assertIsWritable(info, actual); return myself; } /** * Assert that the tested {@link Path} is a executable file, it checks that the file exists (according to * {@link Files#exists(Path, LinkOption...)}) and that it is executable(according to {@link Files#isExecutable(Path)} * ). * * <p> * Examples: * </p> * * <pre><code class="java"> * // Create a file and set permissions to be executable by all. 
* Path executableFile = Paths.get("executableFile"); * Set&lt;PosixFilePermission&gt; perms = PosixFilePermissions.fromString("r-xr-xr-x"); * Files.createFile(executableFile, PosixFilePermissions.asFileAttribute(perms)); * * final Path symlinkToExecutableFile = FileSystems.getDefault().getPath("symlinkToExecutableFile"); * Files.createSymbolicLink(symlinkToExecutableFile, executableFile); * * // Create a file and set permissions not to be executable. * Path nonExecutableFile = Paths.get("nonExecutableFile"); * perms = PosixFilePermissions.fromString("rw-------"); * Files.createFile(nonExecutableFile, PosixFilePermissions.asFileAttribute(perms)); * * final Path nonExistentPath = FileSystems.getDefault().getPath("nonexistent"); * * // The following assertions succeed: * assertThat(executableFile).isExecutable(); * assertThat(symlinkToExecutableFile).isExecutable(); * * // The following assertions fail: * assertThat(nonExecutableFile).isExecutable(); * assertThat(nonExistentPath).isExecutable(); * </code></pre> * * @return self * * @see Files#isExecutable(Path) */ public S isExecutable() { paths.assertIsExecutable(info, actual); return myself; } /** * Assert that the tested {@link Path} exists according to {@link Files#exists(Path, LinkOption...) * Files#exists(Path)}) * * <p> * <strong>Note that this assertion will follow symbolic links before asserting the path's existence.</strong> * </p> * * <p> * On Windows system, this has no influence. On Unix systems, this means the assertion result will fail if the path is * a symbolic link whose target does not exist. If you want to assert the existence of the symbolic link itself, use * {@link #existsNoFollowLinks()} instead. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * // Create a regular file, and a symbolic link pointing to it * final Path existingFile = fs.getPath("somefile"); * Files.createFile(existingFile); * final Path symlinkToExistingFile = fs.getPath("symlinkToExistingFile"); * Files.createSymbolicLink(symlinkToExistingFile, existingFile); * * // Create a symbolic link whose target does not exist * final Path nonExistentPath = fs.getPath("nonexistent"); * final Path symlinkToNonExistentPath = fs.getPath("symlinkToNonExistentPath"); * Files.createSymbolicLink(symlinkToNonExistentPath, nonExistentPath); * * // The following assertions succeed: * assertThat(existingFile).exists(); * assertThat(symlinkToExistingFile).exists(); * * // The following assertions fail: * assertThat(nonExistentPath).exists(); * assertThat(symlinkToNonExistentPath).exists(); * </code></pre> * * @return self * * @see Files#exists(Path, LinkOption...) */ public S exists() { paths.assertExists(info, actual); return myself; } /** * Assert that the tested {@link Path} exists, not following symbolic links, by calling * {@link Files#exists(Path, LinkOption...) Files#exists(Path, LinkOption.NOFOLLOW_LINKS)}). * * <p> * This assertion behaves like {@link #exists()}, with the difference that it can be used to assert the existence of a * symbolic link even if its target is invalid. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * // Create a regular file, and a symbolic link pointing to it * final Path existingFile = fs.getPath("somefile"); * Files.createFile(existingFile); * final Path symlinkToExistingFile = fs.getPath("symlink"); * Files.createSymbolicLink(symlinkToExistingFile, existingFile); * * // Create a symbolic link whose target does not exist * final Path nonExistentPath = fs.getPath("nonexistent"); * final Path symlinkToNonExistentPath = fs.getPath("symlinkToNonExistentPath"); * Files.createSymbolicLink(symlinkToNonExistentPath, nonExistentPath); * * // The following assertions succeed * assertThat(existingFile).existsNoFollowLinks(); * assertThat(symlinkToExistingFile).existsNoFollowLinks(); * assertThat(symlinkToNonExistentPath).existsNoFollowLinks(); * * // The following assertion fails * assertThat(nonExistentPath).existsNoFollowLinks(); * </code></pre> * * @return self * * @see Files#exists(Path, LinkOption...) */ public S existsNoFollowLinks() { paths.assertExistsNoFollowLinks(info, actual); return myself; } /** * Assert that the tested {@link Path} does not exist. * * <p> * <strong>IMPORTANT NOTE:</strong> this method will NOT follow symbolic links (provided that the underlying * {@link FileSystem} of this path supports symbolic links at all). * </p> * * <p> * This means that even if the link exists this assertion will fail even if the link's target does not exists - note * that this is unlike the default behavior of {@link #exists()}. * </p> * * <p> * If you are a Windows user, the above does not apply to you; if you are a Unix user however, this is important. 
* Consider the following: * </p> * * <pre><code class="java"> * // fs is a FileSystem * // Create a regular file, and a symbolic link pointing to it * final Path existingFile = fs.getPath("somefile"); * Files.createFile(existingFile); * final Path symlinkToExistingFile = fs.getPath("symlink"); * Files.createSymbolicLink(symlinkToExistingFile, existingFile); * * // Create a symbolic link to a nonexistent target file. * final Path nonExistentPath = fs.getPath("nonExistentPath"); * final Path symlinkToNonExistentPath = fs.getPath("symlinkToNonExistentPath"); * Files.createSymbolicLink(symlinkToNonExistentPath, nonExistentPath); * * // The following assertion succeeds * assertThat(nonExistentPath).doesNotExist(); * * // The following assertions fail: * assertThat(existingFile).doesNotExist(); * assertThat(symlinkToExistingFile).doesNotExist(); * // fail because symlinkToNonExistentPath exists even though its target does not. * assertThat(symlinkToNonExistentPath).doesNotExist(); * </code></pre> * * @return self * * @see Files#notExists(Path, LinkOption...) * @see LinkOption#NOFOLLOW_LINKS */ public S doesNotExist() { paths.assertDoesNotExist(info, actual); return myself; } /** * Assert that the tested {@link Path} is a regular file. * * <p> * <strong>Note that this method will follow symbolic links.</strong> If you are a Unix user and wish to assert that a * path is a symbolic link instead, use {@link #isSymbolicLink()}. * </p> * * <p> * This assertion first asserts the existence of the path (using {@link #exists()}) then checks whether the path is a * regular file. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * * // Create a regular file, and a symbolic link to that regular file * final Path existingFile = fs.getPath("existingFile"); * final Path symlinkToExistingFile = fs.getPath("symlinkToExistingFile"); * Files.createFile(existingFile); * Files.createSymbolicLink(symlinkToExistingFile, existingFile); * * // Create a directory, and a symbolic link to that directory * final Path dir = fs.getPath("dir"); * final Path dirSymlink = fs.getPath("dirSymlink"); * Files.createDirectories(dir); * Files.createSymbolicLink(dirSymlink, dir); * * // Create a nonexistent entry, and a symbolic link to that entry * final Path nonExistentPath = fs.getPath("nonexistent"); * final Path symlinkToNonExistentPath = fs.getPath("symlinkToNonExistentPath"); * Files.createSymbolicLink(symlinkToNonExistentPath, nonExistentPath); * * // the following assertions succeed: * assertThat(existingFile).isRegularFile(); * assertThat(symlinkToExistingFile).isRegularFile(); * * // the following assertions fail because paths do not exist: * assertThat(nonExistentPath).isRegularFile(); * assertThat(symlinkToNonExistentPath).isRegularFile(); * * // the following assertions fail because paths exist but are not regular files: * assertThat(dir).isRegularFile(); * assertThat(dirSymlink).isRegularFile(); * </code></pre> * * @return self */ public S isRegularFile() { paths.assertIsRegularFile(info, actual); return myself; } /** * Assert that the tested {@link Path} is a directory. * <p> * <strong>Note that this method will follow symbolic links.</strong> If you are a Unix user and wish to assert that a * path is a symbolic link instead, use {@link #isSymbolicLink()}. * </p> * * <p> * This assertion first asserts the existence of the path (using {@link #exists()}) then checks whether the path is a * directory. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * * // Create a regular file, and a symbolic link to that regular file * final Path existingFile = fs.getPath("existingFile"); * final Path symlinkToExistingFile = fs.getPath("symlinkToExistingFile"); * Files.createFile(existingFile); * Files.createSymbolicLink(symlinkToExistingFile, existingFile); * * // Create a directory, and a symbolic link to that directory * final Path dir = fs.getPath("dir"); * final Path dirSymlink = fs.getPath("dirSymlink"); * Files.createDirectories(dir); * Files.createSymbolicLink(dirSymlink, dir); * * // Create a nonexistent entry, and a symbolic link to that entry * final Path nonExistentPath = fs.getPath("nonexistent"); * final Path symlinkToNonExistentPath = fs.getPath("symlinkToNonExistentPath"); * Files.createSymbolicLink(symlinkToNonExistentPath, nonExistentPath); * * // the following assertions succeed: * assertThat(dir).isDirectory(); * assertThat(dirSymlink).isDirectory(); * * // the following assertions fail because paths do not exist: * assertThat(nonExistentPath).isDirectory(); * assertThat(symlinkToNonExistentPath).isDirectory(); * * // the following assertions fail because paths exist but are not directories: * assertThat(existingFile).isDirectory(); * assertThat(symlinkToExistingFile).isDirectory(); * </code></pre> * * @return self */ public S isDirectory() { paths.assertIsDirectory(info, actual); return myself; } /** * Assert that the tested {@link Path} is a symbolic link. * <p> * This assertion first asserts the existence of the path (using {@link #existsNoFollowLinks()}) then checks whether * the path is a symbolic link. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * * // Create a regular file, and a symbolic link to that regular file * final Path existingFile = fs.getPath("existingFile"); * final Path symlinkToExistingFile = fs.getPath("symlinkToExistingFile"); * Files.createFile(existingFile); * Files.createSymbolicLink(symlinkToExistingFile, existingFile); * * // Create a directory, and a symbolic link to that directory * final Path dir = fs.getPath("dir"); * final Path dirSymlink = fs.getPath("dirSymlink"); * Files.createDirectories(dir); * Files.createSymbolicLink(dirSymlink, dir); * * // Create a nonexistent entry, and a symbolic link to that entry * final Path nonExistentPath = fs.getPath("nonexistent"); * final Path symlinkToNonExistentPath = fs.getPath("symlinkToNonExistentPath"); * Files.createSymbolicLink(symlinkToNonExistentPath, nonExistentPath); * * // the following assertions succeed: * assertThat(dirSymlink).isSymbolicLink(); * assertThat(symlinkToExistingFile).isSymbolicLink(); * assertThat(symlinkToNonExistentPath).isSymbolicLink(); * * // the following assertion fails because the path does not exist: * assertThat(nonExistentPath).isSymbolicLink(); * * // the following assertions fail because paths exist but are not symbolic links * assertThat(existingFile).isSymbolicLink(); * assertThat(dir).isSymbolicLink(); * </code></pre> * * @return self */ public S isSymbolicLink() { paths.assertIsSymbolicLink(info, actual); return myself; } /** * Assert that the tested {@link Path} is absolute (the path does not have to exist). * * <p> * Note that the fact that a path is absolute does not mean that it is {@link Path#normalize() normalized}: * {@code /foo/..} is absolute, for instance, but it is not normalized. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // unixFs is a Unix FileSystem * * // The following assertion succeeds: * assertThat(unixFs.getPath("/foo/bar")).isAbsolute(); * * // The following assertion fails: * assertThat(unixFs.getPath("foo/bar")).isAbsolute(); * * // windowsFs is a Windows FileSystem * * // The following assertion succeeds: * assertThat(windowsFs.getPath("c:\\foo")).isAbsolute(); * * // The following assertions fail: * assertThat(windowsFs.getPath("foo\\bar")).isAbsolute(); * assertThat(windowsFs.getPath("c:foo")).isAbsolute(); * assertThat(windowsFs.getPath("\\foo\\bar")).isAbsolute(); * </code></pre> * * @return self * * @see Path#isAbsolute() */ public S isAbsolute() { paths.assertIsAbsolute(info, actual); return myself; } /** * Assert that the tested {@link Path} is relative (opposite to {@link Path#isAbsolute()}). * * <p> * Examples: * </p> * * <pre><code class="java"> * // unixFs is a Unix FileSystem * * // The following assertions succeed: * assertThat(unixFs.getPath("./foo/bar")).isRelative(); * assertThat(unixFs.getPath("foo/bar")).isRelative(); * * // The following assertion fails: * assertThat(unixFs.getPath("/foo/bar")).isRelative(); * * // windowsFs is a Windows FileSystem * * // The following assertion succeeds: * assertThat(windowsFs.getPath("foo\\bar")).isRelative(); * assertThat(windowsFs.getPath("c:foo")).isRelative(); * assertThat(windowsFs.getPath("\\foo\\bar")).isRelative(); * * // The following assertions fail: * assertThat(windowsFs.getPath("c:\\foo")).isRelative(); * </code></pre> * * @return self * * @see Path#isAbsolute() */ public S isRelative() { paths.assertIsRelative(info, actual); return myself; } /** * Assert that the tested {@link Path} is normalized. 
* * <p> * A path is normalized if it has no redundant components; typically, on both Unix and Windows, this means that the * path has no "self" components ({@code .}) and that its only parent components ({@code ..}), if any, are at the * beginning of the path. * </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * * // the following assertions succeed: * assertThat(fs.getPath("/usr/lib")).isNormalized(); * assertThat(fs.getPath("a/b/c")).isNormalized(); * assertThat(fs.getPath("../d")).isNormalized(); * * // the following assertions fail: * assertThat(fs.getPath("/a/./b")).isNormalized(); * assertThat(fs.getPath("c/b/..")).isNormalized(); * assertThat(fs.getPath("/../../e")).isNormalized(); * </code></pre> * * @return self */ public S isNormalized() { paths.assertIsNormalized(info, actual); return myself; } /** * Assert that the tested {@link Path} is canonical by comparing it to its {@link Path#toRealPath(LinkOption...) real * path}. * * <p> * For Windows users, this assertion is no different than {@link #isAbsolute()} expect that the file must exist. For * Unix users, this assertion ensures that the tested path is the actual file system resource, that is, it is not a * {@link Files#isSymbolicLink(Path) symbolic link} to the actual resource, even if the path is absolute. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * // Create a directory * final Path basedir = fs.getPath("/tmp/foo"); * Files.createDirectories(basedir); * * // Create a file in this directory * final Path existingFile = basedir.resolve("existingFile"); * Files.createFile(existingFile); * * // Create a symbolic link to that file * final Path symlinkToExistingFile = basedir.resolve("symlinkToExistingFile"); * Files.createSymbolicLink(symlinkToExistingFile, existingFile); * * // The following assertion succeeds: * assertThat(existingFile).isCanonical(); * * // The following assertion fails: * assertThat(symlinkToExistingFile).isCanonical(); * </code></pre> * * @throws PathsException an I/O error occurred while evaluating the path * * @see Path#toRealPath(LinkOption...) * @see Files#isSameFile(Path, Path) */ public S isCanonical() { paths.assertIsCanonical(info, actual); return myself; } /** * Assert that the tested {@link Path} last element String representation is equal to the given filename. * * <p> * Note that the path does not need to exist to check its file name. * </p> * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * final Path file = fs.getPath("/foo/foo.txt"); * final Path symlink = fs.getPath("/home/symlink-to-foo"); * Files.createSymbolicLink(symlink, file); * * // the following assertions succeed: * assertThat(fs.getPath("/dir1/file.txt")).hasFileName("file.txt"); * assertThat(fs.getPath("/dir1/dir2")).hasFileName("dir2"); * // you can check file name on non existent paths * assertThat(file).hasFileName("foo.txt"); * assertThat(symlink).hasFileName("symlink-to-foo"); * * // the following assertions fail: * assertThat(fs.getPath("/dir1/file.txt").hasFileName("other.txt"); * // fail because, last element is "." 
* assertThat(fs.getPath("/dir1/.")).hasFileName("dir1"); * // fail because a link filename is not the same as its target filename * assertThat(symlink).hasFileName("file.txt"); * </code></pre> * * @param fileName the expected filename * @return self * * @throws NullPointerException if the given fileName is null. * @see Path#getFileName() */ public S hasFileName(final String fileName) { paths.assertHasFileName(info, actual, fileName); return myself; } /** * Assert that the tested {@link Path} has the expected parent path. * * <p> * <em>This assertion will perform canonicalization of the tested path and of the given argument before performing the test; see the class * description for more details. If this is not what you want, use {@link #hasParentRaw(Path)} instead.</em> * </p> * * <p> * Checks that the tested path has the given parent. This assertion will fail both if the tested path has no parent, * or has a different parent than what is expected. * </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * final Path actual = fs.getPath("/dir1/dir2/file"); * * // the following assertion succeeds: * assertThat(actual).hasParent(fs.getPath("/dir1/dir2/.")); * // this one too as this path will be normalized to "/dir1/dir2": * assertThat(actual).hasParent(fs.getPath("/dir1/dir3/../dir2/.")); * * // the following assertion fails: * assertThat(actual).hasParent(fs.getPath("/dir1")); * </code></pre> * * @param expected the expected parent path * @return self * * @throws NullPointerException if the given parent path is null. * @throws PathsException failed to canonicalize the tested path or the path given as an argument * * @see Path#getParent() */ public S hasParent(final Path expected) { paths.assertHasParent(info, actual, expected); return myself; } /** * Assert that the tested {@link Path} has the expected parent path. 
* * <p> * <em>This assertion will not perform any canonicalization of either the tested path or the path given as an argument; * see class description for more details. If this is not what you want, use {@link #hasParent(Path)} instead.</em> * </p> * * <p> * This assertion uses {@link Path#getParent()} with no modification, which means the only criterion for this * assertion's success is the path's components (its root and its name elements). * </p> * * <p> * This may lead to surprising results if the tested path and the path given as an argument are not normalized. For * instance, if the tested path is {@code /home/foo/../bar} and the argument is {@code /home}, the assertion will * <em>fail</em> since the parent of the tested path is not {@code /home} but... {@code /home/foo/..}. * </p> * * <p> * While this may seem counterintuitive, it has to be recalled here that it is not required for a {@link FileSystem} * to consider that {@code .} and {@code ..} are name elements for respectively the current directory and the parent * directory respectively. In fact, it is not even required that a {@link FileSystem} be hierarchical at all. * </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * final Path actual = fs.getPath("/dir1/dir2/file"); * * // the following assertion succeeds: * assertThat(actual).hasParentRaw(fs.getPath("/dir1/dir2")); * * // the following assertions fails: * assertThat(actual).hasParent(fs.getPath("/dir1")); * // ... and this one too as expected path is not canonicalized. * assertThat(actual).hasParentRaw(fs.getPath("/dir1/dir3/../dir2")); * </code></pre> * * @param expected the expected parent path * @return self * * @throws NullPointerException if the given parent path is null. * * @see Path#getParent() */ public S hasParentRaw(final Path expected) { paths.assertHasParentRaw(info, actual, expected); return myself; } /** * Assert that the tested {@link Path} has no parent. 
* * <p> * <em>This assertion will first canonicalize the tested path before performing the test; if this is not what you want, use {@link #hasNoParentRaw()} instead.</em> * </p> * * <p> * Check that the tested path, after canonicalization, has no parent. See the class description for more information * about canonicalization. * </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * * // the following assertion succeeds: * assertThat(fs.getPath("/")).hasNoParent(); * // this one too as path will be normalized to "/" * assertThat(fs.getPath("/usr/..")).hasNoParent(); * * // the following assertions fail: * assertThat(fs.getPath("/usr/lib")).hasNoParent(); * assertThat(fs.getPath("/usr")).hasNoParent(); * </code></pre> * * @return self * * @throws PathsException failed to canonicalize the tested path * * @see Path#getParent() */ public S hasNoParent() { paths.assertHasNoParent(info, actual); return myself; } /** * Assert that the tested {@link Path} has no parent. * * <p> * <em>This assertion will not canonicalize the tested path before performing the test; * if this is not what you want, use {@link #hasNoParent()} instead.</em> * </p> * * <p> * As canonicalization is not performed, this means the only criterion for this assertion's success is the path's * components (its root and its name elements). * </p> * * <p> * This may lead to surprising results. For instance, path {@code /usr/..} <em>does</em> have a parent, and this * parent is {@code /usr}. 
* </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * * // the following assertions succeed: * assertThat(fs.getPath("/")).hasNoParentRaw(); * assertThat(fs.getPath("foo")).hasNoParentRaw(); * * // the following assertions fail: * assertThat(fs.getPath("/usr/lib")).hasNoParentRaw(); * assertThat(fs.getPath("/usr")).hasNoParentRaw(); * // this one fails as canonicalization is not performed, leading to parent being /usr * assertThat(fs.getPath("/usr/..")).hasNoParent(); * </code></pre> * * @return self * * @see Path#getParent() */ public S hasNoParentRaw() { paths.assertHasNoParentRaw(info, actual); return myself; } /** * Assert that the tested {@link Path} starts with the given path. * * <p> * <em>This assertion will perform canonicalization of both the tested path and the path given as an argument; * see class description for more details. If this is not what you want, use {@link #startsWithRaw(Path)} instead.</em> * </p> * * <p> * Checks that the given {@link Path} starts with another path. Note that the name components matter, not the string * representation; this means that, for example, {@code /home/foobar/baz} <em>does not</em> start with * {@code /home/foo}. * </p> * * <p> * Examples: * </p> * * <pre><code class="java"> * // fs is a Unix filesystem * final Path tested = fs.getPath("/home/joe/myfile"); * * // the following assertion succeeds: * assertThat(tested).startsWith(fs.getPath("/home")); * assertThat(tested).startsWith(fs.getPath("/home/")); * assertThat(tested).startsWith(fs.getPath("/home/.")); * // assertion succeeds because this path will be canonicalized to "/home/joe" * assertThat(tested).startsWith(fs.getPath("/home/jane/../joe/.")); * * // the following assertion fails: * assertThat(tested).startsWith(fs.getPath("/home/harry")); * </code></pre> * * @param other the other path * @return self * * @throws NullPointerException if the given path is null. 
   * @throws PathsException failed to canonicalize the tested path or the path given as an argument
   *
   * @see Path#startsWith(Path)
   * @see Path#toRealPath(LinkOption...)
   */
  public S startsWith(final Path other) {
    // Canonicalizing variant: the Paths engine resolves the tested path (toRealPath)
    // before checking the prefix relation, so symlinks and ".." are accounted for.
    paths.assertStartsWith(info, actual, other);
    return myself;
  }

  /**
   * Assert that the tested {@link Path} starts with the given path.
   *
   * <p>
   * <em>This assertion does not perform canonicalization on either the
   * tested path or the path given as an argument; see class description for
   * more details. If this is not what you want, use {@link #startsWith(Path)}
   * instead.</em>
   * </p>
   *
   * <p>
   * Checks that the given {@link Path} starts with another path, without performing canonicalization on its arguments.
   * This means that the only criterion to determine whether a path starts with another is the tested path's, and the
   * argument's, name elements.
   * </p>
   *
   * <p>
   * This may lead to some surprising results: for instance, path {@code /../home/foo} does <em>not</em> start with
   * {@code /home} since the first name element of the former ({@code ..}) is different from the first name element of
   * the latter ({@code home}).
   * </p>
   *
   * <p>
   * Examples:
   * </p>
   *
   * <pre><code class="java">
   * // fs is a Unix filesystem
   * final Path tested = fs.getPath("/home/joe/myfile");
   *
   * // the following assertion succeeds:
   * assertThat(tested).startsWithRaw(fs.getPath("/home/joe"));
   *
   * // the following assertion fails:
   * assertThat(tested).startsWithRaw(fs.getPath("/home/harry"));
   * // .... and this one too as given path is not canonicalized
   * assertThat(tested).startsWithRaw(fs.getPath("/home/joe/.."));
   * </code></pre>
   *
   * @param other the other path
   * @return self
   *
   * @throws NullPointerException if the given path is null.
   *
   * @see Path#startsWith(Path)
   */
  public S startsWithRaw(final Path other) {
    // Purely syntactic comparison of name elements; no filesystem access.
    paths.assertStartsWithRaw(info, actual, other);
    return myself;
  }

  /**
   * Assert that the tested {@link Path} ends with the given path.
   *
   * <p>
   * This assertion will attempt to canonicalize the tested path and normalize the path given as an argument before
   * performing the actual test.
   * </p>
   *
   * <p>
   * Note that the criterion to determine success is determined by the path's name elements; therefore,
   * {@code /home/foobar/baz} does <em>not</em> end with {@code bar/baz}.
   * </p>
   *
   * <p>
   * Examples:
   * </p>
   *
   * <pre><code class="java">
   * // fs is a Unix filesystem.
   * // the current directory is supposed to be /home.
   * final Path tested = fs.getPath("/home/joe/myfile");
   * // as tested will be canonicalized, it could have been written: /home/jane/../joe/myfile
   *
   * // the following assertion succeeds:
   * assertThat(tested).endsWith(fs.getPath("joe/myfile"));
   *
   * // the following assertions fail:
   * assertThat(tested).endsWith(fs.getPath("joe/otherfile"));
   * // this path will be normalized to joe/otherfile
   * assertThat(tested).endsWith(fs.getPath("joe/myfile/../otherfile"));
   * </code></pre>
   *
   * @param other the other path
   * @return self
   *
   * @throws NullPointerException if the given path is null.
   * @throws PathsException failed to canonicalize the tested path (see class
   *         description)
   *
   * @see Path#endsWith(Path)
   * @see Path#toRealPath(LinkOption...)
   */
  public S endsWith(final Path other) {
    // Canonicalizes the tested path and normalizes the argument before the
    // suffix check — mirrors startsWith(Path).
    paths.assertEndsWith(info, actual, other);
    return myself;
  }

  /**
   * Assert that the tested {@link Path} ends with the given path.
   *
   * <p>
   * <em>This assertion will not perform any canonicalization (on the
   * tested path) or normalization (on the path given as an argument); see the
   * class description for more details. If this is not what you want, use
   * {@link #endsWith(Path)} instead.</em>
   * </p>
   *
   * <p>
   * This may lead to some surprising results; for instance, path {@code /home/foo} does <em>not</em> end with
   * {@code foo/.} since the last name element of the former ({@code foo}) is different from the last name element of
   * the latter ({@code .}).
   * </p>
   *
   * <p>
   * Examples:
   * </p>
   *
   * <pre><code class="java">
   * // fs is a Unix filesystem
   * // the current directory is supposed to be /home.
   * final Path tested = fs.getPath("/home/joe/myfile");
   *
   * // The following assertion succeeds:
   * assertThat(tested).endsWithRaw(fs.getPath("joe/myfile"));
   *
   * // But the following assertion fails:
   * assertThat(tested).endsWithRaw(fs.getPath("harry/myfile"));
   * // and this one too as the given path is not normalized
   * assertThat(tested).endsWithRaw(fs.getPath("harry/../joe/myfile"));
   * </code></pre>
   *
   * @param other the other path
   * @return self
   *
   * @throws NullPointerException if the given path is null.
   *
   * @see Path#endsWith(Path)
   */
  public S endsWithRaw(final Path other) {
    // Purely syntactic suffix comparison; no filesystem access.
    paths.assertEndsWithRaw(info, actual, other);
    return myself;
  }
}
apache-2.0
freeVM/freeVM
enhanced/java/classlib/modules/luni/src/test/api/common/org/apache/harmony/luni/tests/internal/net/www/protocol/file/FileURLConnectionTest.java
4893
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.luni.tests.internal.net.www.protocol.file; import java.io.IOException; import java.net.URL; import java.net.URLConnection; import junit.framework.TestCase; import org.apache.harmony.luni.internal.net.www.protocol.file.FileURLConnection; /** * Tests for <code>FileURLConnection</code> class constructors and methods. 
*/ public class FileURLConnectionTest extends TestCase { static String getContentType(String fileName) throws IOException { String resourceName = "org/apache/harmony/luni/tests/" + fileName; URL url = ClassLoader.getSystemClassLoader().getResource(resourceName); assertNotNull("Cannot find test resource " + resourceName, url); return new FileURLConnection(url).getContentType(); } public void testGetContentType() throws IOException { // Regression for HARMONY-4699 assertEquals("application/rtf", getContentType("test.rtf")); assertEquals("text/plain", getContentType("test.java")); // RI would return "content/unknown" assertEquals("application/msword", getContentType("test.doc")); assertEquals("text/html", getContentType("test.htx")); assertEquals("application/xml", getContentType("test.xml")); assertEquals("text/plain", getContentType(".")); } public void testGetInputStream() throws IOException { // Regression for Harmony-5737 String resourceName = "org/apache/harmony/luni/tests/" + "test.rtf"; URL url = ClassLoader.getSystemClassLoader().getResource(resourceName); URL anchorUrl = new URL(url,"#anchor"); assertNotNull("Cannot find test resource " + resourceName, anchorUrl); FileURLConnection conn = new FileURLConnection(anchorUrl); assertNotNull(conn.getInputStream()); // Regression for Harmony-5779 String localURLString = "file://localhost/" + url.getFile(); URL localURL = new URL(localURLString); conn = new FileURLConnection(localURL); assertNotNull(conn.getInputStream()); assertEquals("file",conn.getURL().getProtocol()); } public void testHeaderFunctions() throws IOException { String resourceName = "org/apache/harmony/luni/tests/"; //folder name URL url = ClassLoader.getSystemClassLoader().getResource(resourceName); FileURLConnection conn = new FileURLConnection(url); assertNotNull(conn.getInputStream()); assertEquals(conn.getContentType(), conn.getHeaderField("content-type")) ; resourceName = "org/apache/harmony/luni/tests/" + "test.rtf";; //folder name url = 
ClassLoader.getSystemClassLoader().getResource(resourceName); conn = new FileURLConnection(url); assertNotNull(conn.getInputStream()); assertEquals(conn.getContentType(), conn.getHeaderField("content-type")) ; assertEquals(Integer.toString(conn.getContentLength()), conn.getHeaderField("content-length")) ; assertEquals(conn.getHeaderField(0), conn.getHeaderField("content-type")); assertEquals(conn.getHeaderField(1), conn.getHeaderField("content-length")); assertEquals(conn.getHeaderField(2), conn.getHeaderField("last-modified")); assertEquals("last-modified", conn.getHeaderFieldKey(2)); assertEquals("content-length", conn.getHeaderFieldKey(1)); assertEquals("content-type", conn.getHeaderFieldKey(0)); } public void testHeader_BoundaryCheck() throws IOException { String resourceName = "org/apache/harmony/luni/tests/"; URL url = ClassLoader.getSystemClassLoader().getResource(resourceName); URLConnection urlConnection = url.openConnection(); assertNull(urlConnection.getHeaderField(Integer.MIN_VALUE)); assertNull(urlConnection.getHeaderField(Integer.MAX_VALUE)); assertNull(urlConnection.getHeaderFieldKey(Integer.MIN_VALUE)); assertNull(urlConnection.getHeaderFieldKey(Integer.MAX_VALUE)); assertNull(urlConnection.getHeaderField(null)); } }
apache-2.0
jexp/idea2
plugins/cvs/cvs-plugin/src/com/intellij/cvsSupport2/ui/experts/importToCvs/ImportWizard.java
4007
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.cvsSupport2.ui.experts.importToCvs;

import com.intellij.CvsBundle;
import com.intellij.cvsSupport2.config.ImportConfiguration;
import com.intellij.cvsSupport2.cvsBrowser.CvsElement;
import com.intellij.cvsSupport2.cvsoperations.cvsImport.ImportDetails;
import com.intellij.cvsSupport2.ui.experts.CvsWizard;
import com.intellij.cvsSupport2.ui.experts.SelectCVSConfigurationStep;
import com.intellij.cvsSupport2.ui.experts.SelectCvsElementStep;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;

import javax.swing.tree.TreeSelectionModel;

/**
 * Wizard that guides the user through importing a local directory into a CVS
 * repository: pick a CVS configuration, the target repository directory, the
 * local directory, keyword substitutions, and finally the import settings.
 *
 * author: lesya
 */
public class ImportWizard extends CvsWizard {
  private final SelectCVSConfigurationStep myConfigurationStep;
  private final SelectCvsElementStep myCvsElementStep;
  private final SelectImportLocationStep myImportLocationStep;
  private final CustomizeKeywordSubstitutionStep myKeywordSubstitutionStep;
  private final ImportSettingsStep mySettingsStep;

  public ImportWizard(Project project, VirtualFile selectedFile) {
    super(CvsBundle.message("dialog.title.import.into.cvs"), project);
    final ImportConfiguration settings = ImportConfiguration.getInstance();

    myConfigurationStep = new SelectCVSConfigurationStep(project, this);
    myCvsElementStep = new SelectCvsElementStep(CvsBundle.message("dialog.title.select.directory.to.import.into"),
                                                this, project, myConfigurationStep, false,
                                                TreeSelectionModel.SINGLE_TREE_SELECTION, true, false);
    myImportLocationStep = new SelectImportLocationStep(CvsBundle.message("dialog.title.select.import.directory"),
                                                        this, project, selectedFile);
    myKeywordSubstitutionStep =
      new CustomizeKeywordSubstitutionStep(CvsBundle.message("dialog.title.customize.keyword.substitutions"),
                                           this, settings);
    mySettingsStep = new ImportSettingsStep(this, myImportLocationStep, settings);

    // Register the steps in presentation order.
    addStep(myConfigurationStep);
    addStep(myCvsElementStep);
    addStep(myImportLocationStep);
    addStep(myKeywordSubstitutionStep);
    addStep(mySettingsStep);

    init();
  }

  /**
   * Collects the user's choices from every step into an {@link ImportDetails}
   * describing the import operation to run.
   */
  public ImportDetails createImportDetails() {
    final CvsElement targetDirectory = myCvsElementStep.getSelectedCvsElement();
    final String name = mySettingsStep.getModuleName();
    final String targetPath = targetDirectory.getElementPath();

    // Importing into the repository root ("."): the module name stands alone;
    // otherwise it is nested under the selected repository directory.
    final String moduleNameInCvs;
    if (targetPath.equals(".")) {
      moduleNameInCvs = name;
    }
    else {
      moduleNameInCvs = targetPath + "/" + name;
    }

    return new ImportDetails(myImportLocationStep.getSelectedFile(),
                             mySettingsStep.getVendor(),
                             mySettingsStep.getReleaseTag(),
                             mySettingsStep.getLogMessage(),
                             moduleNameInCvs,
                             myConfigurationStep.getSelectedConfiguration(),
                             myKeywordSubstitutionStep.getFileExtensions(),
                             myImportLocationStep.getIgnoreFileFilter());
  }

  protected String getHelpID() {
    return "cvs.import";
  }
}
apache-2.0
Tamicer/Novate
example/src/main/java/com/tamic/excemple/ExempleActivity.java
22196
package com.tamic.excemple; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.Toast; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.tamic.excemple.model.MovieModel; import com.tamic.excemple.model.ResultModel; import com.tamic.excemple.model.SouguBean; import com.tamic.novate.NovateResponse; import com.tamic.novate.BaseSubscriber; import com.tamic.novate.Novate; import com.tamic.novate.RxApiManager; import com.tamic.novate.Throwable; import com.tamic.novate.callback.RxStringCallback; import com.tamic.novate.download.DownLoadCallBack; import com.tamic.novate.download.UpLoadCallback; import com.tamic.novate.request.NovateRequestBody; import com.tamic.novate.util.FileUtil; import com.tamic.novate.util.LogWraper; import com.tamic.novate.util.Utils; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Type; import java.util.HashMap; import java.util.Map; import okhttp3.MediaType; import okhttp3.MultipartBody; import okhttp3.RequestBody; import okhttp3.ResponseBody; import rx.Subscription; /** * Created by Tamic on 2016-06-15. 
 * {@link # https://github.com/NeglectedByBoss/Novate
 *
 * @link # http://blog.csdn.net/sk719887916
 * }
 */
public class ExempleActivity extends AppCompatActivity {

    // Base URL used by the default Novate client built in onCreate().
    String baseUrl = "http://ip.taobao.com/";

    private Novate novate;
    // Query parameters / headers shared by several demo calls. NOTE(review):
    // some methods rebuild `novate` and these maps locally — confirm which
    // client each button actually exercises before relying on this state.
    private Map<String, Object> parameters = new HashMap<String, Object>();
    private Map<String, String> headers = new HashMap<>();
    private Button btn, btn_test, btn_get, btn_post, btn_download, btn_download_Min,
            btn_upload, btn_uploadfile, btn_myApi, btn_more;
    private ProgressDialog mProgressDialog;
    // Filled by performDownMin() on success; then used as the upload source path.
    String uploadPath = "";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_exemple);
        // UI references
        btn = (Button) findViewById(R.id.bt_simple);
        btn_test = (Button) findViewById(R.id.bt_test);
        btn_get = (Button) findViewById(R.id.bt_get);
        btn_post = (Button) findViewById(R.id.bt_post);
        btn_download = (Button) findViewById(R.id.bt_download);
        btn_upload = (Button) findViewById(R.id.bt_upload);
        btn_download_Min = (Button) findViewById(R.id.bt_download_min);
        btn_uploadfile = (Button) findViewById(R.id.bt_uploadflie);
        btn_myApi = (Button) findViewById(R.id.bt_my_api);
        btn_more = (Button) findViewById(R.id.bt_more);
        initProgress(this);
        parameters.put("ip", "21.22.11.33");
        headers.put("Accept", "application/json");
        // Default client: 20s connect / 15s write timeouts, caching and logging on.
        novate = new Novate.Builder(this)
                //.addParameters(parameters)
                .connectTimeout(20)
                .writeTimeout(15)
                .baseUrl(baseUrl)
                .addHeader(headers)
                .addCache(true)
                .addLog(true)
                .build();
        // Wire each demo button to its corresponding request method.
        btn_test.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performTest();
            }
        });
        btn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                perform();
            }
        });
        btn_get.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performGet();
            }
        });
        btn_post.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performPost();
            }
        });
        btn_myApi.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                perform_Api();
            }
        });
        btn_download.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performDown();
            }
        });
        btn_download_Min.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performDownMin();
            }
        });
        btn_upload.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performUpLoadImage();
            }
        });
        btn_uploadfile.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performUpLoadFlie();
            }
        });
        btn_more.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startActivity(new Intent(ExempleActivity.this, RequstActivity.class));
            }
        });
    }

    /**
     * Initializes the shared progress dialog (translated: "初始化进度条"):
     * horizontal style, max 100, cancelable.
     */
    private void initProgress(Context aContext) {
        if (mProgressDialog == null) {
            mProgressDialog = new ProgressDialog(aContext);
        }
        mProgressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
        mProgressDialog.setTitle("温馨提示");
        mProgressDialog.setMax(100);
        mProgressDialog.setMessage("正在上传中...");
        mProgressDialog.setCancelable(true);
    }

    // Shows the dialog unless the activity is already finishing.
    private void showPressDialog() {
        if (mProgressDialog == null || this.isFinishing()) {
            return;
        }
        mProgressDialog.show();
    }

    private void dismissProgressDialog() {
        if (mProgressDialog != null && mProgressDialog.isShowing()) {
            mProgressDialog.dismiss();
        }
    }

    // Updates the bar, re-showing the dialog if it was hidden.
    private void updateProgressDialog(int progress) {
        if (mProgressDialog != null) {
            if (!mProgressDialog.isShowing()) {
                showPressDialog();
            }
            mProgressDialog.setProgress(progress);
        }
    }

    /**
     * Demo: raw request via novate.test(), with the returned Subscription
     * registered in RxApiManager under the tag "test".
     */
    private void performTest() {
        //http://apis.baidu.com/apistore/weatherservice/cityname?cityname=上海
        Map<String, String> headers = new HashMap<>();
        headers.put("apikey", "27b6fb21f2b42e9d70cd722b2ed038a9");
        headers.put("Accept", "application/json");
        // Rebuilds the client against the Baidu API host. Note addHeader is
        // called twice with the same map in the original code.
        novate = new Novate.Builder(this)
                .addHeader(headers)
                .addParameters(parameters)
                .connectTimeout(5)
                .baseUrl("https://apis.baidu.com/")
                .addHeader(headers)
                .addLog(true)
                .build();
        Subscription subscription = novate.test("https://apis.baidu.com/apistore/weatherservice/cityname?cityname=上海", null,
                new MyBaseSubscriber<ResponseBody>(ExempleActivity.this) {
                    @Override
                    public void onError(Throwable e) {
                        Log.e("OkHttp", e.getMessage());
                        Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                    }

                    @Override
                    public void onNext(ResponseBody responseBody) {
                        try {
                            Toast.makeText(ExempleActivity.this, new String(responseBody.bytes()), Toast.LENGTH_SHORT).show();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                });
        RxApiManager.get().add("test", subscription);
        //cancel
        RxApiManager.get().cancel("my");
    }

    @Override
    protected void onPause() {
        super.onPause();
        //RxApiManager.get().cancel("my");
    }

    /**
     * Demo: GET of douban's top250 movie list, parsed manually with Gson.
     * http://www.dianpingmedia.com/framework/web/user/unauth/login
     */
    private void perform() {
        parameters = new HashMap<>();
        /*start=0&count=5*/
        parameters.put("start", "0");
        parameters.put("count", "1");
        // NOTE(review): this local map shadows the field of the same name from
        // here on; the request below uses the local one.
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("mobileNumber", "18826412577");
        parameters.put("loginPassword", "123456");
        novate = new Novate.Builder(this)
                .addParameters(parameters)
                .connectTimeout(10)
                .baseUrl("http://api.douban.com/")
                //.addApiManager(ApiManager.class)
                .addLog(true)
                .build();
        novate.get("v2/movie/top250", parameters, new BaseSubscriber<ResponseBody>() {
            @Override
            public void onError(Throwable e) {
                Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onNext(ResponseBody responseBody) {
                try {
                    String jstr = new String(responseBody.bytes());
                    Type type = new TypeToken<MovieModel>() {
                    }.getType();
                    MovieModel response = new Gson().fromJson(jstr, type);
                    Toast.makeText(ExempleActivity.this, response.toString(), Toast.LENGTH_SHORT).show();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * performGet
     * Deprecated (translated: "已废弃").
     */
    private void performGet() {
        /**
         * Translated: if you do not need the data parsed before it is returned,
         * call novate.Get() instead — see performPost() for that style.
         */
        novate.executeGet("service/getIpInfo.php", parameters, new Novate.ResponseCallBack<ResultModel>() {
            @Override
            public void onStart() {
                // todo onStart
            }

            @Override
            public void onCompleted() {
            }

            @Override
            public void onError(Throwable e) {
                Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onSuccee(NovateResponse<ResultModel> response) {
            }

            @Override
            public void onsuccess(int code, String msg, ResultModel response, String originalResponse) {
                Toast.makeText(ExempleActivity.this, response.toString(), Toast.LENGTH_SHORT).show();
            }
        });
    }

    /**
     * Demo: POST with manual response parsing.
     */
    private void performPost() {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("ip", "21.22.11.33");
        novate = new Novate.Builder(this)
                .connectTimeout(8)
                .baseUrl(baseUrl)
                //.addApiManager(ApiManager.class)
                .addLog(true)
                .build();
        /**
         * Translated: with post() you must parse the data yourself; if you want
         * it parsed before it is returned, call novate.executeGet() instead —
         * see performGet() for that style.
         */
        novate.post("service/getIpInfo.php", parameters, new MyBaseSubscriber<ResponseBody>(ExempleActivity.this) {
            @Override
            public void onError(Throwable e) {
                if (!TextUtils.isEmpty(e.getMessage())) {
                    Log.e("OkHttp", e.getMessage());
                    Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                }
            }

            @Override
            public void onNext(ResponseBody responseBody) {
                try {
                    String jstr = new String(responseBody.bytes());
                    if (jstr.trim().isEmpty()) {
                        return;
                    }
                    Type type = new TypeToken<NovateResponse<ResultModel>>() {
                    }.getType();
                    NovateResponse<ResultModel> response = new Gson().fromJson(jstr, type);
                    if (response.getData() != null) {
                        Toast.makeText(ExempleActivity.this, response.getData().toString(), Toast.LENGTH_SHORT).show();
                    }
                    Toast.makeText(ExempleActivity.this, jstr, Toast.LENGTH_SHORT).show();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Demo: call through a user-defined Retrofit-style API interface (MyAPI).
     * HostUrl = "http://lbs.sougu.net.cn/";
     */
    private void perform_Api() {
        parameters.clear();
        parameters.put("m", "souguapp");
        parameters.put("c", "appusers");
        parameters.put("a", "network");
        novate = new Novate.Builder(this)
                .addHeader(headers)
                .addParameters(parameters)
                .baseUrl("http://lbs.sougu.net.cn/")
                .addLog(true)
                .build();
        MyAPI myAPI = novate.create(MyAPI.class);
        novate.call(myAPI.getSougu(parameters), new MyBaseSubscriber<SouguBean>(ExempleActivity.this) {
            @Override
            public void onError(Throwable e) {
                Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onNext(SouguBean souguBean) {
                Toast.makeText(ExempleActivity.this, souguBean.toString(), Toast.LENGTH_SHORT).show();
            }
        });
    }

    /**
     * Demo: upload an image via a raw request body, reporting progress to the
     * shared dialog. NOTE(review): `url` is empty here — presumably filled in
    * by the reader; confirm before running this sample.
     */
    private void performUpLoadImage() {
        String mPath = uploadPath; //"you File path ";
        String url = "";
        RequestBody requestFile = RequestBody.create(MediaType.parse("image/jpg"), new File(mPath));
        // Wrap the body so upload progress is observable.
        final NovateRequestBody requestBody = Utils.createNovateRequestBody(requestFile, new UpLoadCallback() {
            @Override
            public void onProgress(Object tag, int progress, long speed, boolean done) {
                LogWraper.d("uplaod", "tag:" + tag.toString() + "progress:" + progress);
                updateProgressDialog(progress);
            }
        });
        novate.rxUploadWithBody(url, requestBody, new RxStringCallback() {
            @Override
            public void onStart(Object tag) {
                super.onStart(tag);
                showPressDialog();
            }

            @Override
            public void onNext(Object tag, String response) {
                LogWraper.d("novate", response);
                Toast.makeText(ExempleActivity.this, "成功", Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onError(Object tag, Throwable e) {
                Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                dismissProgressDialog();
            }

            @Override
            public void onCancel(Object tag, Throwable e) {
            }

            @Override
            public void onCompleted(Object tag) {
                super.onCompleted(tag);
                dismissProgressDialog();
            }
        });
    }

    /**
     * Demo: upload a single file as one multipart part.
     */
    private void performUpLoadFlie() {
        String mPath = uploadPath; //"you File path ";
        String url = "";
        File file = new File(mPath);
        RequestBody requestFile = RequestBody.create(MediaType.parse("multipart/form-data; charset=utf-8"), file);
        final NovateRequestBody requestBody = Utils.createNovateRequestBody(requestFile, new UpLoadCallback() {
            @Override
            public void onProgress(Object tag, int progress, long speed, boolean done) {
                LogWraper.d("uplaod", "tag:" + tag.toString() + "progress:" + progress);
                updateProgressDialog(progress);
            }
        });
        MultipartBody.Part body2 = MultipartBody.Part.createFormData("image", file.getName(), requestBody);
        novate.rxUploadWithPart(url, body2, new RxStringCallback() {
            @Override
            public void onStart(Object tag) {
                super.onStart(tag);
                showPressDialog();
            }

            @Override
            public void onError(Object tag, Throwable e) {
                Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                dismissProgressDialog();
            }

            @Override
            public void onCancel(Object tag, Throwable e) {
            }

            @Override
            public void onNext(Object tag, String response) {
                LogWraper.d(response);
                Toast.makeText(ExempleActivity.this, "成功", Toast.LENGTH_SHORT).show();
                dismissProgressDialog();
            }
        });
    }

    /**
     * Demo: upload via a map of multipart parts. Not wired to any button.
     */
    private void performUpLoadFlies() {
        String path = uploadPath; //"you File path ";
        String url = "http://workflow.tjcclz.com/GWWorkPlatform/NoticeServlet?GWType=wifiUploadFile";
        File file = new File(path);
        // Translated: create the RequestBody used to wrap the request body.
        RequestBody requestFile = RequestBody.create(MediaType.parse("multipart/form-data"), file);
        final NovateRequestBody requestBody = Utils.createNovateRequestBody(requestFile, new UpLoadCallback() {
            @Override
            public void onProgress(Object tag, int progress, long speed, boolean done) {
                LogWraper.d("uplaod", "tag:" + tag.toString() + "progress:" + progress);
                updateProgressDialog(progress);
            }
        });
        MultipartBody.Part part = MultipartBody.Part.createFormData("image", file.getName(), requestBody);
        Map<String, MultipartBody.Part> maps = new HashMap<>();
        maps.put("image", part);
        novate.rxUploadWithPartMap(url, maps, new RxStringCallback() {
            @Override
            public void onStart(Object tag) {
                super.onStart(tag);
                showPressDialog();
            }

            @Override
            public void onNext(Object tag, String response) {
                LogWraper.d("novate", response);
                Toast.makeText(ExempleActivity.this, "成功", Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onError(Object tag, Throwable e) {
                Toast.makeText(ExempleActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                dismissProgressDialog();
            }

            @Override
            public void onCancel(Object tag, Throwable e) {
            }
        });
    }

    /**
     * Demo: download a large file (e.g. apk, video) with progress reporting.
     */
    private void performDown() {
        String downUrl = "http://wifiapi02.51y5.net/wifiapi/rd.do?f=wk00003&b=gwanz02&rurl=http%3A%2F%2Fdl.lianwifi.com%2Fdownload%2Fandroid%2FWifiKey-3091-guanwang.apk";
        novate.download(downUrl, "test.mei", new DownLoadCallBack() {
            @Override
            public void onStart(String s) {
                super.onStart(s);
                btn_download.setText("DownLoad cancel");
                showPressDialog();
            }

            @Override
            public void onError(Throwable e) {
                Toast.makeText(ExempleActivity.this, "onError:" + e.getMessage(), Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onProgress(String key, int progress, long fileSizeDownloaded, long totalSize) {
                super.onProgress(key, progress, fileSizeDownloaded, totalSize);
                Log.v("test", fileSizeDownloaded + "");
                //Toast.makeText(ExempleActivity.this, "progress: " + progress + " download: " + fileSizeDownloaded, Toast.LENGTH_SHORT).show();
                updateProgressDialog(progress);
            }

            @Override
            public void onSucess(String key, String path, String name, long fileSize) {
                Toast.makeText(ExempleActivity.this, "download onSucess", Toast.LENGTH_SHORT).show();
                dismissProgressDialog();
            }

            @Override
            public void onCancel() {
                super.onCancel();
                dismissProgressDialog();
            }
        });
    }

    /**
     * Demo: download a small file (e.g. image, txt). On success the saved path
     * is stored in uploadPath so the upload demos can reuse it.
     */
    private void performDownMin() {
        String downUrl = "https://ss0.bdstatic.com/5aV1bjqh_Q23odCf/static/superman/img/logo/bd_logo1_31bdc765.png";
        novate.downloadMin("tag", downUrl, "my.jpg", new DownLoadCallBack() {
            @Override
            public void onStart(String s) {
                super.onStart(s);
                btn_download.setText("DownLoadMin cancel");
                showPressDialog();
            }

            @Override
            public void onError(Throwable e) {
                Toast.makeText(ExempleActivity.this, "onError:" + e.getMessage(), Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onSucess(String key, String path, String name, long fileSize) {
                btn_download.setText("DownLoad start");
                uploadPath = path + name;
                dismissProgressDialog();
            }

            @Override
            public void onProgress(String key, int progress, long fileSizeDownloaded, long totalSize) {
                super.onProgress(key, progress, fileSizeDownloaded, totalSize);
                updateProgressDialog(progress);
                Toast.makeText(ExempleActivity.this, "progress: " + progress + " download: " + fileSizeDownloaded, Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onCancel() {
                super.onCancel();
                btn_download.setText("DownLoadMin start");
            }
        });
    }
}
apache-2.0
dkhwangbo/druid
processing/src/main/java/org/apache/druid/segment/ProgressIndicator.java
1018
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.segment; /** */ public interface ProgressIndicator { void progress(); void start(); void stop(); void startSection(String section); void stopSection(String section); }
apache-2.0
zhangxuxiuxiu/dtcassandra
tools/stress/src/org/apache/cassandra/stress/Session.java
17956
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.stress; import java.io.*; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.cli.*; import org.apache.cassandra.db.ColumnFamilyType; import org.apache.cassandra.thrift.*; import org.apache.commons.lang.StringUtils; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.transport.TFramedTransport; import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; public class Session { // command line options public static final Options availableOptions = new Options(); public final AtomicInteger operations; public final AtomicInteger keys; public final AtomicLong latency; static { availableOptions.addOption("h", "help", false, "Show this help message and exit"); availableOptions.addOption("n", "num-keys", true, "Number of keys, default:1000000"); availableOptions.addOption("F", "num-different-keys", true, "Number of different keys (if < NUM-KEYS, the same key will re-used multiple times), default:NUM-KEYS"); availableOptions.addOption("N", "skip-keys", true, "Fraction of keys to skip 
initially, default:0"); availableOptions.addOption("t", "threads", true, "Number of threads to use, default:50"); availableOptions.addOption("c", "columns", true, "Number of columns per key, default:5"); availableOptions.addOption("S", "column-size", true, "Size of column values in bytes, default:34"); availableOptions.addOption("C", "cardinality", true, "Number of unique values stored in columns, default:50"); availableOptions.addOption("d", "nodes", true, "Host nodes (comma separated), default:locahost"); availableOptions.addOption("D", "nodesfile", true, "File containing host nodes (one per line)"); availableOptions.addOption("s", "stdev", true, "Standard Deviation Factor, default:0.1"); availableOptions.addOption("r", "random", false, "Use random key generator (STDEV will have no effect), default:false"); availableOptions.addOption("f", "file", true, "Write output to given file"); availableOptions.addOption("p", "port", true, "Thrift port, default:9160"); availableOptions.addOption("m", "unframed", false, "Use unframed transport, default:false"); availableOptions.addOption("o", "operation", true, "Operation to perform (INSERT, READ, RANGE_SLICE, INDEXED_RANGE_SLICE, MULTI_GET, COUNTER_ADD, COUNTER_GET), default:INSERT"); availableOptions.addOption("u", "supercolumns", true, "Number of super columns per key, default:1"); availableOptions.addOption("y", "family-type", true, "Column Family Type (Super, Standard), default:Standard"); availableOptions.addOption("K", "keep-trying", true, "Retry on-going operation N times (in case of failure). 
positive integer, default:10"); availableOptions.addOption("k", "keep-going", false, "Ignore errors inserting or reading (when set, --keep-trying has no effect), default:false"); availableOptions.addOption("i", "progress-interval", true, "Progress Report Interval (seconds), default:10"); availableOptions.addOption("g", "keys-per-call", true, "Number of keys to get_range_slices or multiget per call, default:1000"); availableOptions.addOption("l", "replication-factor", true, "Replication Factor to use when creating needed column families, default:1"); availableOptions.addOption("e", "consistency-level", true, "Consistency Level to use (ONE, QUORUM, LOCAL_QUORUM, EACH_QUORUM, ALL, ANY), default:ONE"); availableOptions.addOption("x", "create-index", true, "Type of index to create on needed column families (KEYS)"); availableOptions.addOption("R", "replication-strategy", true, "Replication strategy to use (only on insert if keyspace does not exist), default:org.apache.cassandra.locator.SimpleStrategy"); availableOptions.addOption("O", "strategy-properties", true, "Replication strategy properties in the following format <dc_name>:<num>,<dc_name>:<num>,..."); availableOptions.addOption("W", "no-replicate-on-write",false, "Set replicate_on_write to false for counters. 
Only counter add with CL=ONE will work"); availableOptions.addOption("V", "average-size-values", false, "Generate column values of average rather than specific size"); } private int numKeys = 1000 * 1000; private int numDifferentKeys = numKeys; private float skipKeys = 0; private int threads = 50; private int columns = 5; private int columnSize = 34; private int cardinality = 50; private String[] nodes = new String[] { "127.0.0.1" }; private boolean random = false; private boolean unframed = false; private int retryTimes = 10; private int port = 9160; private int superColumns = 1; private int progressInterval = 10; private int keysPerCall = 1000; private boolean replicateOnWrite = true; private boolean ignoreErrors = false; private PrintStream out = System.out; private IndexType indexType = null; private Stress.Operations operation = Stress.Operations.INSERT; private ColumnFamilyType columnFamilyType = ColumnFamilyType.Standard; private ConsistencyLevel consistencyLevel = ConsistencyLevel.ONE; private String replicationStrategy = "org.apache.cassandra.locator.SimpleStrategy"; private Map<String, String> replicationStrategyOptions = new HashMap<String, String>(); public final boolean averageSizeValues; // required by Gaussian distribution. 
// Parameters of the Gaussian key distribution; derived from numDifferentKeys in the constructor below.
protected int mean;
protected float sigma;

/**
 * Parses the stress-tool command line into this session's settings.
 * Unrecognized positional arguments abort the process; -h/--help is signalled
 * to the caller by throwing IllegalArgumentException("help").
 *
 * @param arguments raw command-line arguments
 * @throws IllegalArgumentException on parse failure or when help was requested
 */
public Session(String[] arguments) throws IllegalArgumentException {
    // Standard deviation factor for the Gaussian distribution; overridable via -s.
    float STDev = 0.1f;
    CommandLineParser parser = new PosixParser();
    try {
        CommandLine cmd = parser.parse(availableOptions, arguments);
        // Only options are accepted; any bare argument is a usage error.
        if (cmd.getArgs().length > 0) {
            System.err.println("Application does not allow arbitrary arguments: " + StringUtils.join(cmd.getArgList(), ", "));
            System.exit(1);
        }
        if (cmd.hasOption("h"))
            throw new IllegalArgumentException("help");
        // Key-space sizing options.
        if (cmd.hasOption("n"))
            numKeys = Integer.parseInt(cmd.getOptionValue("n"));
        if (cmd.hasOption("F"))
            numDifferentKeys = Integer.parseInt(cmd.getOptionValue("F"));
        else
            numDifferentKeys = numKeys;
        if (cmd.hasOption("N"))
            skipKeys = Float.parseFloat(cmd.getOptionValue("N"));
        // Workload shape options.
        if (cmd.hasOption("t"))
            threads = Integer.parseInt(cmd.getOptionValue("t"));
        if (cmd.hasOption("c"))
            columns = Integer.parseInt(cmd.getOptionValue("c"));
        if (cmd.hasOption("S"))
            columnSize = Integer.parseInt(cmd.getOptionValue("S"));
        if (cmd.hasOption("C"))
            cardinality = Integer.parseInt(cmd.getOptionValue("C"));
        // Target nodes: -d takes a comma-separated list, -D reads one host per line from a file.
        if (cmd.hasOption("d"))
            nodes = cmd.getOptionValue("d").split(",");
        if (cmd.hasOption("D")) {
            try {
                String node = null;
                List<String> tmpNodes = new ArrayList<String>();
                BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(cmd.getOptionValue("D"))));
                while ((node = in.readLine()) != null) {
                    // Blank lines in the nodes file are skipped.
                    if (node.length() > 0)
                        tmpNodes.add(node);
                }
                nodes = tmpNodes.toArray(new String[tmpNodes.size()]);
                in.close();
            } catch(IOException ioe) {
                throw new RuntimeException(ioe);
            }
        }
        if (cmd.hasOption("s"))
            STDev = Float.parseFloat(cmd.getOptionValue("s"));
        if (cmd.hasOption("r"))
            random = true;
        // -f redirects progress output to a file; on failure we keep writing to stdout.
        if (cmd.hasOption("f")) {
            try {
                out = new PrintStream(new FileOutputStream(cmd.getOptionValue("f")));
            } catch (FileNotFoundException e) {
                System.out.println(e.getMessage());
            }
        }
        // Thrift connection options.
        if (cmd.hasOption("p"))
            port = Integer.parseInt(cmd.getOptionValue("p"));
        if (cmd.hasOption("m"))
            unframed = Boolean.parseBoolean(cmd.getOptionValue("m"));
        if (cmd.hasOption("o"))
            operation = Stress.Operations.valueOf(cmd.getOptionValue("o").toUpperCase());
        if (cmd.hasOption("u"))
            superColumns = Integer.parseInt(cmd.getOptionValue("u"));
        if (cmd.hasOption("y"))
            columnFamilyType = ColumnFamilyType.valueOf(cmd.getOptionValue("y"));
        // -K (--keep-trying) must be positive; -k (--keep-going) overrides it to a single
        // attempt and turns on error tolerance instead.
        if (cmd.hasOption("K")) {
            retryTimes = Integer.valueOf(cmd.getOptionValue("K"));
            if (retryTimes <= 0) {
                throw new RuntimeException("--keep-trying option value should be > 0");
            }
        }
        if (cmd.hasOption("k")) {
            retryTimes = 1;
            ignoreErrors = true;
        }
        if (cmd.hasOption("i"))
            progressInterval = Integer.parseInt(cmd.getOptionValue("i"));
        if (cmd.hasOption("g"))
            keysPerCall = Integer.parseInt(cmd.getOptionValue("g"));
        if (cmd.hasOption("e"))
            consistencyLevel = ConsistencyLevel.valueOf(cmd.getOptionValue("e").toUpperCase());
        if (cmd.hasOption("x"))
            indexType = IndexType.valueOf(cmd.getOptionValue("x").toUpperCase());
        // Replication configuration: explicit -l wins; otherwise SimpleStrategy defaults to RF=1.
        if (cmd.hasOption("R"))
            replicationStrategy = cmd.getOptionValue("R");
        if (cmd.hasOption("l"))
            replicationStrategyOptions.put("replication_factor", String.valueOf(Integer.parseInt(cmd.getOptionValue("l"))));
        else if (replicationStrategy.endsWith("SimpleStrategy"))
            replicationStrategyOptions.put("replication_factor", "1");
        // -O takes "<dc>:<num>,<dc>:<num>,..." pairs for NetworkTopology-style strategies.
        if (cmd.hasOption("O")) {
            String[] pairs = StringUtils.split(cmd.getOptionValue("O"), ',');
            for (String pair : pairs) {
                String[] keyAndValue = StringUtils.split(pair, ':');
                if (keyAndValue.length != 2)
                    throw new RuntimeException("Invalid --strategy-properties value.");
                replicationStrategyOptions.put(keyAndValue[0], keyAndValue[1]);
            }
        }
        if (cmd.hasOption("W"))
            replicateOnWrite = false;
        averageSizeValues = cmd.hasOption("V");
    } catch (ParseException e) {
        throw new IllegalArgumentException(e.getMessage(), e);
    }
    // Gaussian parameters are centered on the key population.
    mean = numDifferentKeys / 2;
    sigma = numDifferentKeys * STDev;
    // NOTE(review): operations/keys/latency are declared outside this chunk —
    // presumably shared progress counters; confirm against the full class.
    operations = new AtomicInteger();
    keys = new AtomicInteger();
    latency = new AtomicLong();
}

// Plain accessors for the parsed settings.
public int getCardinality() {
    return cardinality;
}

public int getColumnSize() {
    return columnSize;
}

public boolean isUnframed() {
    return unframed;
}

public int getColumnsPerKey() {
    return columns;
}

public ColumnFamilyType getColumnFamilyType() {
    return columnFamilyType;
}

public int getNumKeys() {
    return numKeys;
}

public int getNumDifferentKeys() {
    return numDifferentKeys;
}

public int getThreads() {
    return threads;
}

public float getSkipKeys() {
    return skipKeys;
}

public int getSuperColumns() {
    return superColumns;
}

// Integer division: a remainder of numKeys % threads keys is not assigned to any thread.
public int getKeysPerThread() {
    return numKeys / threads;
}

// Width (in decimal digits) used when zero-padding generated key names.
public int getTotalKeysLength() {
    return Integer.toString(numDifferentKeys).length();
}

public ConsistencyLevel getConsistencyLevel() {
    return consistencyLevel;
}

public int getRetryTimes() {
    return retryTimes;
}

public boolean ignoreErrors() {
    return ignoreErrors;
}

public Stress.Operations getOperation() {
    return operation;
}

public PrintStream getOutputStream() {
    return out;
}

public int getProgressInterval() {
    return progressInterval;
}

public boolean useRandomGenerator() {
    return random;
}

public int getKeysPerCall() {
    return keysPerCall;
}

// required by Gaussian distribution
public int getMean() {
    return mean;
}

// required by Gaussian distribution
public float getSigma() {
    return sigma;
}

/**
 * Create Keyspace1 with Standard1 and Super1 column families
 * (plus Counter1/SuperCounter1 for counter workloads), then sleeps one
 * second per node so the schema can propagate. Failures are reported on
 * the configured output stream rather than thrown.
 */
public void createKeySpaces() {
    KsDef keyspace = new KsDef();
    // column family for standard columns
    CfDef standardCfDef = new CfDef("Keyspace1", "Standard1");
    standardCfDef.setComparator_type("AsciiType").setDefault_validation_class("BytesType");
    if (indexType != null) {
        ColumnDef standardColumn = new ColumnDef(ByteBufferUtil.bytes("C1"), "BytesType");
        standardColumn.setIndex_type(indexType).setIndex_name("Idx1");
        standardCfDef.setColumn_metadata(Arrays.asList(standardColumn));
    }
    // column family with super columns
    CfDef superCfDef = new CfDef("Keyspace1", "Super1").setColumn_type("Super");
    superCfDef.setComparator_type("AsciiType").setSubcomparator_type("AsciiType").setDefault_validation_class("BytesType");
    // column family for standard counters
    CfDef counterCfDef = new CfDef("Keyspace1", "Counter1").setDefault_validation_class("CounterColumnType").setReplicate_on_write(replicateOnWrite);
    // column family with counter super columns
    CfDef counterSuperCfDef = new CfDef("Keyspace1", "SuperCounter1").setDefault_validation_class("CounterColumnType").setReplicate_on_write(replicateOnWrite).setColumn_type("Super");
    keyspace.setName("Keyspace1");
    keyspace.setStrategy_class(replicationStrategy);
    if (!replicationStrategyOptions.isEmpty()) {
        keyspace.setStrategy_options(replicationStrategyOptions);
    }
    keyspace.setCf_defs(new ArrayList<CfDef>(Arrays.asList(standardCfDef, superCfDef, counterCfDef, counterSuperCfDef)));
    // Keyspace is not set on this connection yet — it is being created here.
    Cassandra.Client client = getClient(false);
    try {
        client.system_add_keyspace(keyspace);
        out.println(String.format("Created keyspaces. Sleeping %ss for propagation.", nodes.length));
        Thread.sleep(nodes.length * 1000); // seconds
    } catch (InvalidRequestException e) {
        // Most likely the keyspace already exists; report and continue.
        out.println("Unable to create stress keyspace: " + e.getWhy());
    } catch (Exception e) {
        out.println(e.getMessage());
    }
}

/**
 * Thrift client connection with Keyspace1 set.
 * @return cassandra client connection
 */
public Cassandra.Client getClient() {
    return getClient(true);
}

/**
 * Thrift client connection
 * @param setKeyspace - should we set keyspace for client or not
 * @return cassandra client connection
 */
public Cassandra.Client getClient(boolean setKeyspace) {
    // random node selection for fake load balancing
    String currentNode = nodes[Stress.randomizer.nextInt(nodes.length)];
    TSocket socket = new TSocket(currentNode, port);
    // Framed transport is the default; -m selects the bare socket instead.
    TTransport transport = (isUnframed()) ? socket : new TFramedTransport(socket);
    Cassandra.Client client = new Cassandra.Client(new TBinaryProtocol(transport));
    try {
        transport.open();
        if (setKeyspace) {
            client.set_keyspace("Keyspace1");
        }
    } catch (InvalidRequestException e) {
        // NOTE(review): wrapping only the message/why drops the original stack trace;
        // consider RuntimeException(e) to preserve the cause.
        throw new RuntimeException(e.getWhy());
    } catch (Exception e) {
        throw new RuntimeException(e.getMessage());
    }
    return client;
}
}
apache-2.0
alittlemind/junior
chapter_010/mapping/src/main/java/carstore/models/Image.java
823
package carstore.models; /** * @author Evgeny Khodzitskiy (evgeny.hodz@gmail.com) * @since 26.08.2017 */ public class Image { /** * Image id. */ private int id; /** * Image url. */ private String url; /** * Default constructor. */ public Image() { } /** * Getter. * * @return id. */ public int getId() { return id; } /** * Setter. * * @param imageId image. */ public void setId(int imageId) { this.id = imageId; } /** * Url getter. * * @return url. */ public String getUrl() { return url; } /** * Setter. * * @param imageUrl url. */ public void setUrl(String imageUrl) { this.url = imageUrl; } }
apache-2.0
denvey/discuz_Android
libs/ZUtilsExt/src/com/kit/app/IDS.java
320
package com.kit.app;

/**
 * Holder for programmatically-generated Android view identifiers.
 * All ids are offset from a common base (0x1989) so they stay unique
 * relative to each other.
 *
 * Created by Zhao on 15/11/18.
 */
public class IDS {

    /** "Go to top" button pinned to the left edge. */
    public static final int ID_RADIOBUTTON_SHOW_GO_TO_TOP_LEFT = 0x1989 + 0;

    /** "Go to top" button hidden. */
    public static final int ID_RADIOBUTTON_SHOW_GO_TO_TOP_HIDDEN = 0x1989 + 1;

    /** "Go to top" button pinned to the right edge. */
    public static final int ID_RADIOBUTTON_SHOW_GO_TO_TOP_RIGHT = 0x1989 + 2;

    /** Constant holder; not meant to be instantiated. */
    private IDS() {
    }
}
apache-2.0
kierarad/gocd
plugin-infra/go-plugin-access/src/main/java/com/thoughtworks/go/plugin/access/analytics/AnalyticsPluginInfoBuilder.java
2078
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.plugin.access.analytics; import com.thoughtworks.go.plugin.access.common.PluginInfoBuilder; import com.thoughtworks.go.plugin.domain.analytics.AnalyticsPluginInfo; import com.thoughtworks.go.plugin.domain.analytics.Capabilities; import com.thoughtworks.go.plugin.domain.common.Image; import com.thoughtworks.go.plugin.domain.common.PluggableInstanceSettings; import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class AnalyticsPluginInfoBuilder implements PluginInfoBuilder<AnalyticsPluginInfo> { private AnalyticsExtension extension; @Autowired public AnalyticsPluginInfoBuilder(AnalyticsExtension extension) { this.extension = extension; } @Override public AnalyticsPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) { Capabilities capabilities = capabilities(descriptor.id()); PluggableInstanceSettings pluginSettingsAndView = getPluginSettingsAndView(descriptor, extension); Image image = image(descriptor.id()); return new AnalyticsPluginInfo(descriptor, image, capabilities, pluginSettingsAndView); } private Capabilities capabilities(String pluginId) { return extension.getCapabilities(pluginId); } private Image image(String pluginId) { return extension.getIcon(pluginId); } }
apache-2.0
jpallas/beakerx
kernel/base/src/main/java/com/twosigma/beakerx/chart/xychart/XYChart.java
7240
/*
 * Copyright 2014 TWO SIGMA OPEN SOURCE, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.twosigma.beakerx.chart.xychart;

import com.twosigma.beakerx.chart.AbstractChart;
import com.twosigma.beakerx.chart.ChartToJson;
import com.twosigma.beakerx.chart.xychart.plotitem.ConstantBand;
import com.twosigma.beakerx.chart.xychart.plotitem.ConstantLine;
import com.twosigma.beakerx.chart.xychart.plotitem.Rasters;
import com.twosigma.beakerx.chart.xychart.plotitem.Text;
import com.twosigma.beakerx.chart.xychart.plotitem.XYGraphics;

import java.util.ArrayList;
import java.util.List;

/**
 * Base class for XY (cartesian) charts. Holds the plot items (graphics,
 * constant lines/bands, rasters, texts) and the x-axis configuration.
 * Every mutator pushes the new state to the front-end widget model via
 * sendModelUpdate, so the chart re-renders immediately.
 *
 * Note on naming: each property has both a conventional accessor (setXBound)
 * and a lower-camel variant (setxBound) kept for Groovy property access —
 * the variants simply delegate.
 */
abstract public class XYChart extends AbstractChart {

    // Plot items, in insertion order.
    private final List<XYGraphics> xyGraphics = new ArrayList<>();
    private final List<ConstantLine> constantLines = new ArrayList<>();
    private final List<ConstantBand> constantBands = new ArrayList<>();
    private final List<Rasters> rasters = new ArrayList<>();
    private final List<Text> texts = new ArrayList<>();

    // X-axis configuration; auto-ranging is on until an explicit bound is set.
    private boolean xAutoRange = true;
    private double xLowerBound;
    private double xUpperBound;
    private boolean logX = false;
    private double xLogBase = 10;
    // Level-of-detail threshold; null means "not set".
    private Integer lodThreshold = null;
    private boolean xTickLabelsVisible = true;
    private boolean yTickLabelsVisible = true;

    /** Adds a graphics item, tags it with this chart's type, and syncs the model. */
    public XYChart add(XYGraphics graphics) {
        graphics.setPlotType(this.getClass());
        this.xyGraphics.add(graphics);
        sendModelUpdate(ChartToJson.serializeXYGraphics(this.xyGraphics));
        return this;
    }

    /** Groovy << operator alias for add. */
    public XYChart leftShift(XYGraphics graphics) {
        return add(graphics);
    }

    public List<XYGraphics> getGraphics() {
        return this.xyGraphics;
    }

    public XYChart add(ConstantLine constantLine) {
        constantLine.setPlotType(this.getClass());
        this.constantLines.add(constantLine);
        sendModelUpdate(ChartToJson.serializeConstantLines(this.constantLines));
        return this;
    }

    public XYChart leftShift(ConstantLine constantLine) {
        return add(constantLine);
    }

    public List<ConstantLine> getConstantLines() {
        return constantLines;
    }

    // NOTE(review): unlike the other add overloads, ConstantBand is not tagged
    // with setPlotType here — confirm whether that is intentional.
    public XYChart add(ConstantBand constantBand) {
        this.constantBands.add(constantBand);
        sendModelUpdate(ChartToJson.serializeConstantBands(this.constantBands));
        return this;
    }

    public XYChart leftShift(ConstantBand constantBand) {
        return add(constantBand);
    }

    public List<ConstantBand> getConstantBands() {
        return constantBands;
    }

    public XYChart add(Text text) {
        text.setPlotType(this.getClass());
        this.texts.add(text);
        sendModelUpdate(ChartToJson.serializeTexts(this.texts));
        return this;
    }

    public XYChart leftShift(Text text) {
        return add(text);
    }

    public List<Text> getTexts() {
        return this.texts;
    }

    public XYChart add(Rasters raster) {
        this.rasters.add(raster);
        sendModelUpdate(ChartToJson.serializeRasters(this.rasters));
        return this;
    }

    public XYChart leftShift(Rasters raster) {
        return add(raster);
    }

    public List<Rasters> getRasters() {
        return this.rasters;
    }

    /**
     * Adds a heterogeneous list of plot items, dispatching each element to the
     * matching typed overload.
     */
    public XYChart add(List items) {
        for (Object o : items) {
            if (o instanceof Rasters) {
                add((Rasters) o);
            } else if (o instanceof XYGraphics) {
                add((XYGraphics) o);
            } else if (o instanceof ConstantLine) {
                add((ConstantLine) o);
            } else if (o instanceof ConstantBand) {
                add((ConstantBand) o);
            } else if (o instanceof Text) {
                add((Text) o);
            } else {
                // NOTE(review): this passes the WHOLE list to super.add for every
                // unmatched element — likely intended to be super.add(o)? Confirm
                // against AbstractChart.add before changing.
                super.add(items);
            }
        }
        return this;
    }

    public XYChart setXAutoRange(boolean xAutoRange) {
        this.xAutoRange = xAutoRange;
        sendModelUpdate(ChartToJson.serializeXAutoRange(this.xAutoRange));
        return this;
    }

    public XYChart setxAutoRange(boolean xAutoRange) {
        return this.setXAutoRange(xAutoRange);
    }

    public Boolean getXAutoRange() {
        return this.xAutoRange;
    }

    public Boolean getxAutoRange() {
        return getXAutoRange();
    }

    /** Fixes the x-axis range; implicitly disables auto-ranging. */
    public XYChart setXBound(double lower, double upper) {
        this.xAutoRange = false;
        this.xLowerBound = lower;
        this.xUpperBound = upper;
        sendModelUpdate(ChartToJson.serializeXBound(this));
        return this;
    }

    public XYChart setxBound(double lower, double upper) {
        return setXBound(lower, upper);
    }

    /** List form of setXBound; requires exactly [lower, upper]. */
    public XYChart setXBound(List<Number> bound) {
        if (bound.size() != 2) {
            throw new IllegalArgumentException("to set the x bound, the list needs to be of size=2");
        }
        Number n0 = bound.get(0);
        Number n1 = bound.get(1);
        setXBound(n0.doubleValue(), n1.doubleValue());
        return this;
    }

    public XYChart setxBound(List<Number> bound) {
        return this.setXBound(bound);
    }

    public Double getXLowerBound() {
        return this.xLowerBound;
    }

    public Double getxLowerBound() {
        return getXLowerBound();
    }

    public Double getXUpperBound() {
        return this.xUpperBound;
    }

    public Double getxUpperBound() {
        return getXUpperBound();
    }

    public XYChart setLogX(boolean logX) {
        this.logX = logX;
        sendModelUpdate(ChartToJson.serializeLogX(this.logX));
        return this;
    }

    public Boolean getLogX() {
        return this.logX;
    }

    public Double getXLogBase() {
        return xLogBase;
    }

    public Double getxLogBase() {
        return getXLogBase();
    }

    public XYChart setXLogBase(double xLogBase) {
        this.xLogBase = xLogBase;
        sendModelUpdate(ChartToJson.serializeXLogBase(this.xLogBase));
        return this;
    }

    public XYChart setxLogBase(double xLogBase) {
        return this.setXLogBase(xLogBase);
    }

    public Integer getLodThreshold() {
        return lodThreshold;
    }

    public void setLodThreshold(Integer lodThreshold) {
        this.lodThreshold = lodThreshold;
        sendModelUpdate(ChartToJson.serializeLodThreshold(this.lodThreshold));
    }

    public boolean isxTickLabelsVisible() {
        return xTickLabelsVisible;
    }

    public boolean isXTickLabelsVisible() {
        return isxTickLabelsVisible();
    }

    public void setxTickLabelsVisible(boolean xTickLabelsVisible) {
        this.xTickLabelsVisible = xTickLabelsVisible;
        sendModelUpdate(ChartToJson.serializeXTickLabelsVisible(this.xTickLabelsVisible));
    }

    public void setXTickLabelsVisible(boolean xTickLabelsVisible) {
        setxTickLabelsVisible(xTickLabelsVisible);
    }

    public boolean isyTickLabelsVisible() {
        return yTickLabelsVisible;
    }

    public boolean isYTickLabelsVisible() {
        return isyTickLabelsVisible();
    }

    public void setyTickLabelsVisible(boolean yTickLabelsVisible) {
        this.yTickLabelsVisible = yTickLabelsVisible;
        sendModelUpdate(ChartToJson.serializeYTickLabelsVisible(this.yTickLabelsVisible));
    }

    public void setYTickLabelsVisible(boolean yTickLabelsVisible) {
        setyTickLabelsVisible(yTickLabelsVisible);
    }
}
apache-2.0
ppatierno/kaas
cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/TolerationsIT.java
4055
/*
 * Copyright Strimzi authors.
 * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
 */
package io.strimzi.operator.cluster.operator.assembly;

import io.fabric8.kubernetes.api.model.LabelSelectorBuilder;
import io.fabric8.kubernetes.api.model.Toleration;
import io.fabric8.kubernetes.api.model.TolerationBuilder;
import io.fabric8.kubernetes.api.model.apps.StatefulSet;
import io.fabric8.kubernetes.api.model.apps.StatefulSetBuilder;
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.strimzi.operator.cluster.model.ModelUtils;
import io.strimzi.operator.cluster.operator.resource.StatefulSetDiff;
import io.strimzi.operator.common.Reconciliation;
import io.strimzi.test.k8s.KubeClusterResource;
import io.vertx.junit5.Checkpoint;
import io.vertx.junit5.VertxExtension;
import io.vertx.junit5.VertxTestContext;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;

/**
 * Integration test (runs against a real Kubernetes cluster) verifying that a
 * toleration with an empty-string value is normalised the same way by the
 * Cluster Operator and by Kubernetes itself, so the StatefulSet diff sees no
 * spurious change.
 */
@ExtendWith(VertxExtension.class)
public class TolerationsIT {
    protected KubeClusterResource cluster = KubeClusterResource.getInstance();

    // Dedicated namespace so the created StatefulSet cannot collide with other tests.
    private String namespace = "kafka-it-2";

    @BeforeEach
    public void beforeEach() {
        cluster.createNamespace(namespace);
    }

    @AfterEach
    public void afterEach() {
        cluster.deleteNamespaces();
    }

    @Test
    public void testEmptyStringValueIntoleration(VertxTestContext context) {
        // Toleration with value "" — Kubernetes stores this as null.
        Toleration t1 = new TolerationBuilder()
                .withEffect("NoSchedule")
                .withValue("")
                .build();

        List<Toleration> tolerationList = new ArrayList<>();
        tolerationList.add(t1);

        // CO does this over the generated STS
        tolerationList = ModelUtils.removeEmptyValuesFromTolerations(tolerationList);

        StatefulSet ss = new StatefulSetBuilder()
                .withNewMetadata()
                    .withNamespace(namespace)
                    .withName("foo")
                .endMetadata()
                .withNewSpec()
                    .withSelector(new LabelSelectorBuilder().withMatchLabels(Collections.singletonMap("app", "test")).build())
                    .withNewTemplate()
                        .withNewMetadata()
                            .withLabels(Collections.singletonMap("app", "test"))
                        .endMetadata()
                        .withNewSpec()
                            .withTolerations(tolerationList)
                            .withDnsPolicy("ClusterFirst")
                            .withRestartPolicy("Always")
                            .withSchedulerName("default-scheduler")
                            .withSecurityContext(null)
                            .withTerminationGracePeriodSeconds(30L)
                        .endSpec()
                    .endTemplate()
                .endSpec()
                .build();

        // Create the resource for real, then read back what the API server stored.
        KubernetesClient client = new DefaultKubernetesClient();
        client.apps().statefulSets().inNamespace(namespace).create(ss);
        StatefulSet stsk8s = client.apps().statefulSets().inNamespace(namespace).withName("foo").get();

        StatefulSetDiff diff = new StatefulSetDiff(Reconciliation.DUMMY_RECONCILIATION, ss, stsk8s);

        Checkpoint checkpoint = context.checkpoint();
        context.verify(() -> {
            // The normalised local copy and the server copy must agree: no
            // template diff, and both sides hold null (not "") as the value.
            assertThat(diff.changesSpecTemplate(), is(false));
            assertThat(stsk8s.getSpec().getTemplate().getSpec().getTolerations().get(0).getValue(), is(nullValue()));
            assertThat(ss.getSpec().getTemplate().getSpec().getTolerations().get(0).getValue(), is(nullValue()));
            checkpoint.flag();
        }
        );
    }
}
apache-2.0
nutzam/nutz
src/org/nutz/dao/util/cri/SimpleCriteria.java
4108
package org.nutz.dao.util.cri;

import org.nutz.dao.Condition;
import org.nutz.dao.Sqls;
import org.nutz.dao.entity.Entity;
import org.nutz.dao.impl.sql.pojo.AbstractPItem;
import org.nutz.dao.jdbc.ValueAdaptor;
import org.nutz.dao.pager.Pager;
import org.nutz.dao.sql.Criteria;
import org.nutz.dao.sql.GroupBy;
import org.nutz.dao.sql.OrderBy;
import org.nutz.dao.sql.Pojo;
import org.nutz.dao.util.lambda.PFun;

/**
 * A composite query criteria: a WHERE expression group plus optional
 * GROUP BY, ORDER BY, paging, and a free-form SQL fragment emitted before
 * the WHERE clause. SQL is always assembled in the order
 * beforeWhere / where / groupBy / orderBy.
 */
public class SimpleCriteria extends AbstractPItem implements Criteria, OrderBy, GroupBy {

    private static final long serialVersionUID = 1L;

    private SqlExpressionGroup where;
    private OrderBySet orderBy;
    private GroupBySet groupBy;
    private Pager pager;
    // Raw SQL fragment prepended before the WHERE clause, may be null.
    private String beforeWhere;

    public SimpleCriteria() {
        where = new SqlExpressionGroup();
        orderBy = new OrderBySet();
        groupBy = new GroupBySet();
    }

    public SimpleCriteria(String beforeWhere) {
        this();
        this.beforeWhere = beforeWhere;
    }

    // Appends the full criteria SQL to sb; clause order is fixed here.
    public void joinSql(Entity<?> en, StringBuilder sb) {
        if (beforeWhere != null)
            sb.append(beforeWhere);
        where.joinSql(en, sb);
        groupBy.joinSql(en, sb);
        orderBy.joinSql(en, sb);
    }

    public void setPojo(Pojo pojo) {
        where.setPojo(pojo);
        groupBy.setPojo(pojo);
        orderBy.setPojo(pojo);
    }

    public void setPager(int pageNumber, int pageSize) {
        pager = new Pager();
        pager.setPageNumber(pageNumber);
        pager.setPageSize(pageSize);
    }

    public void setPager(Pager pager) {
        this.pager = pager;
    }

    public Pager getPager() {
        return pager;
    }

    // Parameter plumbing is delegated to the WHERE group only; ORDER BY/GROUP BY
    // contribute no JDBC parameters here.
    public int joinAdaptor(Entity<?> en, ValueAdaptor[] adaptors, int off) {
        return where.joinAdaptor(en, adaptors, off);
    }

    public int joinParams(Entity<?> en, Object obj, Object[] params, int off) {
        return where.joinParams(en, obj, params, off);
    }

    public int paramCount(Entity<?> en) {
        return where.paramCount(en);
    }

    /**
     * Renders the criteria as literal SQL by splicing formatted parameter
     * values into the '?' placeholders (mainly for debugging/toString).
     * NOTE(review): splitting on '?' is naive — a literal '?' inside a value
     * or fragment would break the splice; confirm callers never pass one.
     */
    public String toSql(Entity<?> en) {
        Object[] params = new Object[this.paramCount(en)];
        int i = where.joinParams(en, null, params, 0);
        orderBy.joinParams(en, null, params, i);
        StringBuilder sb = new StringBuilder();
        this.joinSql(en, sb);
        String[] ss = sb.toString().split("[?]");
        sb = new StringBuilder();
        for (i = 0; i < params.length; i++) {
            sb.append(ss[i]);
            sb.append(Sqls.formatFieldValue(params[i]));
        }
        // Trailing SQL after the last placeholder, if any.
        if (i < ss.length)
            sb.append(ss[i]);
        return sb.toString();
    }

    public OrderBy asc(String name) {
        return orderBy.asc(name);
    }

    @Override
    public <T> OrderBy asc(PFun<T, ?> name) {
        return orderBy.asc(name);
    }

    public OrderBy desc(String name) {
        return orderBy.desc(name);
    }

    @Override
    public <T> OrderBy desc(PFun<T, ?> name) {
        return orderBy.desc(name);
    }

    public SqlExpressionGroup where() {
        return where;
    }

    // Note: each groupBy call REPLACES the previous GROUP BY set.
    public GroupBy groupBy(String...names) {
        groupBy = new GroupBySet(names);
        return this;
    }

    @Override
    public <T> GroupBy groupBy(PFun<T, ?>... names) {
        groupBy = new GroupBySet(names);
        return this;
    }

    public GroupBy having(Condition cnd) {
        groupBy.having(cnd);
        return this;
    }

    public OrderBy getOrderBy() {
        return orderBy;
    }

    public String toString() {
        return toSql(null);
    }

    // Convenience: direction string "asc" (case-insensitive) sorts ascending,
    // anything else descending.
    public OrderBy orderBy(String name, String dir) {
        if ("asc".equalsIgnoreCase(dir)) {
            this.asc(name);
        } else {
            this.desc(name);
        }
        return this;
    }

    @Override
    public <T> OrderBy orderBy(PFun<T, ?> name, String dir) {
        if ("asc".equalsIgnoreCase(dir)) {
            this.asc(name);
        } else {
            this.desc(name);
        }
        return this;
    }

    public GroupBy getGroupBy() {
        return groupBy;
    }

    public String getBeforeWhere() {
        return beforeWhere;
    }
}
apache-2.0
queencodemonkey/Audiohackathon-2015
app/src/test/java/com/audiohackathon/boardcast/ExampleUnitTest.java
316
package com.audiohack.boardcast;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Local JVM unit test (no device/emulator required).
 * To work on unit tests, switch the Test Artifact in the Build Variants view.
 */
public class ExampleUnitTest {

    /** Sanity check that the unit-test harness runs at all. */
    @Test
    public void addition_isCorrect() throws Exception {
        assertEquals(4, 2 + 2);
    }
}
apache-2.0
jk1/intellij-community
platform/platform-api/src/com/intellij/openapi/vfs/newvfs/NewVirtualFile.java
3707
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vfs.newvfs;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.openapi.vfs.encoding.EncodingRegistry;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.util.Collection;

/**
 * Base class for files of the "new" VFS implementation: a VirtualFile that
 * additionally carries a persistent numeric id and dirty/refresh state.
 *
 * @author max
 */
public abstract class NewVirtualFile extends VirtualFile implements VirtualFileWithId {

  // Validity requires the read lock and is equivalent to existence on disk/in VFS.
  @Override
  public boolean isValid() {
    ApplicationManager.getApplication().assertReadAccessAllowed();
    return exists();
  }

  // Default implementation: content is not available at this level; subclasses
  // that actually hold content override this.
  @Override
  @NotNull
  public byte[] contentsToByteArray() throws IOException {
    throw new IOException("Cannot get content of " + this);
  }

  @Override
  @NotNull
  public abstract NewVirtualFileSystem getFileSystem();

  @Override
  public abstract NewVirtualFile getParent();

  @Override
  @Nullable
  public abstract NewVirtualFile getCanonicalFile();

  // Child lookup variants: cached-only, refresh-then-find, and plain find.
  @Override
  @Nullable
  public abstract NewVirtualFile findChild(@NotNull @NonNls final String name);

  @Nullable
  public abstract NewVirtualFile refreshAndFindChild(@NotNull String name);

  @Nullable
  public abstract NewVirtualFile findChildIfCached(@NotNull String name);

  public abstract void setTimeStamp(final long time) throws IOException;

  @Override
  @NotNull
  public abstract CharSequence getNameSequence();

  @Override
  public abstract int getId();

  /** @deprecated no replacement visible here; returns null by default. */
  @Nullable
  @Deprecated
  public NewVirtualFile findChildById(int id) {return null;}

  // Delegates refresh to the global RefreshQueue.
  @Override
  public void refresh(final boolean asynchronous, final boolean recursive, final Runnable postRunnable) {
    RefreshQueue.getInstance().refresh(asynchronous, recursive, postRunnable, this);
  }

  @Override
  public abstract void setWritable(boolean writable) throws IOException;

  // Dirty-state management for the refresh machinery.
  public abstract void markDirty();

  public abstract void markDirtyRecursively();

  public abstract boolean isDirty();

  public abstract void markClean();

  /**
   * Moves this file under newParent, validating both ends first and
   * preserving the file's encoding across the move.
   *
   * @throws IOException if either side does not exist, the target is not a
   *         directory, or a child with this name already exists there
   */
  @Override
  public void move(final Object requestor, @NotNull final VirtualFile newParent) throws IOException {
    if (!exists()) {
      throw new IOException("File to move does not exist: " + getPath());
    }

    if (!newParent.exists()) {
      throw new IOException("Destination folder does not exist: " + newParent.getPath());
    }

    if (!newParent.isDirectory()) {
      throw new IOException("Destination is not a folder: " + newParent.getPath());
    }

    final VirtualFile child = newParent.findChild(getName());
    if (child != null) {
      throw new IOException("Destination already exists: " + newParent.getPath() + "/" + getName());
    }

    EncodingRegistry.doActionAndRestoreEncoding(this, () -> {
      getFileSystem().moveFile(requestor, this, newParent);
      return this;
    });
  }

  @NotNull
  public abstract Collection<VirtualFile> getCachedChildren();

  /** iterated children will NOT contain NullVirtualFile.INSTANCE */
  @NotNull
  public abstract Iterable<VirtualFile> iterInDbChildren();
}
apache-2.0
dkschlos/super-csv-declarative
super-csv-declarative/src/main/java/com/github/dmn1k/supercsv/io/declarative/annotation/ParseEnum.java
1580
/*
 * Copyright 2007 Kasper B. Graversen
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.dmn1k.supercsv.io.declarative.annotation;

import com.github.dmn1k.supercsv.io.declarative.CellProcessorAnnotationDescriptor;
import com.github.dmn1k.supercsv.io.declarative.ProcessorOrder;
import com.github.dmn1k.supercsv.io.declarative.StandardCsvContexts;
import com.github.dmn1k.supercsv.io.declarative.provider.ParseEnumCellProcessorProvider;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Annotation for the {@link org.supercsv.cellprocessor.ParseEnum}-cell processor.
 * Field-level and read-only (applies to the READ context); the actual processor
 * is instantiated by {@link ParseEnumCellProcessorProvider}.
 *
 * @since 2.5
 * @author Dominik Schlosser
 */
@CellProcessorAnnotationDescriptor(provider = ParseEnumCellProcessorProvider.class,
        contexts = {StandardCsvContexts.READ})
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
public @interface ParseEnum {
    // Whether enum-constant matching ignores case; off by default.
    boolean ignoreCase() default false;

    // Position of this processor in the cell-processor chain; UNDEFINED lets
    // the framework decide.
    int order() default ProcessorOrder.UNDEFINED;
}
apache-2.0
EBIvariation/eva
dgva-server/src/test/java/uk/ac/ebi/dgva/server/ws/StudyWSServerTest.java
3907
package uk.ac.ebi.dgva.server.ws;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.junit4.SpringRunner;
import uk.ac.ebi.eva.commons.core.models.StudyType;
import uk.ac.ebi.eva.lib.metadata.dgva.StudyDgvaDBAdaptor;
import uk.ac.ebi.eva.lib.models.VariantStudy;
import uk.ac.ebi.eva.lib.utils.QueryResponse;
import uk.ac.ebi.eva.lib.utils.QueryResult;

import java.net.URI;
import java.util.Arrays;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;

/**
 * Integration test for the study web-service endpoints: boots the application on a
 * random port, replaces the DB adaptor with a mock, and exercises
 * /v1/studies/{id}/summary for both an existing and a non-existing study id.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class StudyWSServerTest {

    @Autowired
    private TestRestTemplate restTemplate;

    // Mocked out so no real database is needed; stubbed in setUp().
    @MockBean
    private StudyDgvaDBAdaptor studyDgvaDBAdaptor;

    private static final String EXISTING_STUDY = "svS1";
    private static final String NOT_EXISTING_STUDY = "svS2";

    private VariantStudy svStudy1;

    @Before
    public void setUp() throws Exception {
        // Fixture study returned for the "existing" id; the non-existing id yields an empty result.
        svStudy1 = new VariantStudy("Human SV Test study 1", EXISTING_STUDY, null,
                "SV study 1 description", new int[]{9606}, "Human", "Homo Sapiens",
                "Germline", "EBI", "DNA", "multi-isolate", StudyType.CASE_CONTROL,
                "Exome Sequencing", "ES", "GRCh37", "GCA_000001405.3", "Illumina",
                new URI("http://www.s1.org"), new String[]{"10"}, 1000, 10, false);
        given(studyDgvaDBAdaptor.getStudyById(eq(EXISTING_STUDY), anyObject()))
                .willReturn(encapsulateInQueryResult(svStudy1));
        given(studyDgvaDBAdaptor.getStudyById(eq(NOT_EXISTING_STUDY), anyObject()))
                .willReturn(encapsulateInQueryResult());
    }

    // Wraps zero or more results in the QueryResult envelope the adaptor normally produces.
    private <T> QueryResult<T> encapsulateInQueryResult(T... results) {
        return new QueryResult<>(null, 0, results.length, results.length, null, null, Arrays.asList(results));
    }

    @Test
    public void getStudySummary() {
        QueryResponse<QueryResult<VariantStudy>> queryResponse = callEndpoint(
                "/v1/studies/" + EXISTING_STUDY + "/summary");
        assertEquals(1, queryResponse.getResponse().size());
        List<VariantStudy> results = queryResponse.getResponse().get(0).getResult();
        assertEquals(1, results.size());
        // Only id and name are checked; the rest of the fixture is assumed to pass through unchanged.
        assertEquals(svStudy1.getId(), results.get(0).getId());
        assertEquals(svStudy1.getName(), results.get(0).getName());
    }

    @Test
    public void getNotExistentStudySummary() {
        // An unknown study still returns HTTP 200 with an empty result list, not an error.
        QueryResponse<QueryResult<VariantStudy>> queryResponse = callEndpoint(
                "/v1/studies/" + NOT_EXISTING_STUDY + "/summary");
        assertEquals(1, queryResponse.getResponse().size());
        List<VariantStudy> results = queryResponse.getResponse().get(0).getResult();
        assertEquals(0, results.size());
    }

    // Performs the GET, asserts HTTP 200, and deserializes the generic response envelope.
    private QueryResponse<QueryResult<VariantStudy>> callEndpoint(String url) {
        ResponseEntity<QueryResponse<QueryResult<VariantStudy>>> response = restTemplate.exchange(
                url, HttpMethod.GET, null,
                new ParameterizedTypeReference<QueryResponse<QueryResult<VariantStudy>>>() {});
        assertEquals(HttpStatus.OK, response.getStatusCode());
        return response.getBody();
    }
}
apache-2.0
hardfish/justTest
cloudsigma2/src/main/java/org/jclouds/cloudsigma2/domain/IPConfigurationType.java
1353
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jclouds.cloudsigma2.domain;

import java.util.Objects;

/**
 * IP configuration modes as exchanged with the CloudSigma API.
 * Wire values are the lower-case enum names; unknown values map to
 * {@link #UNRECOGNIZED} instead of failing.
 */
public enum IPConfigurationType {
   DHCP, STATIC, MANUAL, UNRECOGNIZED;

   /** @return the lower-case wire representation of this constant. */
   public String value() {
      return name().toLowerCase();
   }

   @Override
   public String toString() {
      return value();
   }

   /**
    * Parses a wire value (case-insensitive) into a constant.
    *
    * @param type the wire value; must not be null
    * @return the matching constant, or {@link #UNRECOGNIZED} for unknown values
    * @throws NullPointerException if {@code type} is null
    */
   public static IPConfigurationType fromValue(String type) {
      try {
         // Objects.requireNonNull replaces Guava's checkNotNull with identical
         // semantics (NPE carrying the message "type"), dropping the third-party dependency.
         return valueOf(Objects.requireNonNull(type, "type").toUpperCase());
      } catch (IllegalArgumentException e) {
         return UNRECOGNIZED;
      }
   }
}
apache-2.0
jwren/intellij-community
platform/analysis-api/src/com/intellij/lang/LanguageDocumentation.java
1173
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.lang; import com.intellij.lang.documentation.CompositeDocumentationProvider; import com.intellij.lang.documentation.DocumentationProvider; import org.jetbrains.annotations.NotNull; import java.util.List; public final class LanguageDocumentation extends LanguageExtension<DocumentationProvider> { public static final LanguageDocumentation INSTANCE = new LanguageDocumentation(); private LanguageDocumentation() { super("com.intellij.lang.documentationProvider"); } /** * This method is left to preserve binary compatibility. */ @SuppressWarnings("RedundantMethodOverride") @Override public DocumentationProvider forLanguage(@NotNull final Language l) { return super.forLanguage(l); } @Override protected DocumentationProvider findForLanguage(@NotNull Language language) { List<DocumentationProvider> providers = allForLanguage(language); if (providers.isEmpty()) { return null; } return CompositeDocumentationProvider.wrapProviders(providers); } }
apache-2.0
apache/incubator-shardingsphere
shardingsphere-sql-parser/shardingsphere-sql-parser-engine/src/test/java/org/apache/shardingsphere/sql/parser/core/SQLParserEngineTest.java
2388
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.shardingsphere.sql.parser.core;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.shardingsphere.sql.parser.core.database.parser.SQLParserExecutor;
import org.junit.Test;

import javax.annotation.ParametersAreNonnullByDefault;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.times;

/**
 * Verifies that the parse-tree LoadingCache invokes the underlying
 * SQLParserExecutor exactly once per distinct SQL string: a second lookup of the
 * same key is served from the cache without re-parsing.
 */
public final class SQLParserEngineTest {

    @Test
    public void assertParse() {
        // Mocked executor: parsing "" yields a mock ParseContext.
        SQLParserExecutor sqlParserExecutor = mock(SQLParserExecutor.class);
        when(sqlParserExecutor.parse("")).thenReturn(mock(ParseContext.class));
        // Cache configured like the production parser cache; misses delegate to the executor.
        LoadingCache<String, ParseContext> parseTreeCache = CacheBuilder.newBuilder().softValues()
                .initialCapacity(128).maximumSize(1024).concurrencyLevel(4).build(new CacheLoader<String, ParseContext>() {

                    @ParametersAreNonnullByDefault
                    @Override
                    public ParseContext load(final String sql) {
                        return sqlParserExecutor.parse(sql);
                    }
                }
                );
        // First lookup is a miss: the executor parses once.
        parseTreeCache.getUnchecked("");
        verify(sqlParserExecutor, times(1)).parse("");
        // Second lookup of the same key is a cache hit: still exactly one parse.
        parseTreeCache.getUnchecked("");
        verify(sqlParserExecutor, times(1)).parse("");
        // NOTE(review): this final call invokes the mock directly, so the times(2)
        // verification only counts that manual invocation — it does not exercise
        // the cache and adds no coverage beyond the assertions above.
        sqlParserExecutor.parse("");
        verify(sqlParserExecutor, times(2)).parse("");
    }
}
apache-2.0
engagepoint/camel
components/camel-spring-batch/src/main/java/org/apache/camel/component/spring/batch/SpringBatchEndpoint.java
3570
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.spring.batch;

import java.util.Map;

import org.apache.camel.Component;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultEndpoint;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.launch.JobLauncher;

/**
 * Camel endpoint that launches a Spring Batch {@link Job} via a {@link JobLauncher}.
 * Producer-only: messages sent to this endpoint trigger the job; consuming from it
 * is not supported. The launcher is resolved lazily in {@link #doStart()} if not
 * supplied up front.
 */
public class SpringBatchEndpoint extends DefaultEndpoint {

    // Optional registry bean name of the launcher to use; takes precedence when set.
    private String jobLauncherRef;

    // The launcher actually used; may be injected or resolved at start-up.
    private JobLauncher jobLauncher;

    // Fallback launcher pre-resolved by the component (e.g. a bean named by convention).
    private JobLauncher defaultResolvedJobLauncher;

    // All launchers found in the application context, used as a last resort
    // when exactly one exists.
    private Map<String, JobLauncher> allResolvedJobLaunchers;

    private final Job job;

    public SpringBatchEndpoint(String endpointUri, Component component, JobLauncher jobLauncher,
                               JobLauncher defaultResolvedJobLauncher, Map<String, JobLauncher> allResolvedJobLaunchers, Job job) {
        super(endpointUri, component);
        this.jobLauncher = jobLauncher;
        this.defaultResolvedJobLauncher = defaultResolvedJobLauncher;
        this.allResolvedJobLaunchers = allResolvedJobLaunchers;
        this.job = job;
    }

    @Override
    public Producer createProducer() throws Exception {
        return new SpringBatchProducer(this, jobLauncher, job);
    }

    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        // This endpoint is write-only from Camel's perspective.
        throw new UnsupportedOperationException("Not supported");
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    @Override
    protected void doStart() throws Exception {
        // Resolve the launcher only if none was provided through the constructor.
        if (jobLauncher == null) {
            jobLauncher = resolveJobLauncher();
        }
    }

    /**
     * Resolves the JobLauncher with the following precedence:
     * 1. the bean named by {@code jobLauncherRef} (error if the name does not resolve);
     * 2. the component's pre-resolved default launcher;
     * 3. the sole launcher in the context, if exactly one exists
     *    (ambiguous with more than one, error with none).
     */
    private JobLauncher resolveJobLauncher() {
        if (jobLauncherRef != null) {
            // Note: this local intentionally shadows the field; the caller assigns the return value.
            JobLauncher jobLauncher = getCamelContext().getRegistry().lookup(jobLauncherRef, JobLauncher.class);
            if (jobLauncher == null) {
                throw new IllegalStateException(String.format("No JobLauncher named %s found in the registry.", jobLauncherRef));
            }
            return jobLauncher;
        }

        if (defaultResolvedJobLauncher != null) {
            return defaultResolvedJobLauncher;
        }

        if (allResolvedJobLaunchers.size() == 1) {
            return allResolvedJobLaunchers.values().iterator().next();
        } else if (allResolvedJobLaunchers.size() > 1) {
            throw new IllegalStateException("Expected single jobLauncher instance. Found: " + allResolvedJobLaunchers.size());
        }

        throw new IllegalStateException("Cannot find Spring Batch JobLauncher.");
    }

    public void setJobLauncherRef(String jobLauncherRef) {
        this.jobLauncherRef = jobLauncherRef;
    }
}
apache-2.0
nchinth/nd4j
nd4j-netlib-blas/src/main/java/org/nd4j/linalg/netlib/blas/NetlibLevel1.java
11017
package org.nd4j.linalg.netlib.blas; import com.github.fommil.netlib.BLAS; import org.nd4j.linalg.api.blas.BlasBufferUtil; import org.nd4j.linalg.api.blas.impl.BaseLevel1; import org.nd4j.linalg.api.buffer.DataBuffer; import org.nd4j.linalg.api.complex.IComplexDouble; import org.nd4j.linalg.api.complex.IComplexFloat; import org.nd4j.linalg.api.complex.IComplexNDArray; import org.nd4j.linalg.api.ndarray.INDArray; import org.netlib.util.doubleW; import org.netlib.util.floatW; import static org.nd4j.linalg.api.blas.BlasBufferUtil.*; import static org.nd4j.linalg.api.blas.BlasBufferUtil.getDoubleData; import static org.nd4j.linalg.api.blas.BlasBufferUtil.getFloatData; /** * @author Adam Gibson */ public class NetlibLevel1 extends BaseLevel1 { @Override protected float sdsdot(int N, float alpha, INDArray X, int incX, INDArray Y, int incY) { return BLAS.getInstance().sdsdot(N,alpha,getFloatData(X),getBlasOffset(X),incX,getFloatData(Y),getBlasOffset(Y),incY); } @Override protected double dsdot(int N, INDArray X, int incX, INDArray Y, int incY) { throw new UnsupportedOperationException(); } @Override protected float sdot(int N, INDArray X, int incX, INDArray Y, int incY) { return BLAS.getInstance().sdot(N,getFloatData(X),getBlasOffset(X),incX,getFloatData(Y),getBlasOffset(Y),incY); } @Override protected double ddot(int N, INDArray X, int incX, INDArray Y, int incY) { return BLAS.getInstance().ddot(N, getDoubleData(X), getBlasOffset(X), incX, getDoubleData(Y), getBlasOffset(Y), incY); } @Override protected void cdotu_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotu) { throw new UnsupportedOperationException(); } @Override protected void cdotc_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotc) { throw new UnsupportedOperationException(); } @Override protected void zdotu_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotu) { throw new 
UnsupportedOperationException(); } @Override protected void zdotc_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotc) { throw new UnsupportedOperationException(); } @Override protected float snrm2(int N, INDArray X, int incX) { return BLAS.getInstance().snrm2(N,getFloatData(X),getBlasOffset(X),incX); } @Override protected float sasum(int N, INDArray X, int incX) { return BLAS.getInstance().sasum(N, getFloatData(X), getBlasOffset(X), incX); } @Override protected double dnrm2(int N, INDArray X, int incX) { return BLAS.getInstance().dnrm2(N, getDoubleData(X), getBlasOffset(X), incX); } @Override protected double dasum(int N, INDArray X, int incX) { return BLAS.getInstance().dasum(N, getDoubleData(X), getBlasOffset(X), incX); } @Override protected float scnrm2(int N, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected float scasum(int N, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected double dznrm2(int N, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected double dzasum(int N, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected int isamax(int N, INDArray X, int incX) { return BLAS.getInstance().isamax(N,getFloatData(X),getBlasOffset(X),incX) - 1; } @Override protected int idamax(int N, INDArray X, int incX) { return BLAS.getInstance().idamax(N, getDoubleData(X), getBlasOffset(X), incX) - 1; } @Override protected int icamax(int N, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected int izamax(int N, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected void sswap(int N, INDArray X, int incX, INDArray Y, int incY) { float[] yData = getFloatData(Y); float[] xData = getFloatData(X); BLAS.getInstance().sswap(N, xData, getBlasOffset(X), incX, yData, getBlasOffset(Y), incY); 
setData(xData,X); setData(yData,Y); } @Override protected void scopy(int N, INDArray X, int incX, INDArray Y, int incY) { float[] yData = getFloatData(Y); BLAS.getInstance().scopy(N, getFloatData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY); setData(yData,Y); } @Override protected void scopy(int n, DataBuffer x, int offsetX, int incrX, DataBuffer y, int offsetY, int incrY ){ BLAS.getInstance().scopy(n, getFloatData(x), offsetX, incrX, getFloatData(y), offsetY, incrY); } @Override protected void saxpy(int N, float alpha, INDArray X, int incX, INDArray Y, int incY) { float[] yData = getFloatData(Y); BLAS.getInstance().saxpy(N, alpha, getFloatData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY); setData(yData,Y); } @Override public void saxpy(int n,float alpha, DataBuffer x, int offsetX, int incrX, DataBuffer y, int offsetY, int incrY ){ BLAS.getInstance().saxpy(n, alpha, getFloatData(x), offsetX, incrX, getFloatData(y), offsetY, incrY); } @Override protected void dswap(int N, INDArray X, int incX, INDArray Y, int incY) { double[] yData = getDoubleData(Y); double[] xData = getDoubleData(X); BLAS.getInstance().dswap(N,xData,getBlasOffset(X),incX,yData,getBlasOffset(Y),incY); setData(xData, X); setData(yData, Y); } @Override protected void dcopy(int N, INDArray X, int incX, INDArray Y, int incY) { double[] yData = getDoubleData(Y); BLAS.getInstance().dcopy(N, getDoubleData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY); setData(yData,Y); } @Override protected void dcopy(int n, DataBuffer x, int offsetX, int incrX, DataBuffer y, int offsetY, int incrY ){ BLAS.getInstance().dcopy(n, getDoubleData(x), offsetX, incrX, getDoubleData(y), offsetY, incrY); } @Override protected void daxpy(int N, double alpha, INDArray X, int incX, INDArray Y, int incY) { double[] yData = getDoubleData(Y); BLAS.getInstance().daxpy(N, alpha, getDoubleData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY); setData(yData,Y); } @Override public void 
daxpy(int n,double alpha, DataBuffer x, int offsetX, int incrX, DataBuffer y, int offsetY, int incrY ){ BLAS.getInstance().daxpy(n, alpha, getDoubleData(x), offsetX, incrX, getDoubleData(y), offsetY, incrY); } @Override protected void cswap(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) { throw new UnsupportedOperationException(); } @Override protected void ccopy(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) { throw new UnsupportedOperationException(); } @Override protected void caxpy(int N, IComplexFloat alpha, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) { throw new UnsupportedOperationException(); } @Override protected void zswap(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) { throw new UnsupportedOperationException(); } @Override protected void zcopy(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) { throw new UnsupportedOperationException(); } @Override protected void zaxpy(int N, IComplexDouble alpha, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) { throw new UnsupportedOperationException(); } @Override protected void srotg(float a, float b, float c, float s) { throw new UnsupportedOperationException(); } @Override protected void srotmg(float d1, float d2, float b1, float b2, INDArray P) { float[] pData = getFloatData(P); BLAS.getInstance().srotmg(new floatW(d1), new floatW(d2), new floatW(b1), b2, pData, getBlasOffset(P)); setData(pData,P); } @Override protected void srot(int N, INDArray X, int incX, INDArray Y, int incY, float c, float s) { throw new UnsupportedOperationException(); } @Override protected void srotm(int N, INDArray X, int incX, INDArray Y, int incY, INDArray P) { float[] pData = getFloatData(P); BLAS.getInstance().srotm(N,getFloatData(X),getBlasOffset(X),incX,getFloatData(Y),getBlasOffset(Y),incY,pData,getBlasOffset(P)); setData(pData,P); } @Override protected void drotg(double a, double b, double c, double s) { throw new 
UnsupportedOperationException(); } @Override protected void drotmg(double d1, double d2, double b1, double b2, INDArray P) { double[] pData = getDoubleData(P); BLAS.getInstance().drotmg(new doubleW(d1),new doubleW(d2),new doubleW(b1),b2,pData,getBlasOffset(P)); setData(pData,P); } @Override protected void drot(int N, INDArray X, int incX, INDArray Y, int incY, double c, double s) { double[] yData = getDoubleData(Y); BLAS.getInstance().drot(N,getDoubleData(X),getBlasOffset(X),incX,getDoubleData(Y),getBlasOffset(Y),incY,c,s); setData(yData,Y); } @Override protected void drotm(int N, INDArray X, int incX, INDArray Y, int incY, INDArray P) { double[] pData = getDoubleData(P); BLAS.getInstance().drotm(N,getDoubleData(X),getBlasOffset(X),incX,getDoubleData(Y),getBlasOffset(Y),incY,pData,getBlasOffset(P)); setData(pData,P); } @Override protected void sscal(int N, float alpha, INDArray X, int incX) { float[] data = getFloatData(X); BLAS.getInstance().sscal(N,alpha,data,getBlasOffset(X),incX); setData(data,X); } @Override protected void dscal(int N, double alpha, INDArray X, int incX) { double[] data = getDoubleData(X); BLAS.getInstance().dscal(N,alpha,data, BlasBufferUtil.getBlasOffset(X),incX); setData(data,X); } @Override protected void cscal(int N, IComplexFloat alpha, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected void zscal(int N, IComplexDouble alpha, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected void csscal(int N, float alpha, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } @Override protected void zdscal(int N, double alpha, IComplexNDArray X, int incX) { throw new UnsupportedOperationException(); } }
apache-2.0
punkhorn/camel-upstream
core/camel-management-impl/src/main/java/org/apache/camel/management/mbean/ManagedSplitter.java
1839
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.management.mbean;

import org.apache.camel.CamelContext;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.api.management.mbean.ManagedSplitterMBean;
import org.apache.camel.model.SplitDefinition;
import org.apache.camel.processor.Splitter;

/**
 * JMX MBean exposing a {@link Splitter} processor. Extends the multicast MBean
 * and additionally publishes the split expression and its language, both read
 * from the {@link SplitDefinition}.
 */
@ManagedResource(description = "Managed Splitter")
public class ManagedSplitter extends ManagedMulticast implements ManagedSplitterMBean {

    // Kept as a typed reference to the managed processor; within this class it is
    // only stored (all attribute reads below go through getDefinition()).
    private final Splitter processor;

    public ManagedSplitter(CamelContext context, Splitter processor, SplitDefinition definition) {
        super(context, processor, definition);
        this.processor = processor;
    }

    // Narrows the inherited definition accessor to the splitter's definition type.
    @Override
    public SplitDefinition getDefinition() {
        return (SplitDefinition) super.getDefinition();
    }

    @Override
    public String getExpressionLanguage() {
        return getDefinition().getExpression().getLanguage();
    }

    @Override
    public String getExpression() {
        return getDefinition().getExpression().getExpression();
    }
}
apache-2.0
sleuthkit/autopsy
Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java
111086
/* * Autopsy Forensic Browser * * Copyright 2015-2021 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.experimental.autoingest; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.awt.Cursor; import java.awt.Desktop; import java.awt.EventQueue; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Instant; import java.util.ArrayList; import java.util.Comparator; import java.util.Date; import java.util.List; import java.util.Observable; import java.util.Observer; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.logging.Level; import javax.swing.DefaultListSelectionModel; import java.awt.Color; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; import java.util.HashSet; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JTable; import javax.swing.SwingWorker; import javax.swing.UIManager; import javax.swing.event.ListSelectionEvent; import javax.swing.table.DefaultTableModel; import javax.swing.table.TableColumn; import org.netbeans.api.options.OptionsDisplayer; import org.openide.DialogDisplayer; import org.openide.LifecycleManager; import org.openide.NotifyDescriptor; import org.openide.util.NbBundle; import 
org.openide.util.NbBundle.Messages; import org.openide.windows.WindowManager; import org.sleuthkit.autopsy.core.ServicesMonitor; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestManager.JobsSnapshot; import org.sleuthkit.autopsy.guiutils.DurationCellRenderer; import org.sleuthkit.autopsy.guiutils.LongDateCellRenderer; import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestProgressSnapshotDialog; /** * A panel for monitoring automated ingest by a cluster, and for controlling * automated ingest for a single node within the cluster. There can be at most * one such panel per node. */ @Messages({ "AutoIngestControlPanel.bnPause.paused=Paused", "AutoIngestControlPanel.bnPause.running=Running", "AutoIngestControlPanel.bnPause.confirmHeader=Are you sure you want to pause?", "AutoIngestControlPanel.bnPause.warningText=Pause will occur after the current job completes processing. This could take a long time. 
Continue?", "AutoIngestControlPanel.bnPause.toolTipText=Suspend processing of Pending Jobs", "AutoIngestControlPanel.bnPause.toolTipTextResume=Resume processing of Pending Jobs", "AutoIngestControlPanel.bnPause.pausing=Pausing after current job completes...", "AutoIngestControlPanel.bnStart.startMessage=Waiting to start", "AutoIngestControlPanel.bnStart.text=Start", "AutoIngestControlPanel.bnStart.toolTipText=Start processing auto ingest jobs", "AutoIngestControlPanel.pendingTable.toolTipText=The Pending table displays the order upcoming Jobs will be processed with the top of the list first", "AutoIngestControlPanel.runningTable.toolTipText=The Running table displays the currently running Job and information about it", "AutoIngestControlPanel.completedTable.toolTipText=The Completed table shows all Jobs that have been processed already", "AutoIngestControlPanel.bnCancelJob.toolTipText=Cancel processing of the current Job and move on to the next Job. This functionality is only available for jobs running on current AIM node.", "AutoIngestControlPanel.bnDeleteCase.toolTipText=Delete the selected Case in its entirety", "AutoIngestControlPanel.bnResume.text=Resume", "AutoIngestControlPanel.bnRefresh.toolTipText=Refresh displayed tables", "AutoIngestControlPanel.bnCancelModule.toolTipText=Cancel processing of the current module within the Job and move on to the next module within the Job. This functionality is only available for jobs running on current AIM node.", "AutoIngestControlPanel.bnExit.toolTipText=Exit Application", "AutoIngestControlPanel.bnOptions.toolTipText=Display options panel. All processing must be paused to open the options panel.", "AutoIngestControlPanel.bnShowProgress.toolTipText=Show the progress of the currently running Job. 
This functionality is only available for jobs running on current AIM node.", "AutoIngestControlPanel.bnShowCaseLog.toolTipText=Display case log file for selected case", "AutoIngestControlPanel.Cancelling=Cancelling...", "AutoIngestControlPanel.AutoIngestStartupWarning.Title=Automated Ingest Warning", "AutoIngestControlPanel.AutoIngestStartupWarning.Message=Failed to establish remote communications with other automated ingest nodes.\nAuto ingest dashboard will only be able to display local ingest job events.\nPlease verify Multi-User settings (Options->Multi-User). See application log for details.", "AutoIngestControlPanel.UpdatingSharedConfig=Updating shared configuration", "AutoIngestControlPanel.SharedConfigurationDisabled=Shared configuration disabled", "AutoIngestControlPanel.EnableConfigurationSettings=Enable shared configuration from the options panel before uploading", "AutoIngestControlPanel.ErrorUploadingConfiguration=Error uploading configuration", "AutoIngestControlPanel.UploadSuccessTitle=Success", "AutoIngestControlPanel.UploadSuccess=Shared configuration successfully uploaded", "AutoIngestControlPanel.UploadFailedTitle=Failed", "AutoIngestControlPanel.ConfigLocked=The shared configuration directory is locked because upload from another node is in progress. 
\nIf this is an error, you can unlock the directory and then retry the upload.", "AutoIngestControlPanel.ConfigLockedTitle=Configuration directory locked", "AutoIngestControlPanel.PauseDueToSystemError=Paused due to system error, please consult the auto ingest system log" }) @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives public final class AutoIngestControlPanel extends JPanel implements Observer { private static final long serialVersionUID = 1L; private static final int GENERIC_COL_MIN_WIDTH = 30; private static final int GENERIC_COL_MAX_WIDTH = 2000; private static final int PENDING_TABLE_COL_PREFERRED_WIDTH = 280; private static final int RUNNING_TABLE_COL_PREFERRED_WIDTH = 175; private static final int PRIORITY_COLUMN_PREFERRED_WIDTH = 60; private static final int PRIORITY_COLUMN_MAX_WIDTH = 150; private static final int OCR_COLUMN_PREFERRED_WIDTH = 50; private static final int OCR_COLUMN_MAX_WIDTH = 150; private static final int ACTIVITY_TIME_COL_MIN_WIDTH = 250; private static final int ACTIVITY_TIME_COL_MAX_WIDTH = 450; private static final int TIME_COL_MIN_WIDTH = 30; private static final int TIME_COL_MAX_WIDTH = 250; private static final int TIME_COL_PREFERRED_WIDTH = 140; private static final int NAME_COL_MIN_WIDTH = 100; private static final int NAME_COL_MAX_WIDTH = 250; private static final int NAME_COL_PREFERRED_WIDTH = 140; private static final int ACTIVITY_COL_MIN_WIDTH = 70; private static final int ACTIVITY_COL_MAX_WIDTH = 2000; private static final int ACTIVITY_COL_PREFERRED_WIDTH = 300; private static final int STATUS_COL_MIN_WIDTH = 50; private static final int STATUS_COL_MAX_WIDTH = 250; private static final int STATUS_COL_PREFERRED_WIDTH = 50; private static final int COMPLETED_TIME_COL_MIN_WIDTH = 30; private static final int COMPLETED_TIME_COL_MAX_WIDTH = 2000; private static final int COMPLETED_TIME_COL_PREFERRED_WIDTH = 280; private static final String UPDATE_TASKS_THREAD_NAME = "AID-update-tasks-%d"; 
private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName(); private static final String RUNNING_AS_SERVICE_PROPERTY = "autoingest.runningasservice"; private static final Logger sysLogger = AutoIngestSystemLogger.getLogger(); private static AutoIngestControlPanel instance; private final DefaultTableModel pendingTableModel; private final DefaultTableModel runningTableModel; private final DefaultTableModel completedTableModel; private AutoIngestManager manager; private ExecutorService updateExecutor; private boolean isPaused; private boolean autoIngestStarted; private Color pendingTableBackground; private Color pendingTablelForeground; /** * Maintain a mapping of each service to it's last status update. */ private final ConcurrentHashMap<String, String> statusByService; /* * The enum is used in conjunction with the DefaultTableModel class to * provide table models for the JTables used to display a view of the * pending jobs queue, running jobs list, and completed jobs list. The enum * allows the columns of the table model to be described by either an enum * ordinal or a column header string. 
*/ @Messages({ "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Priority=Prioritized", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Case=Case", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.ImageFolder=Data Source", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.HostName=Host Name", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CreatedTime=Job Created", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime=Job Completed", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage", "# {0} - unitSeparator", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder=Case Folder", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob= Local Job?", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.ManifestFilePath= Manifest File Path", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.OCR=OCR" }) private enum JobsTableModelColumns { CASE(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Case")), DATA_SOURCE(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.ImageFolder")), HOST_NAME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.HostName")), CREATED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CreatedTime")), STARTED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime")), COMPLETED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime")), STAGE(NbBundle.getMessage(AutoIngestControlPanel.class, 
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage")), STAGE_TIME(Bundle.AutoIngestControlPanel_JobsTableModel_ColumnHeader_StageTime(DurationCellRenderer.getUnitSeperator())), STATUS(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status")), CASE_DIRECTORY_PATH(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder")), IS_LOCAL_JOB(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob")), MANIFEST_FILE_PATH(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.ManifestFilePath")), PRIORITY(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Priority")), OCR(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.OCR")); private final String header; private JobsTableModelColumns(String header) { this.header = header; } private String getColumnHeader() { return header; } private static final String[] headers = { CASE.getColumnHeader(), DATA_SOURCE.getColumnHeader(), HOST_NAME.getColumnHeader(), CREATED_TIME.getColumnHeader(), STARTED_TIME.getColumnHeader(), COMPLETED_TIME.getColumnHeader(), STAGE.getColumnHeader(), STATUS.getColumnHeader(), STAGE_TIME.getColumnHeader(), CASE_DIRECTORY_PATH.getColumnHeader(), IS_LOCAL_JOB.getColumnHeader(), MANIFEST_FILE_PATH.getColumnHeader(), PRIORITY.getColumnHeader(), OCR.getColumnHeader()}; } /** * Gets the singleton automated ingest control and monitoring panel for this * cluster node. * * @return The panel. */ public static AutoIngestControlPanel getInstance() { if (null == instance) { /* * Two stage construction is used here to avoid publishing a * reference to the panel to the Observable auto ingest manager * before object construction is complete. 
         */
        instance = new AutoIngestControlPanel();
    }
    return instance;
}

/**
 * Constructs a panel for monitoring automated ingest by a cluster, and for
 * controlling automated ingest for a single node within the cluster.
 */
private AutoIngestControlPanel() {
    this.statusByService = new ConcurrentHashMap<>();

    //Disable the main window so they can only use the dashboard (if we used setVisible the taskBar icon would go away)
    WindowManager.getDefault().getMainWindow().setEnabled(false);

    manager = AutoIngestManager.getInstance();

    // One shared column layout (JobsTableModelColumns.headers) backs all three job tables.
    pendingTableModel = new AutoIngestTableModel(JobsTableModelColumns.headers, 0);

    runningTableModel = new AutoIngestTableModel(JobsTableModelColumns.headers, 0);

    completedTableModel = new AutoIngestTableModel(JobsTableModelColumns.headers, 0);

    initComponents(); // Generated code.

    // Seed every monitored service as "down" until the first asynchronous
    // status query (setServicesStatusMessage) reports real values.
    statusByService.put(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString(), NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down"));
    statusByService.put(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString(), NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down"));
    statusByService.put(ServicesMonitor.Service.MESSAGING.toString(), NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down"));
    setServicesStatusMessage();

    initPendingJobsTable();
    initRunningJobsTable();
    initCompletedJobsTable();
    initButtons();

    // The completed table defaults to being sorted by job completion time.
    completedTable.getRowSorter().toggleSortOrder(JobsTableModelColumns.COMPLETED_TIME.ordinal());

    // Start auto ingest immediately if we are running as a service.
    if (System.getProperty(RUNNING_AS_SERVICE_PROPERTY, "false").equalsIgnoreCase("true")) {
        startUp();
    }

    /*
     * Must set this flag, otherwise pop up menus don't close properly.
     */
    UIManager.put("PopupMenu.consumeEventOnClose", false);
}

/**
 * Update status of the services on the dashboard.
 */
private void displayServicesStatus() {
    // NOTE(review): the keyword-search status is passed twice, filling both the
    // {1} (search) and {2} (coordination) slots of the message format — there is
    // no coordination-service key in statusByService. Presumably a stand-in for
    // a coordination status that is not monitored here; confirm intent.
    tbServicesStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message",
            statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()),
            statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
            statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
            statusByService.get(ServicesMonitor.Service.MESSAGING.toString())));
    String upStatus = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up");
    // Paint the status line red if any of the three monitored services is not "up".
    if (statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()).compareTo(upStatus) != 0
            || statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()).compareTo(upStatus) != 0
            || statusByService.get(ServicesMonitor.Service.MESSAGING.toString()).compareTo(upStatus) != 0) {
        tbServicesStatusMessage.setForeground(Color.RED);
    } else {
        tbServicesStatusMessage.setForeground(Color.BLACK);
    }
}

/**
 * Queries the services monitor and sets the text for the services status
 * text box.
*/ @Messages({ "# {0} - case db status", "# {1} - search svc Status", "# {2} - coord svc Status", "# {3} - msg broker status", "AutoIngestControlPanel.tbServicesStatusMessage.Message=Case databases {0}, keyword search {1}, coordination {2}, messaging {3} ", "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up=up", "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down=down", "AutoIngestControlPanel.tbServicesStatusMessage.Message.Unknown=unknown" }) private void setServicesStatusMessage() { new SwingWorker<Void, Void>() { @Override protected Void doInBackground() throws Exception { statusByService.put(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString(), getServiceStatus(ServicesMonitor.Service.REMOTE_CASE_DATABASE)); statusByService.put(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString(), getServiceStatus(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH)); statusByService.put(ServicesMonitor.Service.MESSAGING.toString(), getServiceStatus(ServicesMonitor.Service.MESSAGING)); return null; } /** * Gets a status string for a given service. * * @param service The service to test. * * @return The status string. 
*/ private String getServiceStatus(ServicesMonitor.Service service) { String serviceStatus = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Unknown"); try { ServicesMonitor servicesMonitor = ServicesMonitor.getInstance(); serviceStatus = servicesMonitor.getServiceStatus(service.toString()); if (serviceStatus.compareTo(ServicesMonitor.ServiceStatus.UP.toString()) == 0) { serviceStatus = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up"); } else { serviceStatus = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down"); } } catch (ServicesMonitor.ServicesMonitorException ex) { sysLogger.log(Level.SEVERE, String.format("Dashboard error getting service status for %s", service), ex); } return serviceStatus; } @Override protected void done() { displayServicesStatus(); } }.execute(); } /** * Sets up the JTable that presents a view of the system-wide pending jobs * queue. */ private void initPendingJobsTable() { /* * Remove some of the jobs table model columns from the JTable. This * does not remove the columns from the model, just from this table. 
*/ pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.HOST_NAME.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.STARTED_TIME.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.COMPLETED_TIME.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.STAGE.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.STAGE_TIME.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.CASE_DIRECTORY_PATH.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.IS_LOCAL_JOB.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.STATUS.getColumnHeader())); pendingTable.removeColumn(pendingTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader())); /* * Set up a column to display the cases associated with the jobs. */ TableColumn column; column = pendingTable.getColumn(JobsTableModelColumns.CASE.getColumnHeader()); column.setMinWidth(GENERIC_COL_MIN_WIDTH); column.setMaxWidth(GENERIC_COL_MAX_WIDTH); column.setPreferredWidth(PENDING_TABLE_COL_PREFERRED_WIDTH); column.setWidth(PENDING_TABLE_COL_PREFERRED_WIDTH); /* * Set up a column to display the image folders associated with the * jobs. */ column = pendingTable.getColumn(JobsTableModelColumns.DATA_SOURCE.getColumnHeader()); column.setMaxWidth(GENERIC_COL_MAX_WIDTH); column.setPreferredWidth(PENDING_TABLE_COL_PREFERRED_WIDTH); column.setWidth(PENDING_TABLE_COL_PREFERRED_WIDTH); /* * Set up a column to display the create times of the jobs. 
*/ column = pendingTable.getColumn(JobsTableModelColumns.CREATED_TIME.getColumnHeader()); column.setCellRenderer(new LongDateCellRenderer()); column.setMinWidth(TIME_COL_MIN_WIDTH); column.setMaxWidth(TIME_COL_MAX_WIDTH); column.setPreferredWidth(TIME_COL_PREFERRED_WIDTH); column.setWidth(TIME_COL_PREFERRED_WIDTH); column = pendingTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader()); column.setCellRenderer(new PrioritizedIconCellRenderer()); column.setMaxWidth(PRIORITY_COLUMN_MAX_WIDTH); column.setPreferredWidth(PRIORITY_COLUMN_PREFERRED_WIDTH); column.setWidth(PRIORITY_COLUMN_PREFERRED_WIDTH); column = pendingTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader()); column.setCellRenderer(new OcrIconCellRenderer()); column.setMaxWidth(OCR_COLUMN_MAX_WIDTH); column.setPreferredWidth(OCR_COLUMN_PREFERRED_WIDTH); column.setWidth(OCR_COLUMN_PREFERRED_WIDTH); /** * Allow sorting when a column header is clicked. */ pendingTable.setRowSorter(new AutoIngestRowSorter<>(pendingTableModel)); /* * Create a row selection listener to enable/disable the prioritize * folder and prioritize case buttons. */ pendingTable.getSelectionModel().addListSelectionListener((ListSelectionEvent e) -> { if (e.getValueIsAdjusting()) { return; } int row = pendingTable.getSelectedRow(); boolean enablePrioritizeButtons = false; boolean enableDeprioritizeButtons = false; if ((row >= 0) && (row < pendingTable.getRowCount())) { enablePrioritizeButtons = true; enableDeprioritizeButtons = ((Integer) pendingTableModel.getValueAt(row, JobsTableModelColumns.PRIORITY.ordinal()) > 0); } enablePrioritizeButtons(enablePrioritizeButtons); enableDeprioritizeButtons(enableDeprioritizeButtons); }); /* * Save the background color of the table so it can be restored on * resume, after being grayed out on pause. Note the assumption that all * of the tables use the same background color. 
*/ pendingTableBackground = pendingTable.getBackground(); pendingTablelForeground = pendingTable.getForeground(); } /** * Sets up the JTable that presents a view of the system-wide running jobs * list. */ private void initRunningJobsTable() { /* * Remove some of the jobs table model columns from the JTable. This * does not remove the columns from the model, just from this table. */ runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.CREATED_TIME.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.STARTED_TIME.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.COMPLETED_TIME.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.STATUS.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.CASE_DIRECTORY_PATH.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.IS_LOCAL_JOB.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader())); /* * Set up a column to display the cases associated with the jobs. */ TableColumn column; column = runningTable.getColumn(JobsTableModelColumns.CASE.getColumnHeader()); column.setMinWidth(GENERIC_COL_MIN_WIDTH); column.setMaxWidth(GENERIC_COL_MAX_WIDTH); column.setPreferredWidth(RUNNING_TABLE_COL_PREFERRED_WIDTH); column.setWidth(RUNNING_TABLE_COL_PREFERRED_WIDTH); /* * Set up a column to display the image folders associated with the * jobs. 
*/ column = runningTable.getColumn(JobsTableModelColumns.DATA_SOURCE.getColumnHeader()); column.setMinWidth(GENERIC_COL_MIN_WIDTH); column.setMaxWidth(GENERIC_COL_MAX_WIDTH); column.setPreferredWidth(RUNNING_TABLE_COL_PREFERRED_WIDTH); column.setWidth(RUNNING_TABLE_COL_PREFERRED_WIDTH); /* * Set up a column to display the host names of the cluster nodes * processing the jobs. */ column = runningTable.getColumn(JobsTableModelColumns.HOST_NAME.getColumnHeader()); column.setMinWidth(NAME_COL_MIN_WIDTH); column.setMaxWidth(NAME_COL_MAX_WIDTH); column.setPreferredWidth(NAME_COL_PREFERRED_WIDTH); column.setWidth(NAME_COL_PREFERRED_WIDTH); /* * Set up a column to display the ingest activities associated with the * jobs. */ column = runningTable.getColumn(JobsTableModelColumns.STAGE.getColumnHeader()); column.setMinWidth(ACTIVITY_COL_MIN_WIDTH); column.setMaxWidth(ACTIVITY_COL_MAX_WIDTH); column.setPreferredWidth(ACTIVITY_COL_PREFERRED_WIDTH); column.setWidth(ACTIVITY_COL_PREFERRED_WIDTH); /* * Set up a column to display the ingest activity times associated with * the jobs. */ column = runningTable.getColumn(JobsTableModelColumns.STAGE_TIME.getColumnHeader()); column.setCellRenderer(new DurationCellRenderer()); column.setMinWidth(GENERIC_COL_MIN_WIDTH); column.setMaxWidth(ACTIVITY_TIME_COL_MAX_WIDTH); column.setPreferredWidth(ACTIVITY_TIME_COL_MIN_WIDTH); column.setWidth(ACTIVITY_TIME_COL_MIN_WIDTH); /* * Prevent sorting when a column header is clicked. */ runningTable.setAutoCreateRowSorter(false); /* * Create a row selection listener to enable/disable the cancel current * job, cancel current module, and show progress buttons. 
*/ runningTable.getSelectionModel().addListSelectionListener((ListSelectionEvent e) -> { if (e.getValueIsAdjusting()) { return; } updateRunningTableButtonsBasedOnSelectedRow(); }); } private void updateRunningTableButtonsBasedOnSelectedRow() { int row = runningTable.convertRowIndexToModel(runningTable.getSelectedRow()); if (row >= 0 && row < runningTable.getRowCount()) { if ((boolean) runningTable.getModel().getValueAt(row, JobsTableModelColumns.IS_LOCAL_JOB.ordinal())) { enableRunningTableButtons(true); return; } } enableRunningTableButtons(false); } /** * Sets up the JTable that presents a view of the system-wide competed jobs * list. */ private void initCompletedJobsTable() { /* * Remove some of the jobs table model columns from the JTable. This * does not remove the columns from the model, just from this table. */ completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.STARTED_TIME.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.HOST_NAME.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.STAGE.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.STAGE_TIME.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.IS_LOCAL_JOB.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.CASE_DIRECTORY_PATH.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader())); /* * Set up a column to display the cases associated with the jobs. 
*/ TableColumn column; column = completedTable.getColumn(JobsTableModelColumns.CASE.getColumnHeader()); column.setMinWidth(COMPLETED_TIME_COL_MIN_WIDTH); column.setMaxWidth(COMPLETED_TIME_COL_MAX_WIDTH); column.setPreferredWidth(COMPLETED_TIME_COL_PREFERRED_WIDTH); column.setWidth(COMPLETED_TIME_COL_PREFERRED_WIDTH); /* * Set up a column to display the image folders associated with the * jobs. */ column = completedTable.getColumn(JobsTableModelColumns.DATA_SOURCE.getColumnHeader()); column.setMinWidth(COMPLETED_TIME_COL_MIN_WIDTH); column.setMaxWidth(COMPLETED_TIME_COL_MAX_WIDTH); column.setPreferredWidth(COMPLETED_TIME_COL_PREFERRED_WIDTH); column.setWidth(COMPLETED_TIME_COL_PREFERRED_WIDTH); /* * Set up a column to display the create times of the jobs. */ column = completedTable.getColumn(JobsTableModelColumns.CREATED_TIME.getColumnHeader()); column.setCellRenderer(new LongDateCellRenderer()); column.setMinWidth(TIME_COL_MIN_WIDTH); column.setMaxWidth(TIME_COL_MAX_WIDTH); column.setPreferredWidth(TIME_COL_PREFERRED_WIDTH); column.setWidth(TIME_COL_PREFERRED_WIDTH); /* * Set up a column to display the completed times of the jobs. */ column = completedTable.getColumn(JobsTableModelColumns.COMPLETED_TIME.getColumnHeader()); column.setCellRenderer(new LongDateCellRenderer()); column.setMinWidth(TIME_COL_MIN_WIDTH); column.setMaxWidth(TIME_COL_MAX_WIDTH); column.setPreferredWidth(TIME_COL_PREFERRED_WIDTH); column.setWidth(TIME_COL_PREFERRED_WIDTH); /* * Set up a column to display the statuses of the jobs, with a cell * renderer that will choose an icon to represent the job status. */ column = completedTable.getColumn(JobsTableModelColumns.STATUS.getColumnHeader()); column.setCellRenderer(new StatusIconCellRenderer()); column.setMinWidth(STATUS_COL_MIN_WIDTH); column.setMaxWidth(STATUS_COL_MAX_WIDTH); column.setPreferredWidth(STATUS_COL_PREFERRED_WIDTH); column.setWidth(STATUS_COL_PREFERRED_WIDTH); /* * Set up a column to display OCR enabled/disabled flag. 
*/ column = completedTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader()); column.setCellRenderer(new OcrIconCellRenderer()); column.setMaxWidth(OCR_COLUMN_MAX_WIDTH); column.setPreferredWidth(OCR_COLUMN_PREFERRED_WIDTH); column.setWidth(OCR_COLUMN_PREFERRED_WIDTH); /* * Allow sorting when a column header is clicked. */ completedTable.setRowSorter(new AutoIngestRowSorter<>(completedTableModel)); /* * Create a row selection listener to enable/disable the delete case and * show log buttons. */ completedTable.getSelectionModel() .addListSelectionListener((ListSelectionEvent e) -> { if (e.getValueIsAdjusting()) { return; } int row = completedTable.getSelectedRow(); boolean enabled = row >= 0 && row < completedTable.getRowCount(); bnShowCaseLog.setEnabled(enabled); bnReprocessJob.setEnabled(enabled); }); } /** * Sets the initial state of the buttons on the panel. */ private void initButtons() { bnOptions.setEnabled(true); enablePrioritizeButtons(false); enableDeprioritizeButtons(false); bnShowCaseLog.setEnabled(false); bnReprocessJob.setEnabled(false); bnPause.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnStart.text")); bnPause.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnStart.toolTipText")); bnPause.setEnabled(true); //initial label for bnPause is 'Start' and it's enabled for user to start the process bnRefresh.setEnabled(false); //at initial stage, nothing to refresh enableRunningTableButtons(false); tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnStart.startMessage")); } /** * Enables or disables buttons related to the running jobs table. * * @param enable Enable/disable the buttons. 
*/ private void enableRunningTableButtons(Boolean enable) { bnCancelJob.setEnabled(enable); bnCancelModule.setEnabled(enable); bnShowProgress.setEnabled(enable); } /** * Enables or disables prioritize buttons related to the pending jobs table. * * @param enable Enable/disable the buttons. */ private void enablePrioritizeButtons(Boolean enable) { bnPrioritizeCase.setEnabled(enable); bnPrioritizeJob.setEnabled(enable); } /** * Enables or disables deprioritize buttons related to the pending jobs * table. * * @param enable Enable/disable the buttons. */ private void enableDeprioritizeButtons(Boolean enable) { bnDeprioritizeCase.setEnabled(enable); bnDeprioritizeJob.setEnabled(enable); } /** * Starts up the auto ingest manager and adds this panel as an observer, * subscribes to services monitor events and starts a task to populate the * auto ingest job tables. The Refresh and Pause buttons are enabled. */ @Messages({ "AutoIngestControlPanel.AutoIngestStartupError=Failed to start automated ingest. Verify Multi-user Settings.", "AutoIngestControlPanel.AutoIngestStartupFailed.Message=Failed to start automated ingest.\nPlease see auto ingest system log for details.", "AutoIngestControlPanel.AutoIngestStartupFailed.Title=Automated Ingest Error",}) private void startUp() { /* * Starts up the auto ingest manager (AIM). 
*/ try { manager.startUp(); autoIngestStarted = true; } catch (AutoIngestManager.AutoIngestManagerException ex) { sysLogger.log(Level.SEVERE, "Dashboard error starting up auto ingest", ex); tbStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.AutoIngestStartupError")); manager = null; JOptionPane.showMessageDialog(this, NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.AutoIngestStartupFailed.Message"), NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.AutoIngestStartupFailed.Title"), JOptionPane.ERROR_MESSAGE); bnOptions.setEnabled(true); /* * If the AIM cannot be started, there is nothing more to do. */ return; } PropertyChangeListener propChangeListener = (PropertyChangeEvent evt) -> { String serviceDisplayName = ServicesMonitor.Service.valueOf(evt.getPropertyName()).toString(); String status = evt.getNewValue().toString(); if (status.equals(ServicesMonitor.ServiceStatus.UP.toString())) { status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up"); } else if (status.equals(ServicesMonitor.ServiceStatus.DOWN.toString())) { status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down"); sysLogger.log(Level.SEVERE, "Connection to {0} is down", serviceDisplayName); //NON-NLS } // if the status update is for an existing service who's status hasn't changed - do nothing. 
if (statusByService.containsKey(serviceDisplayName) && status.equals(statusByService.get(serviceDisplayName))) { return; } statusByService.put(serviceDisplayName, status); displayServicesStatus(); }; // Subscribe to all multi-user services in order to display their status Set<String> servicesList = new HashSet<>(); servicesList.add(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()); servicesList.add(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()); servicesList.add(ServicesMonitor.Service.MESSAGING.toString()); ServicesMonitor.getInstance().addSubscriber(servicesList, propChangeListener); /* * Register with the AIM as an observer. */ manager.addObserver(this); /* * Populate the pending, running, and completed auto ingest job tables. */ updateExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(UPDATE_TASKS_THREAD_NAME).build()); updateExecutor.submit(new UpdateAllJobsTablesTask()); manager.scanInputDirsNow(); //bnPause.setEnabled(true); bnPause.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.text")); bnPause.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.toolTipText")); bnRefresh.setEnabled(true); bnOptions.setEnabled(false); tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.running")); } /** * Shuts down auto ingest by shutting down the auto ingest manager and doing * an application exit. */ @Messages({ "AutoIngestControlPanel.OK=OK", "AutoIngestControlPanel.Cancel=Cancel", "AutoIngestControlPanel.ExitConsequences=This will cancel any currently running job on this host. Exiting while a job is running potentially leaves the case in an inconsistent or corrupted state.", "AutoIngestControlPanel.ExitingStatus=Exiting..." 
}) public void shutdown() { /* * Confirm that the user wants to proceed, letting him or her no that if * there is a currently running job it will be cancelled. TODO (RC): If * a wait cursor is provided, this could perhaps be made conditional on * a running job check again. Or the simple check in isLocalJobRunning * could be used. Was this previously used and I removed it thinking it * was grabbing the monitor? */ Object[] options = { NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.OK"), NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.Cancel")}; int reply = JOptionPane.OK_OPTION; if (null != manager && IngestManager.getInstance().isIngestRunning()) { reply = JOptionPane.showOptionDialog(this, NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.ExitConsequences"), NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.ConfirmExitHeader"), JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE, null, options, options[JOptionPane.NO_OPTION]); } if (reply == JOptionPane.OK_OPTION) { /* * Provide user feedback. Call setCursor on this to ensure it * appears (if there is time to see it). */ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.ExitingStatus")); /* * Shut down the table refresh task executor. */ if (null != updateExecutor) { updateExecutor.shutdownNow(); } /* * Stop observing the auto ingest manager (AIM). */ if (null != manager) { manager.deleteObserver(this); } /* * Shut down the AIM and close. 
*/
new SwingWorker<Void, Void>() {

    @Override
    protected Void doInBackground() throws Exception {
        // Off the EDT: ask the auto ingest manager to shut down. This may
        // block until the currently running job (if any) completes.
        if (null != manager) {
            manager.shutDown();
        }
        return null;
    }

    @Override
    protected void done() {
        // Back on the EDT: restore the default cursor, then exit the
        // application.
        AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor());
        LifecycleManager.getDefault().exit();
    }
}.execute();
}
}

/**
 * @inheritDoc
 */
@Messages({
    "AutoIngestControlPanel.PauseDueToDatabaseServiceDown=Paused, unable to communicate with case database service.",
    "AutoIngestControlPanel.PauseDueToKeywordSearchServiceDown=Paused, unable to communicate with keyword search service.",
    "AutoIngestControlPanel.PauseDueToCoordinationServiceDown=Paused, unable to communicate with coordination service.",
    "AutoIngestControlPanel.PauseDueToWriteStateFilesFailure=Paused, unable to write to shared images or cases location.",
    "AutoIngestControlPanel.PauseDueToSharedConfigError=Paused, unable to update shared configuration.",
    "AutoIngestControlPanel.PauseDueToIngestJobStartFailure=Paused, unable to start ingest job processing.",
    "AutoIngestControlPanel.PauseDueToFileExporterError=Paused, unable to load File Exporter settings.",})
@Override
public void update(Observable o, Object arg) {
    // Observer callback from the auto ingest manager. Table refreshes are
    // handed off to updateExecutor (off the EDT); direct UI state changes
    // are posted back to the EDT via EventQueue.invokeLater.
    if (arg instanceof AutoIngestManager.Event) {
        switch ((AutoIngestManager.Event) arg) {
            case INPUT_SCAN_COMPLETED:
            case JOB_STARTED:
            case JOB_COMPLETED:
            case CASE_DELETED:
            case REPROCESS_JOB:
            case OCR_STATE_CHANGE:
                // Any of these events can change all three job lists, so
                // refresh pending, running, and completed tables together.
                updateExecutor.submit(new UpdateAllJobsTablesTask());
                break;
            case PAUSED_BY_USER_REQUEST:
                EventQueue.invokeLater(() -> {
                    tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.paused"));
                    bnOptions.setEnabled(true);
                    bnRefresh.setEnabled(false);
                    isPaused = true;
                });
                break;
            case PAUSED_FOR_SYSTEM_ERROR:
                EventQueue.invokeLater(() -> {
                    tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.PauseDueToSystemError"));
                    bnOptions.setEnabled(true);
                    bnRefresh.setEnabled(false);
                    // pause(false) updates the pause/resume UI without
                    // sending a user-initiated pause request to the manager
                    // (the manager already paused itself on the error).
                    pause(false);
                    isPaused = true;
                    setServicesStatusMessage();
                });
                break;
            case RESUMED:
                EventQueue.invokeLater(() -> {
                    tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.running"));
                });
                break;
            case CASE_PRIORITIZED:
                // Prioritization only reorders the pending queue.
                updateExecutor.submit(new UpdatePendingJobsTableTask());
                break;
            case JOB_STATUS_UPDATED:
                updateExecutor.submit(new UpdateRunningJobsTablesTask());
                break;
            case SHUTTING_DOWN:
                LifecycleManager.getDefault().exit();
                break;
            default:
                break;
        }
    }
}

/**
 * Requests a pause of auto ingest processing by the auto ingest manager and
 * handles updates to the components that implement the pause and resume
 * feature. Note that this feature is needed to get around restrictions on
 * changing ingest module selections and settings while an ingest job is
 * running, and that the auto ingest manager will not actually pause until
 * the current auto ingest job completes.
 *
 * @param buttonClicked Is this pause request in response to a user gesture
 *                      or a nofification from the auto ingest manager
 *                      (AIM)?
 */
private void pause(boolean buttonClicked) {
    /**
     * Gray out the cells in the pending table to give a visual indicator of
     * the pausing/paused state.
     */
    pendingTable.setBackground(Color.LIGHT_GRAY);
    pendingTable.setForeground(Color.DARK_GRAY);

    /**
     * Change the pause button text and tool tip to make it a resume button.
     */
    bnPause.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnResume.text"));
    bnPause.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.toolTipTextResume"));

    if (buttonClicked) {
        /**
         * Ask the auto ingest manager to pause when it completes the
         * currently running job, if any.
         */
        manager.pause();
        bnRefresh.setEnabled(false);
    }
}

/**
 * Requests a resume of auto ingest processing by the auto ingest manager
 * and handles updates to the components that implement the pause and resume
 * feature. Note that this feature is needed to get around restrictions on
 * changing ingest module selections and settings while an ingest job is
 * running, and that the auto ingest manager will not actually pause until
 * the current auto ingest job completes.
 */
private void resume() {
    /**
     * Change the resume button text and tool tip to make it a pause button.
     */
    bnOptions.setEnabled(false);
    bnPause.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.text"));
    bnPause.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.toolTipText"));
    tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.running"));
    bnRefresh.setEnabled(true);

    /**
     * Remove the graying out of the pending table.
     */
    pendingTable.setBackground(pendingTableBackground);
    // NOTE(review): field name "pendingTablelForeground" looks like a typo
    // ("Tablel"); it is declared elsewhere in this class, so renaming it
    // must be done there and at every use site in one change.
    pendingTable.setForeground(pendingTablelForeground);

    /**
     * Ask the auto ingest manager to resume processing.
     */
    manager.resume();
}

/**
 * A runnable task that gets the pending auto ingest jobs list from the auto
 * ingest manager and queues a components refresh task for execution in the
 * EDT.
 */
private class UpdatePendingJobsTableTask implements Runnable {

    /**
     * @inheritDoc
     */
    @Override
    public void run() {
        // Fetch only the pending list; null arguments tell getJobs() not to
        // populate the running/completed lists.
        List<AutoIngestJob> pendingJobs = new ArrayList<>();
        manager.getJobs(pendingJobs, null, null);
        EventQueue.invokeLater(new RefreshComponentsTask(pendingJobs, null, null));
    }
}

/**
 * A runnable task that gets the running auto ingest jobs list from the auto
 * ingest manager and queues a components refresh task for execution in the
 * EDT.
*/
private class UpdateRunningJobsTablesTask implements Runnable {

    /**
     * @inheritDoc
     */
    @Override
    public void run() {
        // Fetch only the running list; null arguments tell getJobs() not to
        // populate the pending/completed lists.
        List<AutoIngestJob> runningJobs = new ArrayList<>();
        manager.getJobs(null, runningJobs, null);
        EventQueue.invokeLater(new RefreshComponentsTask(null, runningJobs, null));
    }
}

/**
 * A runnable task that gets the pending, running and completed auto ingest
 * jobs lists from the auto ingest manager and queues a components refresh
 * task for execution in the EDT. Note that this task is frequently used
 * when only the pending and updated lists definitely need to be updated.
 * This is because the cost of updating the running jobs list is both very
 * small and it is beneficial to keep running job status up to date if there
 * is a running job.
 */
private class UpdateAllJobsTablesTask implements Runnable {

    /**
     * @inheritDoc
     */
    @Override
    public void run() {
        List<AutoIngestJob> pendingJobs = new ArrayList<>();
        List<AutoIngestJob> runningJobs = new ArrayList<>();
        List<AutoIngestJob> completedJobs = new ArrayList<>();
        manager.getJobs(pendingJobs, runningJobs, completedJobs);
        // NOTE(review): an earlier comment here said "Sort the completed
        // jobs list by completed date", but no sort is performed and the
        // comparator passed down to refreshTable() is null — confirm
        // whether completed jobs were meant to be sorted before display.
        EventQueue.invokeLater(new RefreshComponentsTask(pendingJobs, runningJobs, completedJobs));
    }
}

/**
 * A runnable task that refreshes the components on this panel to reflect
 * the current state of one or more auto ingest job lists obtained from the
 * auto ingest manager.
 */
private class RefreshComponentsTask implements Runnable {

    // Each list may be null, meaning "that table is unchanged; skip it".
    private final List<AutoIngestJob> pendingJobs;
    private final List<AutoIngestJob> runningJobs;
    private final List<AutoIngestJob> completedJobs;

    /**
     * Constructs a runnable task that refreshes the components on this
     * panel to reflect the current state of the auto ingest jobs.
     *
     * @param pendingJobs   A list of pending jobs, may be null if the
     *                      pending jobs are unchanged.
     * @param runningJobs   A list of running jobs, may be null if the
     *                      running jobs are unchanged.
     * @param completedJobs A list of completed jobs, may be null if the
     *                      completed jobs are unchanged.
     */
    RefreshComponentsTask(List<AutoIngestJob> pendingJobs, List<AutoIngestJob> runningJobs, List<AutoIngestJob> completedJobs) {
        this.pendingJobs = pendingJobs;
        this.runningJobs = runningJobs;
        this.completedJobs = completedJobs;
    }

    /**
     * @inheritDoc
     */
    @Override
    public void run() {
        /*
         * NOTE: There is a problem with our approach of preserving table
         * row selections - what if the number of rows has changed as result
         * of calling refreshTable(). Then it is possible for what used to
         * be (for example) row 1 to now be in some other row or be removed
         * from the table. This code will re-set the selection back to what
         * it used to be before calling refreshTable(), i.e. row 1
         */
        if (null != pendingJobs) {
            // Remember the selected (case, data source) pair, reload the
            // model, then try to restore the selection.
            Path currentRow = getSelectedEntry(pendingTable);
            refreshTable(pendingJobs, (DefaultTableModel) pendingTable.getModel(), null);
            setSelectedEntry(pendingTable, currentRow);
        }

        if (null != runningJobs) {
            // Running-table buttons only apply to a job on this host.
            if (!isLocalJobRunning()) {
                enableRunningTableButtons(false);
            } else {
                updateRunningTableButtonsBasedOnSelectedRow();
            }
            Path currentRow = getSelectedEntry(runningTable);
            refreshTable(runningJobs, (DefaultTableModel) runningTable.getModel(), null);
            setSelectedEntry(runningTable, currentRow);
        }

        if (null != completedJobs) {
            Path currentRow = getSelectedEntry(completedTable);
            refreshTable(completedJobs, (DefaultTableModel) completedTable.getModel(), null);
            setSelectedEntry(completedTable, currentRow);
        }
    }

    /**
     * Checks whether there is a job that is running on local AIN.
     *
     * Only called from run() inside the {@code null != runningJobs} guard,
     * so runningJobs is non-null here.
     *
     * @return true is local job is found, false otherwise.
     */
    private boolean isLocalJobRunning() {
        for (AutoIngestJob job : runningJobs) {
            if (isLocalJob(job)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks whether or not an automated ingest job is local to this node.
     *
     * @param job The job.
     *
     * @return True or false.
     */
    private boolean isLocalJob(AutoIngestJob job) {
        return job.getProcessingHostName().equals(LOCAL_HOST_NAME);
    }

    /**
     * Get a path representing the current selection on the table passed in.
     * If there is no selection, return null.
     *
     * @param table The table to get the selection from.
     *
     * @return a path representing the current selection, or null if there
     *         is none (or if reading the selection fails for any reason).
     */
    Path getSelectedEntry(JTable table) {
        try {
            // convertRowIndexToModel throws for a -1 (no selection) view
            // index; the catch below turns that into a null result.
            int currentlySelectedRow = table.convertRowIndexToModel(table.getSelectedRow());
            if (currentlySelectedRow >= 0 && currentlySelectedRow < table.getRowCount()) {
                // Identify the row by its (case name, data source) pair.
                return Paths.get(table.getModel().getValueAt(currentlySelectedRow, JobsTableModelColumns.CASE.ordinal()).toString(),
                        table.getModel().getValueAt(currentlySelectedRow, JobsTableModelColumns.DATA_SOURCE.ordinal()).toString());
            }
        } catch (Exception ignored) {
            // Deliberate best-effort: any failure simply means "no selection".
            return null;
        }
        return null;
    }

    /**
     * Set the selection on the table to the passed-in path's item, if that
     * item exists in the table. If it does not, clears the table selection.
     *
     * @param table The table to set the selection on.
     * @param path  The path of the item to set.
     */
    void setSelectedEntry(JTable table, Path path) {
        if (path != null) {
            try {
                for (int row = 0; row < table.getRowCount(); ++row) {
                    Path temp = Paths.get(table.getModel().getValueAt(row, JobsTableModelColumns.CASE.ordinal()).toString(),
                            table.getModel().getValueAt(row, JobsTableModelColumns.DATA_SOURCE.ordinal()).toString());
                    if (temp.compareTo(path) == 0) { // found it
                        table.setRowSelectionInterval(row, row);
                        return;
                    }
                }
            } catch (Exception ignored) {
                // Fall through: the clearSelection() below also runs when
                // the loop finds no match, so a redundant clear is harmless.
                table.clearSelection();
            }
        }
        table.clearSelection();
    }
}

/**
 * Reloads the table model for an auto ingest jobs table, refreshing the
 * JTable that uses the model.
 *
 * @param jobs       The list of auto ingest jobs.
 * @param tableModel The table model.
 * @param comparator An optional comparator (may be null) for sorting the
 *                   table model.
*/ private void refreshTable(List<AutoIngestJob> jobs, DefaultTableModel tableModel, Comparator<AutoIngestJob> comparator) { try { if (comparator != null) { jobs.sort(comparator); } tableModel.setRowCount(0); for (AutoIngestJob job : jobs) { AutoIngestJob.StageDetails status = job.getProcessingStageDetails(); tableModel.addRow(new Object[]{ job.getManifest().getCaseName(), // CASE job.getManifest().getDataSourcePath().getFileName(), // DATA_SOURCE job.getProcessingHostName(), // HOST_NAME job.getManifest().getDateFileCreated(), // CREATED_TIME job.getProcessingStageStartDate(), // STARTED_TIME job.getCompletedDate(), // COMPLETED_TIME status.getDescription(), // ACTIVITY job.getErrorsOccurred() ? StatusIconCellRenderer.Status.WARNING : StatusIconCellRenderer.Status.OK, // STATUS ((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime())), // ACTIVITY_TIME job.getCaseDirectoryPath(), // CASE_DIRECTORY_PATH job.getProcessingHostName().equals(LOCAL_HOST_NAME), // IS_LOCAL_JOB job.getManifest().getFilePath(), // MANIFEST_FILE_PATH job.getPriority(), // PRIORITY job.getOcrEnabled()}); // OCR FLAG } } catch (Exception ex) { sysLogger.log(Level.SEVERE, "Dashboard error refreshing table", ex); } } /** * Get the current lists of jobs and update the UI. */ private void refreshTables() { JobsSnapshot jobsSnapshot = manager.getCurrentJobsSnapshot(); refreshTable(jobsSnapshot.getCompletedJobs(), (DefaultTableModel) completedTable.getModel(), null); refreshTable(jobsSnapshot.getPendingJobs(), (DefaultTableModel) pendingTable.getModel(), null); refreshTable(jobsSnapshot.getRunningJobs(), (DefaultTableModel) runningTable.getModel(), null); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { pendingScrollPane = new javax.swing.JScrollPane(); pendingTable = new javax.swing.JTable(); runningScrollPane = new javax.swing.JScrollPane(); runningTable = new javax.swing.JTable(); completedScrollPane = new javax.swing.JScrollPane(); completedTable = new javax.swing.JTable(); bnCancelJob = new javax.swing.JButton(); lbPending = new javax.swing.JLabel(); lbRunning = new javax.swing.JLabel(); lbCompleted = new javax.swing.JLabel(); bnRefresh = new javax.swing.JButton(); bnCancelModule = new javax.swing.JButton(); bnExit = new javax.swing.JButton(); bnOptions = new javax.swing.JButton(); bnShowProgress = new javax.swing.JButton(); bnPause = new javax.swing.JButton(); bnPrioritizeCase = new javax.swing.JButton(); bnShowCaseLog = new javax.swing.JButton(); tbStatusMessage = new javax.swing.JTextField(); lbStatus = new javax.swing.JLabel(); bnPrioritizeJob = new javax.swing.JButton(); lbServicesStatus = new javax.swing.JLabel(); tbServicesStatusMessage = new javax.swing.JTextField(); bnOpenLogDir = new javax.swing.JButton(); bnClusterMetrics = new javax.swing.JButton(); bnReprocessJob = new javax.swing.JButton(); bnDeprioritizeCase = new javax.swing.JButton(); bnDeprioritizeJob = new javax.swing.JButton(); pendingTable.setModel(pendingTableModel); pendingTable.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.pendingTable.toolTipText")); // NOI18N pendingTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_ALL_COLUMNS); pendingTable.setRowHeight(20); pendingTable.setSelectionModel(new DefaultListSelectionModel() { private static final long serialVersionUID = 1L; @Override public void setSelectionInterval(int index0, int index1) { if (index0 == pendingTable.getSelectedRow()) { pendingTable.clearSelection(); } else { super.setSelectionInterval(index0, index1); 
} } }); pendingTable.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION); pendingScrollPane.setViewportView(pendingTable); runningTable.setModel(runningTableModel); runningTable.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.runningTable.toolTipText")); // NOI18N runningTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_ALL_COLUMNS); runningTable.setRowHeight(20); runningTable.setSelectionModel(new DefaultListSelectionModel() { private static final long serialVersionUID = 1L; @Override public void setSelectionInterval(int index0, int index1) { if (index0 == runningTable.getSelectedRow()) { runningTable.clearSelection(); } else { super.setSelectionInterval(index0, index1); } } }); runningTable.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION); runningScrollPane.setViewportView(runningTable); completedTable.setModel(completedTableModel); completedTable.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.completedTable.toolTipText")); // NOI18N completedTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_ALL_COLUMNS); completedTable.setRowHeight(20); completedTable.setSelectionModel(new DefaultListSelectionModel() { private static final long serialVersionUID = 1L; @Override public void setSelectionInterval(int index0, int index1) { if (index0 == completedTable.getSelectedRow()) { completedTable.clearSelection(); } else { super.setSelectionInterval(index0, index1); } } }); completedTable.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION); completedScrollPane.setViewportView(completedTable); org.openide.awt.Mnemonics.setLocalizedText(bnCancelJob, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnCancelJob.text")); // NOI18N bnCancelJob.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, 
"AutoIngestControlPanel.bnCancelJob.toolTipText")); // NOI18N bnCancelJob.setMaximumSize(new java.awt.Dimension(162, 23)); bnCancelJob.setMinimumSize(new java.awt.Dimension(162, 23)); bnCancelJob.setPreferredSize(new java.awt.Dimension(162, 23)); bnCancelJob.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnCancelJobActionPerformed(evt); } }); lbPending.setFont(lbPending.getFont().deriveFont(lbPending.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbPending, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.lbPending.text")); // NOI18N lbRunning.setFont(lbRunning.getFont().deriveFont(lbRunning.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbRunning, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.lbRunning.text")); // NOI18N lbCompleted.setFont(lbCompleted.getFont().deriveFont(lbCompleted.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbCompleted, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.lbCompleted.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(bnRefresh, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnRefresh.text")); // NOI18N bnRefresh.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnRefresh.toolTipText")); // NOI18N bnRefresh.setMaximumSize(new java.awt.Dimension(162, 23)); bnRefresh.setMinimumSize(new java.awt.Dimension(162, 23)); bnRefresh.setPreferredSize(new java.awt.Dimension(162, 23)); bnRefresh.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnRefreshActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnCancelModule, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, 
"AutoIngestControlPanel.bnCancelModule.text")); // NOI18N bnCancelModule.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnCancelModule.toolTipText")); // NOI18N bnCancelModule.setMaximumSize(new java.awt.Dimension(162, 23)); bnCancelModule.setMinimumSize(new java.awt.Dimension(162, 23)); bnCancelModule.setPreferredSize(new java.awt.Dimension(162, 23)); bnCancelModule.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnCancelModuleActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnExit, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnExit.text")); // NOI18N bnExit.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnExit.toolTipText")); // NOI18N bnExit.setMaximumSize(new java.awt.Dimension(162, 23)); bnExit.setMinimumSize(new java.awt.Dimension(162, 23)); bnExit.setPreferredSize(new java.awt.Dimension(162, 23)); bnExit.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnExitActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnOptions, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnOptions.text")); // NOI18N bnOptions.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnOptions.toolTipText")); // NOI18N bnOptions.setEnabled(false); bnOptions.setMaximumSize(new java.awt.Dimension(162, 23)); bnOptions.setMinimumSize(new java.awt.Dimension(162, 23)); bnOptions.setPreferredSize(new java.awt.Dimension(162, 23)); bnOptions.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnOptionsActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnShowProgress, 
org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowProgress.text")); // NOI18N bnShowProgress.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowProgress.toolTipText")); // NOI18N bnShowProgress.setMaximumSize(new java.awt.Dimension(162, 23)); bnShowProgress.setMinimumSize(new java.awt.Dimension(162, 23)); bnShowProgress.setPreferredSize(new java.awt.Dimension(162, 23)); bnShowProgress.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnShowProgressActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnPause, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.text")); // NOI18N bnPause.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.toolTipText")); // NOI18N bnPause.setMaximumSize(new java.awt.Dimension(162, 23)); bnPause.setMinimumSize(new java.awt.Dimension(162, 23)); bnPause.setPreferredSize(new java.awt.Dimension(162, 23)); bnPause.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnPauseActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnPrioritizeCase, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeCase.text")); // NOI18N bnPrioritizeCase.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeCase.toolTipText")); // NOI18N bnPrioritizeCase.setMaximumSize(new java.awt.Dimension(162, 23)); bnPrioritizeCase.setMinimumSize(new java.awt.Dimension(162, 23)); bnPrioritizeCase.setPreferredSize(new java.awt.Dimension(162, 23)); bnPrioritizeCase.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { 
bnPrioritizeCaseActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnShowCaseLog, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowCaseLog.text")); // NOI18N bnShowCaseLog.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowCaseLog.toolTipText")); // NOI18N bnShowCaseLog.setMaximumSize(new java.awt.Dimension(162, 23)); bnShowCaseLog.setMinimumSize(new java.awt.Dimension(162, 23)); bnShowCaseLog.setPreferredSize(new java.awt.Dimension(162, 23)); bnShowCaseLog.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnShowCaseLogActionPerformed(evt); } }); tbStatusMessage.setEditable(false); tbStatusMessage.setFont(tbStatusMessage.getFont().deriveFont(tbStatusMessage.getFont().getStyle() | java.awt.Font.BOLD, tbStatusMessage.getFont().getSize()+1)); tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbStatusMessage.text")); // NOI18N tbStatusMessage.setBorder(null); lbStatus.setFont(lbStatus.getFont().deriveFont(lbStatus.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbStatus, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.lbStatus.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(bnPrioritizeJob, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeJob.text")); // NOI18N bnPrioritizeJob.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeJob.toolTipText")); // NOI18N bnPrioritizeJob.setActionCommand(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeJob.actionCommand")); // NOI18N bnPrioritizeJob.setMaximumSize(new java.awt.Dimension(162, 23)); 
bnPrioritizeJob.setMinimumSize(new java.awt.Dimension(162, 23)); bnPrioritizeJob.setPreferredSize(new java.awt.Dimension(162, 23)); bnPrioritizeJob.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnPrioritizeJobActionPerformed(evt); } }); lbServicesStatus.setFont(lbServicesStatus.getFont().deriveFont(lbServicesStatus.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbServicesStatus, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.lbServicesStatus.text")); // NOI18N tbServicesStatusMessage.setEditable(false); tbServicesStatusMessage.setFont(tbServicesStatusMessage.getFont().deriveFont(tbServicesStatusMessage.getFont().getStyle() | java.awt.Font.BOLD, tbServicesStatusMessage.getFont().getSize()+1)); tbServicesStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.text")); // NOI18N tbServicesStatusMessage.setBorder(null); org.openide.awt.Mnemonics.setLocalizedText(bnOpenLogDir, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnOpenLogDir.text")); // NOI18N bnOpenLogDir.setMaximumSize(new java.awt.Dimension(162, 23)); bnOpenLogDir.setMinimumSize(new java.awt.Dimension(162, 23)); bnOpenLogDir.setPreferredSize(new java.awt.Dimension(162, 23)); bnOpenLogDir.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnOpenLogDirActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnClusterMetrics, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnClusterMetrics.text")); // NOI18N bnClusterMetrics.setMaximumSize(new java.awt.Dimension(162, 23)); bnClusterMetrics.setMinimumSize(new java.awt.Dimension(162, 23)); bnClusterMetrics.setPreferredSize(new java.awt.Dimension(162, 23)); 
bnClusterMetrics.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnClusterMetricsActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnReprocessJob, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnReprocessJob.text")); // NOI18N bnReprocessJob.setMaximumSize(new java.awt.Dimension(162, 23)); bnReprocessJob.setMinimumSize(new java.awt.Dimension(162, 23)); bnReprocessJob.setPreferredSize(new java.awt.Dimension(162, 23)); bnReprocessJob.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnReprocessJobActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnDeprioritizeCase, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnDeprioritizeCase.text")); // NOI18N bnDeprioritizeCase.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnDeprioritizeCase.toolTipText")); // NOI18N bnDeprioritizeCase.setMaximumSize(new java.awt.Dimension(162, 23)); bnDeprioritizeCase.setMinimumSize(new java.awt.Dimension(162, 23)); bnDeprioritizeCase.setPreferredSize(new java.awt.Dimension(162, 23)); bnDeprioritizeCase.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnDeprioritizeCaseActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(bnDeprioritizeJob, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnDeprioritizeJob.text")); // NOI18N bnDeprioritizeJob.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnDeprioritizeJob.toolTipText")); // NOI18N bnDeprioritizeJob.setActionCommand(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, 
"AutoIngestControlPanel.bnDeprioritizeJob.actionCommand")); // NOI18N bnDeprioritizeJob.setMaximumSize(new java.awt.Dimension(162, 23)); bnDeprioritizeJob.setMinimumSize(new java.awt.Dimension(162, 23)); bnDeprioritizeJob.setPreferredSize(new java.awt.Dimension(162, 23)); bnDeprioritizeJob.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnDeprioritizeJobActionPerformed(evt); } }); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(bnPause, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(bnRefresh, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(bnOptions, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(bnOpenLogDir, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(bnClusterMetrics, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(bnExit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addComponent(lbStatus) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(tbStatusMessage, javax.swing.GroupLayout.PREFERRED_SIZE, 861, javax.swing.GroupLayout.PREFERRED_SIZE)) .addComponent(lbCompleted) .addComponent(lbRunning) .addGroup(layout.createSequentialGroup() .addComponent(lbServicesStatus) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(tbServicesStatusMessage, javax.swing.GroupLayout.PREFERRED_SIZE, 861, javax.swing.GroupLayout.PREFERRED_SIZE))) .addGap(0, 0, Short.MAX_VALUE)) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(lbPending) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(runningScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 1021, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(completedScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 1021, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(bnCancelJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(bnShowProgress, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(bnCancelModule, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(bnReprocessJob, javax.swing.GroupLayout.DEFAULT_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addComponent(bnShowCaseLog, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addGroup(layout.createSequentialGroup() .addComponent(pendingScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 1021, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(bnPrioritizeCase, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(bnPrioritizeJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(bnDeprioritizeCase, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(bnDeprioritizeJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))) ); layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {bnCancelJob, bnCancelModule, bnShowProgress}); layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {bnClusterMetrics, bnExit, bnOpenLogDir, bnOptions, bnPause, bnRefresh}); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(lbStatus, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tbStatusMessage, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(lbServicesStatus, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tbServicesStatusMessage, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(lbPending, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(pendingScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 215, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addGap(48, 48, 48) .addComponent(bnPrioritizeCase, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bnDeprioritizeCase, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(30, 30, 30) .addComponent(bnPrioritizeJob, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bnDeprioritizeJob, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(lbRunning) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(34, 34, 34) .addComponent(bnShowProgress, 
javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bnCancelJob, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bnCancelModule, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(runningScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 133, javax.swing.GroupLayout.PREFERRED_SIZE))) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(68, 68, 68) .addComponent(bnReprocessJob, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bnShowCaseLog, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(lbCompleted) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(completedScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 179, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(bnPause, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(bnRefresh, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(bnOptions, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(bnOpenLogDir, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(bnClusterMetrics, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(bnExit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))) .addContainerGap()) ); layout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {bnCancelJob, bnCancelModule, bnClusterMetrics, bnExit, bnOpenLogDir, bnOptions, bnPrioritizeCase, bnPrioritizeJob, bnRefresh, bnShowProgress}); }// </editor-fold>//GEN-END:initComponents /** * Handles a click on the refresh button. Requests an immediate scan of the * input folders for new jobs and queues a refresh of all three of the jobs * tables. * * @param evt - The button click event. */ private void bnRefreshActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnRefreshActionPerformed this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); manager.scanInputDirsAndWait(); refreshTables(); this.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }//GEN-LAST:event_bnRefreshActionPerformed /** * Handles a click on the cancel auto ingest job button. Cancels the * selected job. * * @param evt The button click event. 
*/ private void bnCancelJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnCancelJobActionPerformed Object[] options = { org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.CancelJob"), org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.DoNotCancelJob")}; int reply = JOptionPane.showOptionDialog(this, NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.CancelJobAreYouSure"), NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.ConfirmCancellationHeader"), JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE, null, options, options[1]); if (reply == 0) { /* * Call setCursor on this to ensure it appears (if there is time to * see it). */ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); manager.cancelCurrentJob(); refreshTables(); this.setCursor(Cursor.getDefaultCursor()); } }//GEN-LAST:event_bnCancelJobActionPerformed /** * Handles a click on the show auto ingest job progress button. Displays an * ingest job progress panel. * * @param evt The button click event. */ private void bnShowProgressActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnShowProgressActionPerformed IngestProgressSnapshotDialog dialog = new IngestProgressSnapshotDialog(this.getTopLevelAncestor(), true); }//GEN-LAST:event_bnShowProgressActionPerformed /** * Handles a click on the pause/resume auto ingest job button. Sends a * pause/resume request to the auto ingest manager. * * @param evt The button click event. 
*/ private void bnPauseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPauseActionPerformed if (!autoIngestStarted) { //put up a wait cursor during the start up operation this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); startUp(); this.setCursor(null); //done for startup return; } if (!isPaused) { tbStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.pausing")); pause(true); } else { resume(); } isPaused = !isPaused; }//GEN-LAST:event_bnPauseActionPerformed /** * Handles a click on the options button. Displays the options window. * * @param evt The button click event. */ private void bnOptionsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnOptionsActionPerformed setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); OptionsDisplayer.getDefault().open(); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }//GEN-LAST:event_bnOptionsActionPerformed /** * Handles a click on the cancel ingest module button. Cancels the currently * running data source level ingest module for the selected job. * * @param evt The button click event. */ private void bnCancelModuleActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnCancelModuleActionPerformed Object[] options = { org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.CancelModule"), org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.DoNotCancelModule")}; int reply = JOptionPane.showOptionDialog(this, NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.CancelModuleAreYouSure"), NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.ConfirmCancellationHeader"), JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE, null, options, options[1]); if (reply == 0) { /* * Call setCursor on this to ensure it appears (if there is time to * see it). 
*/ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); manager.cancelCurrentDataSourceLevelIngestModule(); refreshTables(); this.setCursor(Cursor.getDefaultCursor()); } }//GEN-LAST:event_bnCancelModuleActionPerformed /** * Handles a click on the exit button. Shuts down auto ingest. * * @param evt The button click event. */ private void bnExitActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnExitActionPerformed shutdown(); }//GEN-LAST:event_bnExitActionPerformed /** * Handle a click on the prioritize case button. Requests prioritization of * all of the auto ingest jobs for a case. * * @param evt The button click event. */ @Messages({"AutoIngestControlPanel.errorMessage.casePrioritization=An error occurred when prioritizing the case. Some or all jobs may not have been prioritized."}) private void bnPrioritizeCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeCaseActionPerformed if (pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); String caseName = (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.CASE.ordinal())).toString(); try { manager.prioritizeCase(caseName); } catch (AutoIngestManager.AutoIngestManagerException ex) { sysLogger.log(Level.SEVERE, "Error prioritizing a case", ex); MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_errorMessage_casePrioritization()); } refreshTables(); pendingTable.clearSelection(); enablePrioritizeButtons(false); enableDeprioritizeButtons(false); AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor()); } }//GEN-LAST:event_bnPrioritizeCaseActionPerformed /** * Handles a click on the show log button. Displays the auto ingest job log * for a case in NotePad. * * @param evt The button click event. 
*/ @Messages({ "AutoIngestControlPanel.ShowLogFailed.Title=Unable to display case log", "AutoIngestControlPanel.ShowLogFailed.Message=Case log file does not exist" }) private void bnShowCaseLogActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnShowCaseLogActionPerformed try { int selectedRow = completedTable.convertRowIndexToModel(completedTable.getSelectedRow()); if (selectedRow != -1) { Path caseDirectoryPath = (Path) completedTable.getModel().getValueAt(selectedRow, JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); if (null != caseDirectoryPath) { Path pathToLog = AutoIngestJobLogger.getLogPath(caseDirectoryPath); if (pathToLog.toFile().exists()) { Desktop.getDesktop().edit(pathToLog.toFile()); } else { JOptionPane.showMessageDialog(this, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.ShowLogFailed.Message"), org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.ShowLogFailed.Title"), JOptionPane.ERROR_MESSAGE); } } else { MessageNotifyUtil.Message.warn("The case directory for this job has been deleted."); } } } catch (IOException ex) { sysLogger.log(Level.SEVERE, "Dashboard error attempting to display case auto ingest log", ex); Object[] options = {org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "DisplayLogDialog.okay")}; JOptionPane.showOptionDialog(this, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "DisplayLogDialog.cannotFindLog"), org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "DisplayLogDialog.unableToShowLogFile"), JOptionPane.DEFAULT_OPTION, JOptionPane.PLAIN_MESSAGE, null, options, options[0]); } }//GEN-LAST:event_bnShowCaseLogActionPerformed @Messages({"AutoIngestControlPanel.errorMessage.jobPrioritization=An error occurred when prioritizing the job."}) private void bnPrioritizeJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeJobActionPerformed if 
(pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); Path manifestFilePath = (Path) (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal())); try { manager.prioritizeJob(manifestFilePath); } catch (AutoIngestManager.AutoIngestManagerException ex) { sysLogger.log(Level.SEVERE, "Error prioritizing a job", ex); MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_errorMessage_jobPrioritization()); } refreshTables(); pendingTable.clearSelection(); enablePrioritizeButtons(false); enableDeprioritizeButtons(false); AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor()); } }//GEN-LAST:event_bnPrioritizeJobActionPerformed private void bnOpenLogDirActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnOpenLogDirActionPerformed Path logDirPath = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "var", "log"); File logDir = logDirPath.toFile(); try { Desktop.getDesktop().open(logDir); } catch (IOException ex) { DialogDisplayer.getDefault().notify(new NotifyDescriptor.Message( String.format("Unable to open log directory %s:\n%s", logDirPath, ex.getLocalizedMessage()), NotifyDescriptor.ERROR_MESSAGE)); } }//GEN-LAST:event_bnOpenLogDirActionPerformed private void bnReprocessJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnReprocessJobActionPerformed if (completedTable.getModel().getRowCount() < 0 || completedTable.getSelectedRow() < 0) { return; } this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); Path manifestPath = (Path) completedTable.getModel().getValueAt(completedTable.convertRowIndexToModel(completedTable.getSelectedRow()), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal()); manager.reprocessJob(manifestPath); refreshTables(); AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor()); 
}//GEN-LAST:event_bnReprocessJobActionPerformed private void bnClusterMetricsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnClusterMetricsActionPerformed new AutoIngestMetricsDialog(this.getTopLevelAncestor()); }//GEN-LAST:event_bnClusterMetricsActionPerformed @Messages({"AutoIngestControlPanel.errorMessage.caseDeprioritization=An error occurred when deprioritizing the case. Some or all jobs may not have been deprioritized."}) private void bnDeprioritizeCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnDeprioritizeCaseActionPerformed if (pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); String caseName = (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.CASE.ordinal())).toString(); try { manager.deprioritizeCase(caseName); } catch (AutoIngestManager.AutoIngestManagerException ex) { sysLogger.log(Level.SEVERE, "Error deprioritizing a case", ex); MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_errorMessage_caseDeprioritization()); } refreshTables(); pendingTable.clearSelection(); enablePrioritizeButtons(false); enableDeprioritizeButtons(false); AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor()); } }//GEN-LAST:event_bnDeprioritizeCaseActionPerformed @Messages({"AutoIngestControlPanel.errorMessage.jobDeprioritization=An error occurred when deprioritizing the job."}) private void bnDeprioritizeJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnDeprioritizeJobActionPerformed if (pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); Path manifestFilePath = (Path) (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal())); 
try { manager.deprioritizeJob(manifestFilePath); } catch (AutoIngestManager.AutoIngestManagerException ex) { sysLogger.log(Level.SEVERE, "Error deprioritizing a job", ex); MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_errorMessage_jobDeprioritization()); } refreshTables(); pendingTable.clearSelection(); enablePrioritizeButtons(false); enableDeprioritizeButtons(false); AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor()); } }//GEN-LAST:event_bnDeprioritizeJobActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton bnCancelJob; private javax.swing.JButton bnCancelModule; private javax.swing.JButton bnClusterMetrics; private javax.swing.JButton bnDeprioritizeCase; private javax.swing.JButton bnDeprioritizeJob; private javax.swing.JButton bnExit; private javax.swing.JButton bnOpenLogDir; private javax.swing.JButton bnOptions; private javax.swing.JButton bnPause; private javax.swing.JButton bnPrioritizeCase; private javax.swing.JButton bnPrioritizeJob; private javax.swing.JButton bnRefresh; private javax.swing.JButton bnReprocessJob; private javax.swing.JButton bnShowCaseLog; private javax.swing.JButton bnShowProgress; private javax.swing.JScrollPane completedScrollPane; private javax.swing.JTable completedTable; private javax.swing.JLabel lbCompleted; private javax.swing.JLabel lbPending; private javax.swing.JLabel lbRunning; private javax.swing.JLabel lbServicesStatus; private javax.swing.JLabel lbStatus; private javax.swing.JScrollPane pendingScrollPane; private javax.swing.JTable pendingTable; private javax.swing.JScrollPane runningScrollPane; private javax.swing.JTable runningTable; private javax.swing.JTextField tbServicesStatusMessage; private javax.swing.JTextField tbStatusMessage; // End of variables declaration//GEN-END:variables private class AutoIngestTableModel extends DefaultTableModel { private static final long serialVersionUID = 1L; private AutoIngestTableModel(String[] 
headers, int i) { super(headers, i); } @Override public boolean isCellEditable(int row, int column) { return false; } @Override public Class<?> getColumnClass(int columnIndex) { if (columnIndex == JobsTableModelColumns.PRIORITY.ordinal()) { return Integer.class; } else if (columnIndex == JobsTableModelColumns.CREATED_TIME.ordinal() || columnIndex == JobsTableModelColumns.COMPLETED_TIME.ordinal() || columnIndex == JobsTableModelColumns.STARTED_TIME.ordinal() || columnIndex == JobsTableModelColumns.STAGE_TIME.ordinal()) { return Date.class; } else if (columnIndex == JobsTableModelColumns.STATUS.ordinal()) { return Boolean.class; } else { return super.getColumnClass(columnIndex); } } } }
apache-2.0
forGGe/kaa
server/node/src/main/java/org/kaaproject/kaa/server/admin/shared/endpoint/EndpointProfileViewDto.java
4134
/* * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.server.admin.shared.endpoint; import java.io.Serializable; import java.util.List; import org.kaaproject.avro.ui.shared.RecordField; import org.kaaproject.kaa.common.dto.EndpointGroupDto; import org.kaaproject.kaa.common.dto.TopicDto; import org.kaaproject.kaa.common.dto.VersionDto; import org.kaaproject.kaa.common.dto.admin.SdkProfileDto; public class EndpointProfileViewDto implements Serializable { private static final long serialVersionUID = 758374275609746719L; private byte[] endpointKeyHash; private SdkProfileDto sdkProfileDto; private String userId; private String userExternalId; private VersionDto profileSchemaVersion; private String profileSchemaName; private VersionDto serverProfileSchemaVersion; private String serverProfileSchemaName; private RecordField profileRecord; private RecordField serverProfileRecord; private List<EndpointGroupDto> endpointGroups; private List<TopicDto> topics; public EndpointProfileViewDto() { } public byte[] getEndpointKeyHash() { return endpointKeyHash; } public void setEndpointKeyHash(byte[] endpointKeyHash) { this.endpointKeyHash = endpointKeyHash; } public String getUserId() { return userId; } public void setUserId(String userId) { this.userId = userId; } public String getUserExternalId() { return userExternalId; } public void setUserExternalId(String userExternalId) { this.userExternalId = userExternalId; } public String 
getProfileSchemaName() { return profileSchemaName; } public void setProfileSchemaName(String profileSchemaName) { this.profileSchemaName = profileSchemaName; } public String getServerProfileSchemaName() { return serverProfileSchemaName; } public void setServerProfileSchemaName(String serverProfileSchemaName) { this.serverProfileSchemaName = serverProfileSchemaName; } public VersionDto getProfileSchemaVersion() { return profileSchemaVersion; } public void setProfileSchemaVersion(VersionDto profileSchemaVersion) { this.profileSchemaVersion = profileSchemaVersion; } public VersionDto getServerProfileSchemaVersion() { return serverProfileSchemaVersion; } public void setServerProfileSchemaVersion(VersionDto serverProfileSchemaVersion) { this.serverProfileSchemaVersion = serverProfileSchemaVersion; } public RecordField getProfileRecord() { return profileRecord; } public void setProfileRecord(RecordField profileRecord) { this.profileRecord = profileRecord; } public RecordField getServerProfileRecord() { return serverProfileRecord; } public void setServerProfileRecord(RecordField serverProfileRecord) { this.serverProfileRecord = serverProfileRecord; } public List<EndpointGroupDto> getEndpointGroups() { return endpointGroups; } public void setEndpointGroups(List<EndpointGroupDto> endpointGroups) { this.endpointGroups = endpointGroups; } public List<TopicDto> getTopics() { return topics; } public void setTopics(List<TopicDto> topics) { this.topics = topics; } public SdkProfileDto getSdkProfileDto() { return sdkProfileDto; } public void setSdkProfileDto(SdkProfileDto sdkProfileDto) { this.sdkProfileDto = sdkProfileDto; } }
apache-2.0
pfirmstone/JGDMS
qa/src/org/apache/river/qa/harness/SlaveRequest.java
1311
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.river.qa.harness; import java.io.Serializable; /** * The messages which can be sent to a <code>SlaveTest.</code> */ public interface SlaveRequest extends Serializable { /** * The <code>SlaveTest</code> calls this method after unmarshalling * this request object. * * @param slaveTest a reference to the <code>SlaveTest</code> * @throws Exception if the request fails */ public Object doSlaveRequest(SlaveTest slaveTest) throws Exception; }
apache-2.0
danilomendonca/A3Droid_Test_Greenhouse
alljoyn_java/src/main/java/org/alljoyn/bus/AboutKeys.java
3120
/* * Copyright AllSeen Alliance. All rights reserved. * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ package org.alljoyn.bus; /** * About keys as defined by About Feature */ public interface AboutKeys { /** * Required. The globally unique id for the application */ public final static String ABOUT_APP_ID = "AppId"; /** * Required. The default language supported by the device. IETF language * tags specified by RFC 5646. */ public final static String ABOUT_DEFAULT_LANGUAGE = "DefaultLanguage"; /** * Required. Set by platform specific means. */ public final static String ABOUT_DEVICE_NAME = "DeviceName"; /** * Required. Set using the platform specific means */ public final static String ABOUT_DEVICE_ID = "DeviceId"; /** * Required. This field specifies the Application Name. It is assigned by * the App Manufacturer */ public final static String ABOUT_APP_NAME = "AppName"; /** * Required. The manufacturer's name of the application. */ public final static String ABOUT_MANUFACTURER = "Manufacturer"; /** * Required. The application model number. */ public final static String ABOUT_MODEL_NUMBER = "ModelNumber"; /** * Required. This field returns the list of supported languages by the * device. */ public final static String ABOUT_SUPPORTED_LANGUAGES = "SupportedLanguages"; /** * Required. 
Detailed description */ public final static String ABOUT_DESCRIPTION = "Description"; /** * Optional. The date of manufacture using format YYYY-MM-DD (known as XML * DateTime Format) */ public final static String ABOUT_DATE_OF_MANUFACTURE = "DateOfManufacture"; /** * Required. The software version for the OEM software. */ public final static String ABOUT_SOFTWARE_VERSION = "SoftwareVersion"; /** * Required. The current version of the AllJoyn SDK utilized by the * application. */ public final static String ABOUT_AJ_SOFTWARE_VERSION = "AJSoftwareVersion"; /** * Optional. The device hardware version */ public final static String ABOUT_HARDWARE_VERSION = "HardwareVersion"; /** * Optional. The support URL to be populated by device OEM */ public final static String ABOUT_SUPPORT_URL = "SupportUrl"; }
apache-2.0
massakam/pulsar
bouncy-castle/bc/src/main/java/org/apache/pulsar/bcloader/BouncyCastleLoader.java
1607
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.bcloader; import static org.apache.pulsar.common.util.SecurityUtility.BC; import java.security.Provider; import java.security.Security; import lombok.extern.slf4j.Slf4j; import org.apache.pulsar.common.util.BCLoader; import org.bouncycastle.jce.provider.BouncyCastleProvider; /** * This is a Bouncy Castle provider Loader. */ @Slf4j public class BouncyCastleLoader implements BCLoader { public static Provider provider; static { if (Security.getProvider(BC) == null) { Security.addProvider(new BouncyCastleProvider()); } provider = Security.getProvider(BC); log.info("BouncyCastle Provider BC: {}", provider); } @Override public Provider getProvider() { return Security.getProvider(BC); } }
apache-2.0
MissionCriticalCloud/cosmic
cosmic-core/engine/schema/src/main/java/com/cloud/storage/dao/StoragePoolWorkDao.java
649
package com.cloud.storage.dao; import com.cloud.storage.StoragePoolWorkVO; import com.cloud.utils.db.GenericDao; import java.util.List; /** * Data Access Object for storage_pool table */ public interface StoragePoolWorkDao extends GenericDao<StoragePoolWorkVO, Long> { List<StoragePoolWorkVO> listPendingWorkForPrepareForMaintenanceByPoolId(long poolId); List<StoragePoolWorkVO> listPendingWorkForCancelMaintenanceByPoolId(long poolId); StoragePoolWorkVO findByPoolIdAndVmId(long poolId, long vmId); void removePendingJobsOnMsRestart(long msId, long poolId); List<Long> searchForPoolIdsForPendingWorkJobs(long msId); }
apache-2.0
hortonworks/streamline
cache/src/main/java/com/hortonworks/streamline/cache/Cache.java
1191
/** * Copyright 2017 Hortonworks. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package com.hortonworks.streamline.cache; import com.hortonworks.streamline.cache.stats.CacheStats; import com.hortonworks.streamline.cache.view.config.ExpiryPolicy; import java.util.Collection; import java.util.Map; public interface Cache<K, V> { V get(K key); Map<K, V> getAll(Collection<? extends K> keys); void put(K key, V val); void putAll(Map<? extends K,? extends V> entries); void remove(K key); void removeAll(Collection<? extends K> keys); void clear(); long size(); CacheStats stats(); ExpiryPolicy getExpiryPolicy(); }
apache-2.0
AludraTest/aludratest
src/main/java/org/aludratest/content/separated/webdecs/SeparatedFileReader.java
4110
/* * Copyright (C) 2010-2014 Hamburg Sud and the contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.aludratest.content.separated.webdecs; import java.io.BufferedReader; import java.io.Closeable; import java.io.IOException; import java.io.Reader; import org.aludratest.content.separated.data.SeparatedFileBeanData; import org.aludratest.content.separated.util.SeparatedUtil; import org.aludratest.exception.TechnicalException; import org.databene.commons.IOUtil; import org.databene.formats.DataContainer; import org.databene.formats.csv.CSVToJavaBeanMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** Reads data from a separated file. * @author Volker Bergmann * @param <E> Generic parameter to be set by final child classes to the child class itself */ public class SeparatedFileReader<E extends SeparatedFileBeanData> implements Closeable { private static final Logger LOGGER = LoggerFactory.getLogger(SeparatedFileReader.class); /** the reader that provides the separated file's character data. */ private CSVToJavaBeanMapper<E> rowIterator; /** Constructor of the SeparatedFileReader. * @param source the reader that provides the separated file's character data. 
* @param beanType the type of Java objects to read * @param separator the separator character to use * @throws IOException if opening the file fails */ public SeparatedFileReader(Reader source, Class<E> beanType, char separator) throws IOException { String[] featureNames = SeparatedUtil.featureNames(beanType); this.rowIterator = new CSVToJavaBeanMapper<E>(bufferedReader(source), beanType, separator, null, featureNames); } /** Reads a single text row of the separated file and creates a {@link SeparatedFileBeanData} instance configured by the row * data. * @return a {@link SeparatedFileBeanData} object representing the next row of the source document */ public E readRow() { if (this.rowIterator == null) { throw new TechnicalException("Row iterator has already been closed: " + this); } DataContainer<E> rowData = rowIterator.next(new DataContainer<E>()); if (rowData == null) { return null; // reached end of data } LOGGER.debug("Imported data row: {}", rowData); return rowData.getData(); } protected String[] readRaw() { if (this.rowIterator == null) { throw new TechnicalException("Row iterator has already been closed: " + this); } DataContainer<String[]> rowData = rowIterator.nextRaw(new DataContainer<String[]>()); if (rowData == null) { return null; // return null to signal end of data // NOSONAR } LOGGER.debug("Imported data row: {}", rowData); return rowData.getData(); } /** Closes the reader. */ @Override public void close() throws IOException { if (this.rowIterator != null) { IOUtil.close(this.rowIterator); this.rowIterator = null; } } // private helper ---------------------------------------------------------- /** Wraps a {@link Reader} with a {@link BufferedReader} if it is no instance of BufferedReader. * @return a BufferedReader that provides the content of the Reader provided. */ private static BufferedReader bufferedReader(Reader reader) { if (reader instanceof BufferedReader) { return (BufferedReader) reader; } else { return new BufferedReader(reader); } } }
apache-2.0
pranavlathigara/Carpaccio
carpaccio/src/main/java/com/github/florent37/carpaccio/Carpaccio.java
5178
package com.github.florent37.carpaccio; import android.content.Context; import android.content.res.TypedArray; import android.util.AttributeSet; import android.view.View; import android.widget.FrameLayout; import com.github.florent37.carpaccio.controllers.adapter.CarpaccioRecyclerViewAdapter; import com.github.florent37.carpaccio.controllers.adapter.OnItemClickListener; import com.github.florent37.carpaccio.controllers.adapter.RecyclerViewCallback; import com.github.florent37.carpaccio.mapping.MappingManager; import java.util.ArrayList; import java.util.List; /** * Created by florentchampigny on 21/07/15. */ public class Carpaccio extends FrameLayout { private static final String TAG = "Carpaccio"; protected CarpaccioManager carpaccioManager; public static boolean IN_EDIT_MODE = false; protected boolean onlyForPreview = false; //is a normal FrameLayout on device, only effective on Preview protected void handleAttributes(Context context, AttributeSet attrs) { TypedArray styledAttrs = context.obtainStyledAttributes(attrs, R.styleable.Carpaccio); { String register = styledAttrs.getString(R.styleable.Carpaccio_register); onlyForPreview = styledAttrs.getBoolean(R.styleable.Carpaccio_onlyForPreview, false); if((onlyForPreview && IN_EDIT_MODE) || !IN_EDIT_MODE){ if (register != null && carpaccioManager != null) carpaccioManager.registerControllers(register); } } styledAttrs.recycle(); } public Carpaccio(Context context) { super(context); carpaccioManager = new CarpaccioManager(new MappingManager()); } public Carpaccio(Context context, AttributeSet attrs) { super(context, attrs); carpaccioManager = new CarpaccioManager(new MappingManager()); handleAttributes(context, attrs); } public Carpaccio(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); carpaccioManager = new CarpaccioManager(new MappingManager()); handleAttributes(context, attrs); } @Override protected void onFinishInflate() { super.onFinishInflate(); IN_EDIT_MODE = 
isInEditMode(); if (onlyForPreview) { if(IN_EDIT_MODE) { execute(); }else{ CarpaccioLogger.d(TAG, "This Carpaccio is only effective on preview"); } }else{ execute(); } } public void execute() { if (carpaccioManager != null) { carpaccioManager.findCarpaccioControlledViews(this); carpaccioManager.executeActionsOnViews(); } } public CarpaccioManager getCarpaccioManager() { return carpaccioManager; } public void mapObject(String name, Object object) { if (carpaccioManager != null) carpaccioManager.mapObject(name, object); } //region mapList public <T> T registerAdapter(String mappedName, Object adapter) { if (carpaccioManager != null) return (T) carpaccioManager.registerAdapter(mappedName, adapter); return null; } public CarpaccioRecyclerViewAdapter getAdapter(String mappedName) { if (carpaccioManager != null) return carpaccioManager.getAdapter(mappedName); return null; } public void onItemClick(String mappedName, OnItemClickListener onItemClickListener) { CarpaccioRecyclerViewAdapter adapter = getAdapter(mappedName); if (adapter != null) { adapter.setOnItemClickListener(onItemClickListener); } } public void setRecyclerViewCallback(String mappedName, RecyclerViewCallback recyclerViewCallback) { CarpaccioRecyclerViewAdapter adapter = getAdapter(mappedName); if (adapter != null) { adapter.setRecyclerViewCallback(recyclerViewCallback); } } public void mapList(String name, List list) { if (carpaccioManager != null) carpaccioManager.mapList(name, list); } public void appendList(String name, List list){ if (carpaccioManager != null) carpaccioManager.appendList(name, list); } public List getMappedList(String mapName) { if (carpaccioManager != null) return carpaccioManager.getMappedList(mapName); return null; } public void addChildViews(View view) { if (carpaccioManager != null) carpaccioManager.addChildViews(view); } public Object bindView(View view, String mapName, int position) { if (carpaccioManager != null) return carpaccioManager.bindView(view, mapName, position); return 
null; } public void addCarpaccioView(View view) { if (carpaccioManager != null) { //carpaccioManager.addView(view); //TODO List<View> childrens = new ArrayList<>(); carpaccioManager.findCarpaccioControlledViews(view, childrens); carpaccioManager.executeActionsOnViews(childrens, null); } } //endregion }
apache-2.0
howepeng/isis
core/metamodel/src/test/java/org/apache/isis/core/metamodel/facets/object/cssclass/CssClassFacetOnTypeAnnotationFactoryTest.java
2650
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.isis.core.metamodel.facets.object.cssclass; import org.junit.Test; import org.apache.isis.applib.annotation.CssClass; import org.apache.isis.core.metamodel.facetapi.Facet; import org.apache.isis.core.metamodel.facets.AbstractFacetFactoryJUnit4TestCase; import org.apache.isis.core.metamodel.facets.FacetFactory; import org.apache.isis.core.metamodel.facets.members.cssclass.CssClassFacet; import org.apache.isis.core.metamodel.facets.members.cssclass.CssClassFacetAbstract; import org.apache.isis.core.metamodel.facets.object.cssclass.annotation.CssClassFacetOnTypeAnnotationFactory; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; public class CssClassFacetOnTypeAnnotationFactoryTest extends AbstractFacetFactoryJUnit4TestCase { @Test public void testCssClassAnnotationPickedUpOnClass() { final CssClassFacetOnTypeAnnotationFactory facetFactory = new CssClassFacetOnTypeAnnotationFactory(); facetFactory.setSpecificationLoader(mockSpecificationLoaderSpi); 
@CssClass("testClass") class Customer { } expectNoMethodsRemoved(); facetFactory.process(new FacetFactory.ProcessClassContext(Customer.class, null, mockMethodRemover, facetedMethod)); final Facet facet = facetedMethod.getFacet(CssClassFacet.class); assertThat(facet, is(not(nullValue()))); assertThat(facet, is(instanceOf(CssClassFacetAbstract.class))); final CssClassFacetAbstract cssClassFacetAbstract = (CssClassFacetAbstract) facet; assertThat(cssClassFacetAbstract.cssClass(null), equalTo("testClass")); } }
apache-2.0
Slania/daytrader
javaee6/modules/web/src/main/java/org/apache/geronimo/daytrader/javaee6/core/api/TradeDBServices.java
1381
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.geronimo.daytrader.javaee6.core.api; /** * TradeDBServices interface specifies the DB specific methods provided by SOME TradeServices instances. * * @see TradeJDBCDirect * @see TradeJEEDirect * */ public interface TradeDBServices { /** * TradeBuildDB needs this abstracted method */ public String checkDBProductName() throws Exception; /** * TradeBuildDB needs this abstracted method */ public boolean recreateDBTables(Object[] sqlBuffer, java.io.PrintWriter out) throws Exception; }
apache-2.0
jbeecham/ovirt-engine
backend/manager/modules/vdsbroker/src/main/java/org/ovirt/engine/core/vdsbroker/irsbroker/DestroyImageVDSCommand.java
1678
package org.ovirt.engine.core.vdsbroker.irsbroker; import org.ovirt.engine.core.common.asynctasks.AsyncTaskCreationInfo; import org.ovirt.engine.core.common.asynctasks.AsyncTaskType; import org.ovirt.engine.core.common.vdscommands.DestroyImageVDSCommandParameters; import org.ovirt.engine.core.compat.Guid; public class DestroyImageVDSCommand<P extends DestroyImageVDSCommandParameters> extends IrsCreateCommand<P> { public DestroyImageVDSCommand(P parameters) { super(parameters); } @Override protected void ExecuteIrsBrokerCommand() { DestroyImageVDSCommandParameters params = getParameters(); int imageListSize = params.getImageList().size(); String[] volUUID = new String[imageListSize]; int i = 0; for (Guid tempGuid : params.getImageList()) { volUUID[i++] = tempGuid.toString(); } uuidReturn = getIrsProxy().deleteVolume(params.getStorageDomainId().toString(), params.getStoragePoolId().toString(), params.getImageGroupId().toString(), volUUID, String.valueOf(params.getPostZero()), String.valueOf(params.getForce())); ProceedProxyReturnValue(); Guid taskID = new Guid(uuidReturn.mUuid); getVDSReturnValue() .setCreationInfo( new AsyncTaskCreationInfo(taskID, AsyncTaskType.deleteVolume, getParameters() .getStoragePoolId())); } }
apache-2.0
hernsys/uberfire-0.4.0.CR1
uberfire-workbench/uberfire-workbench-panel-managers/uberfire-panel-manager-nswe/src/test/java/org/uberfire/client/mvp/AbstractWorkbenchContextActivityTest.java
1100
package org.uberfire.client.mvp; import java.util.HashSet; import org.junit.Test; import org.uberfire.mvp.Command; import org.uberfire.mvp.PlaceRequest; import org.uberfire.mvp.impl.DefaultPlaceRequest; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.*; public class AbstractWorkbenchContextActivityTest extends BaseWorkbenchTest { @Test public void testAbstractWorkbenchContextActivityLaunch() throws Exception { final PlaceRequest somewhere = new DefaultPlaceRequest( "Somewhere" ); final AbstractWorkbenchContextActivity activity = mock( AbstractWorkbenchContextActivity.class ); HashSet<Activity> activities = new HashSet<Activity>( 1 ) {{ add( activity ); }}; when( activityManager.getActivities( somewhere ) ).thenReturn( activities ); placeManager = new PlaceManagerImplUnitTestWrapper( activity, panelManager ); placeManager.goTo( somewhere ); verify( activity , never()).launch( eq( somewhere ), any(Command.class)); } }
apache-2.0
googlearchive/androidpay-quickstart
app/src/main/java/com/google/android/gms/samples/wallet/Constants.java
2355
/* * Copyright Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.gms.samples.wallet; import com.google.android.gms.wallet.WalletConstants; /** * Constants used by Google Wallet SDK Sample. */ public class Constants { // Environment to use when creating an instance of Wallet.WalletOptions public static final int WALLET_ENVIRONMENT = WalletConstants.ENVIRONMENT_TEST; public static final String MERCHANT_NAME = "Awesome Bike Store"; // Intent extra keys public static final String EXTRA_ITEM_ID = "EXTRA_ITEM_ID"; public static final String EXTRA_MASKED_WALLET = "EXTRA_MASKED_WALLET"; public static final String EXTRA_FULL_WALLET = "EXTRA_FULL_WALLET"; public static final String CURRENCY_CODE_USD = "USD"; // values to use with KEY_DESCRIPTION public static final String DESCRIPTION_LINE_ITEM_SHIPPING = "Shipping"; public static final String DESCRIPTION_LINE_ITEM_TAX = "Tax"; /** * Sample list of items for sale. The list would normally be fetched from * the merchant's servers. 
*/ public static final ItemInfo[] ITEMS_FOR_SALE = { new ItemInfo("Simple Bike", "Features", 300000000, 9990000, CURRENCY_CODE_USD, "seller data 0", R.drawable.bike000), new ItemInfo("Adjustable Bike", "More features", 400000000, 9990000, CURRENCY_CODE_USD, "seller data 1", R.drawable.bike001), new ItemInfo("Conference Bike", "Even more features", 600000000, 9990000, CURRENCY_CODE_USD, "seller data 2", R.drawable.bike002) }; // To change promotion item, change the item here and also corresponding text/image // in fragment_promo_address_lookup.xml layout. public static final int PROMOTION_ITEM = 2; }
apache-2.0
odiszapc/stem
components/client/src/main/java/org/stem/client/ReplicaResponseHandler.java
3058
/* * Copyright 2014 Alexey Plotnik * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.stem.client; import com.google.common.util.concurrent.Uninterruptibles; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; public class ReplicaResponseHandler { private static final Logger logger = LoggerFactory.getLogger(ReplicaResponseHandler.class); private final ConsistentResponseHandler context; final DefaultResultFuture future; private volatile Throwable cause = null; private final Host endpoint; private Message.Response response; private volatile boolean completed; public ReplicaResponseHandler(ConsistentResponseHandler context, DefaultResultFuture future) { this.context = context; this.future = future; endpoint = context.session.router.getHost(future.request()); // TODO: optimize } public void startWithTimeout(long timeoutMs) { try { this.response = Uninterruptibles.getUninterruptibly(this.future, timeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { this.cause = e.getCause(); logger.error("Error sending request {} to {}, {}", this.future.request(), this.endpoint, this.cause.getMessage()); } catch (TimeoutException e) { this.cause = e; logger.error("Timed out sending request {} to {}, {}", this.future.request(), this.endpoint, e.getMessage()); } finally { this.completed = true; context.onRequestFinished(this); } } public Host 
getHost() { return endpoint; } public Message.Response getResponse() { return response; } public boolean isSuccess() { return null == cause; } public Throwable getError() { return cause; } public String getErrorMessage() { return cause.getMessage(); } public boolean isCompleted() { return completed; } public static interface SuccessCondition { boolean success(); } public static class ReadSuccessCondition implements SuccessCondition { private ReplicaResponseHandler handler; public ReadSuccessCondition(ReplicaResponseHandler handler) { this.handler = handler; } @Override public boolean success() { return handler.isSuccess(); } } }
apache-2.0
lmenezes/elasticsearch
src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java
2035
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.settings.loader; import org.elasticsearch.common.settings.Settings; import org.junit.Test; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; /** * */ public class JsonSettingsLoaderTests { @Test public void testSimpleJsonSettings() throws Exception { Settings settings = settingsBuilder() .loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.json") .build(); assertThat(settings.get("test1.value1"), equalTo("value1")); assertThat(settings.get("test1.test2.value2"), equalTo("value2")); assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2)); // check array assertThat(settings.get("test1.test3.0"), equalTo("test3-1")); assertThat(settings.get("test1.test3.1"), equalTo("test3-2")); assertThat(settings.getAsArray("test1.test3").length, equalTo(2)); assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1")); assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2")); } }
apache-2.0
andrewgaul/modernizer-maven-plugin
modernizer-maven-plugin/src/it/issue-28-option-enabled/src/main/java/org/gaul/it/Generated.java
941
/* * Copyright 2014-2021 Andrew Gaul <andrew@gaul.org> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gaul.it; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.CLASS; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.Target; @Documented @Retention(CLASS) @Target(TYPE) public @interface Generated {}
apache-2.0
ysnows/AndroidCommons
library/src/main/java/com/alexvasilkov/android/commons/state/InstanceState.java
310
package com.alexvasilkov.android.commons.state; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target(ElementType.FIELD) @Retention(RetentionPolicy.RUNTIME) public @interface InstanceState { }
apache-2.0
google/binnavi
src/main/java/com/google/security/zynamics/binnavi/Gui/SettingsDialog/CColorSettingsPanel.java
18978
// Copyright 2011-2016 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.security.zynamics.binnavi.Gui.SettingsDialog; import com.google.security.zynamics.binnavi.config.ColorsConfigItem; import com.google.security.zynamics.binnavi.config.ConfigManager; import com.google.security.zynamics.binnavi.config.DebugColorsConfigItem; import com.google.security.zynamics.zylib.gui.ColorPanel.ColorPanel; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Dimension; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.GridLayout; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.border.EmptyBorder; import javax.swing.border.TitledBorder; /** * In this panel of the settings dialog, the user can configure all kinds of different color * settings. */ public final class CColorSettingsPanel extends CAbstractSettingsPanel { /** * Width of all color panels. */ private static final int COLORPANEL_WIDTH = 200; /** * Height of all color panels. */ private static final int COLORPANEL_HEIGHT = 25; /** * Color panel used to edit the color of normal functions. */ private final ColorPanel m_normalFunctionColorPanel; /** * Color panel used to edit the color of imported functions. */ private final ColorPanel m_importFunctionColorPanel; /** * Color panel used to edit the color of library functions. 
*/ private final ColorPanel m_libraryFunctionColorPanel; /** * Color panel used to edit the color of thunk functions. */ private final ColorPanel m_thunkFunctionColorPanel; /** * Color panel used to edit the color of adjustor thunk functions. */ private final ColorPanel m_adjustorThunkFunctionColorPanel; /** * Color panel used to edit the color of addresses in graphs. */ private final ColorPanel m_addressColorPanel; /** * Color panel used to edit the color of mnemonics. */ private final ColorPanel m_mnemonicColorPanel; /** * Color panel used to edit the color of literals. */ private final ColorPanel m_literalsColorPanel; /** * Color panel used to edit the color of registers. */ private final ColorPanel m_registersColorPanel; /** * Color panel used to edit the color of function references. */ private final ColorPanel m_functionColorPanel; /** * Color panel used to edit the color of variables. */ private final ColorPanel m_variableColorPanel; /** * Color panel used to edit the color of expression lists. */ private final ColorPanel m_expressionListColorPanel; /** * Color panel used to edit the color of memory references. */ private final ColorPanel m_memoryReferencesColorPanel; /** * Color panel used to edit the color of operators. */ private final ColorPanel m_operatorColorPanel; /** * Color panel used to edit the color of operand separators. */ private final ColorPanel m_operandSeparatorColorPanel; /** * Color panel used to edit the color of prefixes. */ private final ColorPanel m_prefixColorPanel; /** * Color panel used to edit the color of native basic blocks. */ private final ColorPanel m_basicblocksPanel; /** * Color panel used to edit the color of unconditional jump edges. */ private final ColorPanel m_unconditionalJumpsPanel; /** * Color panel used to edit the color of conditional jump edges (taken). */ private final ColorPanel m_conditionalJumpsTakenPanel; /** * Color panel used to edit the color of conditional jump edges (not taken). 
*/ private final ColorPanel m_conditionalJumpsNotTakenPanel; /** * Color panel used to edit the color of inlined jumps. */ private final ColorPanel m_enterInlinedJumpsPanel; /** * Color panel used to edit the color of leaving inlined jumps. */ private final ColorPanel m_leaveInlinedJumpsPanel; /** * Color panel used to edit the color of switch edges. */ private final ColorPanel m_switchPanel; /** * Color panel used to edit the color of text node edges. */ private final ColorPanel m_textJumpsPanel; /** * Color panel used to edit the color of the PC during debugging. */ private final ColorPanel m_activeLineColorPanel; /** * Color panel used to edit the color of active breakpoints. */ private final ColorPanel m_activeBreakpointColorPanel; /** * Color panel used to edit the color of inactive breakpoints. */ private final ColorPanel m_inactiveBreakpointColorPanel; /** * Color panel used to edit the color of enabled breakpoints. */ private final ColorPanel m_enabledBreakpointColorPanel; /** * Color panel used to edit the color of disabled breakpoints. */ private final ColorPanel m_disabledBreakpointColorPanel; /** * Color panel used to edit the color of invalid breakpoints. */ private final ColorPanel m_invalidBreakpointColorPanel; /** * Color panel used to edit the color of deleting breakpoints. */ private final ColorPanel m_deletingBreakpointColorPanel; /** * Color panel used to edit the color of hit breakpoints. */ private final ColorPanel m_hitBreakpointColorPanel; /** * Creates a new color settings panel. 
*/ public CColorSettingsPanel() { super(new BorderLayout()); final JPanel mainPanel = new JPanel(new BorderLayout()); final JPanel innerMainPanel = new JPanel(new GridBagLayout()); final JPanel functionTypeColorPanel = new JPanel(new GridLayout(5, 1, 3, 3)); final ColorsConfigItem colors = ConfigManager.instance().getColorSettings(); m_normalFunctionColorPanel = buildRow(functionTypeColorPanel, "Normal Function" + ":", "Color used to paint normal functions in callgraphs and view tables", new ColorPanel( colors.getNormalFunctionColor(), true, true), false); m_importFunctionColorPanel = buildRow(functionTypeColorPanel, "Imported Function" + ":", "Color used to paint imported functions in callgraphs and view tables", new ColorPanel( colors.getImportedFunctionColor(), true, true), false); m_libraryFunctionColorPanel = buildRow(functionTypeColorPanel, "Library Function" + ":", "Color used to paint library functions in callgraphs and view tables", new ColorPanel( colors.getLibraryFunctionColor(), true, true), false); m_thunkFunctionColorPanel = buildRow(functionTypeColorPanel, "Thunk Function" + ":", "Color used to paint thunk functions in callgraphs and view tables", new ColorPanel( colors.getThunkFunctionColor(), true, true), false); m_adjustorThunkFunctionColorPanel = buildRow(functionTypeColorPanel, "Unknown Function" + ":", "Color used to paint thunk adjustor functions in callgraphs and view tables", new ColorPanel(colors.getAdjustorThunkFunctionColor(), true, true), true); functionTypeColorPanel.setBorder(new TitledBorder("Function Colors")); final JPanel instructionColorPanel = new JPanel(new GridLayout(11, 1, 3, 3)); instructionColorPanel.setBorder(new TitledBorder("Instruction Colors")); m_addressColorPanel = buildRow(instructionColorPanel, "Addresses" + ":", "Color used to paint addresses in graphs", new ColorPanel(colors.getAddressColor(), true, true), false); m_mnemonicColorPanel = buildRow(instructionColorPanel, "Mnemonics" + ":", "Color used to paint 
mnemonics in graphs", new ColorPanel(colors.getMnemonicColor(), true, true), false); m_literalsColorPanel = buildRow(instructionColorPanel, "Immediates" + ":", "Color used to paint immediate values in graphs", new ColorPanel(colors.getImmediateColor(), true, true), false); m_registersColorPanel = buildRow(instructionColorPanel, "Registers" + ":", "Color used to paint registers in graphs", new ColorPanel(colors.getRegisterColor(), true, true), false); m_functionColorPanel = buildRow(instructionColorPanel, "Functions" + ":", "Color used to paint function references in graphs", new ColorPanel(colors.getFunctionColor(), true, true), false); m_variableColorPanel = buildRow(instructionColorPanel, "Variables" + ":", "Color used to paint variables in graphs", new ColorPanel(colors.getVariableColor(), true, true), false); m_expressionListColorPanel = buildRow(instructionColorPanel, "Expression List" + ":", "Color used to paint expression lists in graphs", new ColorPanel(colors.getExpressionListColor(), true, true), false); m_memoryReferencesColorPanel = buildRow(instructionColorPanel, "Memory References" + ":", "Color used to paint memory references in graphs", new ColorPanel(colors.getMemRefColor(), true, true), false); m_operatorColorPanel = buildRow(instructionColorPanel, "Operators" + ":", "Color used to paint operators in graphs", new ColorPanel(colors.getOperatorColor(), true, true), false); m_operandSeparatorColorPanel = buildRow(instructionColorPanel, "Operand Separators" + ":", "Color used to paint operand separators in graphs", new ColorPanel(colors.getOperandSeparatorColor(), true, true), false); m_prefixColorPanel = buildRow(instructionColorPanel, "Prefixes" + ":", "Color used to paint prefixes in graphs", new ColorPanel(colors.getPrefixColor(), true, true), true); final JPanel graphColorPanel = new JPanel(new GridLayout(8, 1, 3, 3)); graphColorPanel.setBorder(new TitledBorder("Graph Colors")); m_basicblocksPanel = buildRow(graphColorPanel, "Basic Blocks" + ":", 
"Color used to paint basic blocks in graphs", new ColorPanel(colors.getBasicBlocksColor(), true, true), false); m_unconditionalJumpsPanel = buildRow(graphColorPanel, "Unconditional Jumps" + ":", "Color of unconditional jumps", new ColorPanel(colors.getUnconditionalJumpColor(), true, true), false); m_conditionalJumpsTakenPanel = buildRow(graphColorPanel, "Conditional Jumps (Taken)" + ":", "Color of conditional jumps which are taken", new ColorPanel(colors.getConditionalJumpTrueColor(), true, true), false); m_conditionalJumpsNotTakenPanel = buildRow(graphColorPanel, "Conditional Jumps (Not taken)" + ":", "Color of conditional jumps which are not taken", new ColorPanel(colors.getConditionalJumpFalseColor(), true, true), false); m_enterInlinedJumpsPanel = buildRow(graphColorPanel, "Entering inlined functions" + ":", "Color of edges used to enter inlined functions", new ColorPanel(colors.getEnterInlinedJumpColor(), true, true), false); m_leaveInlinedJumpsPanel = buildRow(graphColorPanel, "Leaving inlined functions" + ":", "Color of edges used to leave inlined functions", new ColorPanel(colors.getLeaveInlinedJumpColor(), true, true), true); m_switchPanel = buildRow(graphColorPanel, "Switches" + ":", "Color of edges that belong to switch statements", new ColorPanel(colors.getSwitchJumpColor(), true, true), true); m_textJumpsPanel = buildRow(graphColorPanel, "Edges to comment nodes" + ":", "Color of edges that connect comment nodes to other nodes", new ColorPanel(colors.getTextEdgeColor(), true, true), true); final JPanel debuggerColorPanel = new JPanel(new GridLayout(8, 1, 3, 3)); final DebugColorsConfigItem debuggerColors = ConfigManager.instance().getDebuggerColorSettings(); m_activeLineColorPanel = buildRow(debuggerColorPanel, "Active Line" + ":", "Color used to show the line at the program counter while debugging.", new ColorPanel( debuggerColors.getActiveLine(), true, true), false); m_activeBreakpointColorPanel = buildRow(debuggerColorPanel, "Active Breakpoint" + 
":", "Color used to show active breakpoints while debugging.", new ColorPanel(debuggerColors.getBreakpointActive(), true, true), false); m_inactiveBreakpointColorPanel = buildRow(debuggerColorPanel, "Inactive Breakpoint" + ":", "Color used to show inactive breakpoints while debugging.", new ColorPanel(debuggerColors.getBreakpointInactive(), true, true), false); m_enabledBreakpointColorPanel = buildRow(debuggerColorPanel, "Enabled Breakpoint" + ":", "Color used to show enabled breakpoints while debugging.", new ColorPanel(debuggerColors.getBreakpointEnabled(), true, true), false); m_disabledBreakpointColorPanel = buildRow(debuggerColorPanel, "Disabled Breakpoint" + ":", "Color used to show disabled breakpoints while debugging.", new ColorPanel(debuggerColors.getBreakpointDisabled(), true, true), false); m_hitBreakpointColorPanel = buildRow(debuggerColorPanel, "Hit Breakpoint" + ":", "Color used to show hit breakpoints while debugging.", new ColorPanel(debuggerColors.getBreakpointHit(), true, true), false); m_invalidBreakpointColorPanel = buildRow(debuggerColorPanel, "Invalid Breakpoint" + ":", "Color used to show invalid breakpoints while debugging.", new ColorPanel(debuggerColors.getBreakpointInvalid(), true, true), false); m_deletingBreakpointColorPanel = buildRow(debuggerColorPanel, "Deleting Breakpoint" + ":", "Color used to show deleting breakpoints while debugging.", new ColorPanel(debuggerColors.getBreakpointDeleting(), true, true), true); debuggerColorPanel.setBorder(new TitledBorder("Debugger Colors")); final GridBagConstraints constraints = new GridBagConstraints(); constraints.gridx = 0; constraints.gridy = 0; constraints.anchor = GridBagConstraints.FIRST_LINE_START; constraints.weightx = 1; constraints.fill = GridBagConstraints.HORIZONTAL; innerMainPanel.add(functionTypeColorPanel, constraints); constraints.gridy = 1; innerMainPanel.add(instructionColorPanel, constraints); constraints.gridy = 2; innerMainPanel.add(graphColorPanel, constraints); 
constraints.gridy = 3; innerMainPanel.add(debuggerColorPanel, constraints); mainPanel.add(innerMainPanel, BorderLayout.NORTH); add(new JScrollPane(mainPanel)); } /** * Builds a single row of components in the panel. * * @param <T> Type of the editing component. * * @param panel Panel the editing component is added to. * @param labelText Text of the label that describes the option. * @param hint Hint shown as a tooltip. * @param component The component to add to the panel. * @param isLast True, if the component is the last component to be added to the panel. * * @return The panel passed to the function. */ private static <T extends Component> T buildRow(final JPanel panel, final String labelText, final String hint, final T component, final boolean isLast) { component.setPreferredSize(new Dimension(COLORPANEL_WIDTH, COLORPANEL_HEIGHT)); final JPanel rowPanel = new JPanel(new BorderLayout()); rowPanel.setBorder(new EmptyBorder(0, 2, isLast ? 2 : 0, 2)); rowPanel.add(new JLabel(labelText), BorderLayout.CENTER); rowPanel.add(CHintCreator.createHintPanel(component, hint), BorderLayout.EAST); panel.add(rowPanel); return component; } @Override protected boolean save() { final ColorsConfigItem colors = ConfigManager.instance().getColorSettings(); colors.setNormalFunctionColor(m_normalFunctionColorPanel.getColor()); colors.setImportedFunctionColor(m_importFunctionColorPanel.getColor()); colors.setLibraryFunctionColor(m_libraryFunctionColorPanel.getColor()); colors.setThunkFunctionColor(m_thunkFunctionColorPanel.getColor()); colors.setAdjustorThunkFunctionColor(m_adjustorThunkFunctionColorPanel.getColor()); colors.setAddressColor(m_addressColorPanel.getColor()); colors.setMnemonicColor(m_mnemonicColorPanel.getColor()); colors.setImmediateColor(m_literalsColorPanel.getColor()); colors.setRegisterColor(m_registersColorPanel.getColor()); colors.setFunctionColor(m_functionColorPanel.getColor()); colors.setVariableColor(m_variableColorPanel.getColor()); 
colors.setExpressionListColor(m_expressionListColorPanel.getColor()); colors.setMemRefColor(m_memoryReferencesColorPanel.getColor()); colors.setOperatorColor(m_operatorColorPanel.getColor()); colors.setOperandSeperatorColor(m_operandSeparatorColorPanel.getColor()); colors.setPrefixColor(m_prefixColorPanel.getColor()); colors.setAddressColor(m_addressColorPanel.getColor()); colors.setBasicBlocksColor(m_basicblocksPanel.getColor()); colors.setUnconditionalJumpColor(m_unconditionalJumpsPanel.getColor()); colors.setConditionalJumpTrueColor(m_conditionalJumpsTakenPanel.getColor()); colors.setConditionalJumpFalseColor(m_conditionalJumpsNotTakenPanel.getColor()); colors.setEnterInlinedJumpColor(m_enterInlinedJumpsPanel.getColor()); colors.setLeaveInlinedJumpColor(m_leaveInlinedJumpsPanel.getColor()); colors.setSwitchJumpColor(m_switchPanel.getColor()); colors.setTextEdgeColor(m_textJumpsPanel.getColor()); final DebugColorsConfigItem debuggerColors = ConfigManager.instance().getDebuggerColorSettings(); debuggerColors.setActiveLine(m_activeLineColorPanel.getColor()); debuggerColors.setBreakpointActive(m_activeBreakpointColorPanel.getColor()); debuggerColors.setBreakpointInactive(m_inactiveBreakpointColorPanel.getColor()); debuggerColors.setBreakpointEnabled(m_enabledBreakpointColorPanel.getColor()); debuggerColors.setBreakpointDisabled(m_disabledBreakpointColorPanel.getColor()); debuggerColors.setBreakpointInvalid(m_invalidBreakpointColorPanel.getColor()); debuggerColors.setBreakpointDeleting(m_deletingBreakpointColorPanel.getColor()); debuggerColors.setBreakpointHit(m_hitBreakpointColorPanel.getColor()); return false; } }
apache-2.0
tduehr/cas
support/cas-server-support-validation/src/main/java/org/apereo/cas/web/view/Cas30ResponseView.java
4459
package org.apereo.cas.web.view;

import org.apereo.cas.CasProtocolConstants;
import org.apereo.cas.authentication.AuthenticationServiceSelectionPlan;
import org.apereo.cas.authentication.ProtocolAttributeEncoder;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.validation.AuthenticationAttributeReleasePolicy;
import org.apereo.cas.validation.CasProtocolAttributesRenderer;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.springframework.web.servlet.View;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.HashMap;
import java.util.Map;

/**
 * Renders and prepares CAS3 validation views. This view only assembles the base
 * model (principal + protocol attributes) and delegates to the real view to
 * render the final output.
 *
 * @author Misagh Moayyed
 * @since 4.1.0
 */
@Slf4j
public class Cas30ResponseView extends Cas20ResponseView {

    /** Renders the encoded attribute map into the protocol's textual form. */
    private final CasProtocolAttributesRenderer attributesRenderer;

    public Cas30ResponseView(final boolean successResponse,
                             final ProtocolAttributeEncoder protocolAttributeEncoder,
                             final ServicesManager servicesManager,
                             final View view,
                             final AuthenticationAttributeReleasePolicy authenticationAttributeReleasePolicy,
                             final AuthenticationServiceSelectionPlan serviceSelectionStrategy,
                             final CasProtocolAttributesRenderer attributesRenderer) {
        super(successResponse, protocolAttributeEncoder, servicesManager, view,
            authenticationAttributeReleasePolicy, serviceSelectionStrategy);
        this.attributesRenderer = attributesRenderer;
    }

    @Override
    protected void prepareMergedOutputModel(final Map<String, Object> model, final HttpServletRequest request,
                                            final HttpServletResponse response) throws Exception {
        super.prepareMergedOutputModel(model, request, response);

        val selectedService = authenticationRequestServiceSelectionStrategies.resolveService(getServiceFrom(model));
        val registeredService = this.servicesManager.findServiceBy(selectedService);

        val principalAttributes = getCasPrincipalAttributes(model, registeredService);
        LOGGER.debug("Processed principal attributes from the output model to be [{}]", principalAttributes.keySet());

        val attributes = new HashMap<String, Object>(principalAttributes);
        attributes.putAll(getCasProtocolAuthenticationAttributes(model, registeredService));
        LOGGER.debug("Final collection of attributes for the response are [{}].", attributes.keySet());

        putCasResponseAttributesIntoModel(model, attributes, registeredService);
    }

    /**
     * Collects the principal attributes released for the response, as a
     * multi-valued attribute map taken from the output model.
     *
     * @param model             the output model
     * @param registeredService the registered service matched for this response
     * @return the principal attributes for the response
     */
    protected Map<String, Object> getCasPrincipalAttributes(final Map<String, Object> model,
                                                            final RegisteredService registeredService) {
        return super.getPrincipalAttributesAsMultiValuedAttributes(model);
    }

    /**
     * Encodes the merged attribute map and stores both the encoded and the
     * rendered/formatted variants in the output model.
     *
     * @param model             the output model
     * @param attributes        merged principal + protocol attributes
     * @param registeredService the registered service matched for this response
     */
    protected void putCasResponseAttributesIntoModel(final Map<String, Object> model,
                                                     final Map<String, Object> attributes,
                                                     final RegisteredService registeredService) {
        LOGGER.debug("Beginning to encode attributes for the response");
        val encodedAttributes = this.protocolAttributeEncoder.encodeAttributes(attributes, registeredService);
        LOGGER.debug("Encoded attributes for the response are [{}]", encodedAttributes);
        super.putIntoModel(model, CasProtocolConstants.VALIDATION_CAS_MODEL_ATTRIBUTE_NAME_ATTRIBUTES, encodedAttributes);

        val formattedAttributes = this.attributesRenderer.render(encodedAttributes);
        super.putIntoModel(model, CasProtocolConstants.VALIDATION_CAS_MODEL_ATTRIBUTE_NAME_FORMATTED_ATTRIBUTES, formattedAttributes);
    }
}
apache-2.0
sdole/aws-sdk-java
aws-java-sdk-storagegateway/src/main/java/com/amazonaws/services/storagegateway/model/GatewayInfo.java
6480
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.storagegateway.model;

import java.io.Serializable;
import java.util.Objects;

/**
 * Value object holding summary information about a gateway: its ARN, type,
 * operational state and name. All fields are nullable strings; equality,
 * hashing and cloning are purely field-based.
 */
public class GatewayInfo implements Serializable, Cloneable {

    private String gatewayARN;

    private String gatewayType;

    private String gatewayOperationalState;

    private String gatewayName;

    /**
     * @param gatewayARN the gateway's Amazon Resource Name
     */
    public void setGatewayARN(String gatewayARN) {
        this.gatewayARN = gatewayARN;
    }

    /**
     * @return the gateway's Amazon Resource Name
     */
    public String getGatewayARN() {
        return this.gatewayARN;
    }

    /**
     * @param gatewayARN the gateway's Amazon Resource Name
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public GatewayInfo withGatewayARN(String gatewayARN) {
        setGatewayARN(gatewayARN);
        return this;
    }

    /**
     * @param gatewayType the gateway type
     */
    public void setGatewayType(String gatewayType) {
        this.gatewayType = gatewayType;
    }

    /**
     * @return the gateway type
     */
    public String getGatewayType() {
        return this.gatewayType;
    }

    /**
     * @param gatewayType the gateway type
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public GatewayInfo withGatewayType(String gatewayType) {
        setGatewayType(gatewayType);
        return this;
    }

    /**
     * @param gatewayOperationalState the gateway's operational state
     */
    public void setGatewayOperationalState(String gatewayOperationalState) {
        this.gatewayOperationalState = gatewayOperationalState;
    }

    /**
     * @return the gateway's operational state
     */
    public String getGatewayOperationalState() {
        return this.gatewayOperationalState;
    }

    /**
     * @param gatewayOperationalState the gateway's operational state
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public GatewayInfo withGatewayOperationalState(
            String gatewayOperationalState) {
        setGatewayOperationalState(gatewayOperationalState);
        return this;
    }

    /**
     * @param gatewayName the gateway name
     */
    public void setGatewayName(String gatewayName) {
        this.gatewayName = gatewayName;
    }

    /**
     * @return the gateway name
     */
    public String getGatewayName() {
        return this.gatewayName;
    }

    /**
     * @param gatewayName the gateway name
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public GatewayInfo withGatewayName(String gatewayName) {
        setGatewayName(gatewayName);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. The output format is unchanged from the generated original,
     * since callers may rely on it in logs.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getGatewayARN() != null)
            sb.append("GatewayARN: " + getGatewayARN() + ",");
        if (getGatewayType() != null)
            sb.append("GatewayType: " + getGatewayType() + ",");
        if (getGatewayOperationalState() != null)
            sb.append("GatewayOperationalState: "
                    + getGatewayOperationalState() + ",");
        if (getGatewayName() != null)
            sb.append("GatewayName: " + getGatewayName());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Field-wise equality over all four properties. Replaces the generated
     * {@code ^}/{@code == false} null-juggling with {@link Objects#equals},
     * which implements the exact same null-safe comparison.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof GatewayInfo))
            return false;

        GatewayInfo other = (GatewayInfo) obj;
        return Objects.equals(getGatewayARN(), other.getGatewayARN())
                && Objects.equals(getGatewayType(), other.getGatewayType())
                && Objects.equals(getGatewayOperationalState(),
                        other.getGatewayOperationalState())
                && Objects.equals(getGatewayName(), other.getGatewayName());
    }

    /**
     * Hash code consistent with {@link #equals(Object)}.
     * {@link Objects#hash} produces the identical 31-based accumulation
     * (seed 1, null contributes 0) that the hand-rolled original computed.
     */
    @Override
    public int hashCode() {
        return Objects.hash(getGatewayARN(), getGatewayType(),
                getGatewayOperationalState(), getGatewayName());
    }

    /**
     * Shallow copy; all fields are immutable strings, so a shallow copy is a
     * full copy.
     */
    @Override
    public GatewayInfo clone() {
        try {
            return (GatewayInfo) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
apache-2.0
spotify/scio
scio-smb/src/main/java/org/apache/beam/sdk/extensions/smb/TFRecordCodec.java
4295
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.extensions.smb;

import com.spotify.scio.smb.annotations.PatchedFromBeam;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.HashFunction;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import javax.annotation.Nullable;

import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkState;

/**
 * Codec for TFRecords file format. See
 * https://www.tensorflow.org/api_guides/python/python_io#TFRecords_Format_Details
 *
 * <p>Record layout: 8-byte little-endian length, 4-byte masked CRC32C of the length, payload,
 * 4-byte masked CRC32C of the payload.
 *
 * <p>Instances are NOT thread-safe: the header/footer scratch buffers are shared per instance.
 */
@PatchedFromBeam(origin = "org.apache.beam.sdk.io.TFRecordIO")
public class TFRecordCodec {
  /** 8-byte length + 4-byte masked length CRC. */
  private static final int HEADER_LEN = (Long.SIZE + Integer.SIZE) / Byte.SIZE;

  /** 4-byte masked data CRC. */
  private static final int FOOTER_LEN = Integer.SIZE / Byte.SIZE;

  // Checksum function mandated by the TFRecord format. Declared final (the original was a
  // mutable static, which Error Prone flags and which could be reassigned accidentally).
  private static final HashFunction crc32c = Hashing.crc32c();

  // Per-instance scratch buffers; reused across calls, hence the thread-safety caveat above.
  private final ByteBuffer header = ByteBuffer.allocate(HEADER_LEN).order(ByteOrder.LITTLE_ENDIAN);
  private final ByteBuffer footer = ByteBuffer.allocate(FOOTER_LEN).order(ByteOrder.LITTLE_ENDIAN);

  /** Applies the TFRecord CRC mask: rotate right by 15 bits, add the magic constant. */
  private int mask(int crc) {
    return ((crc >>> 15) | (crc << 17)) + 0xa282ead8;
  }

  private int hashLong(long x) {
    return mask(crc32c.hashLong(x).asInt());
  }

  private int hashBytes(byte[] x) {
    return mask(crc32c.hashBytes(x).asInt());
  }

  /**
   * Returns the total on-disk size of a record carrying {@code data}.
   *
   * @param data the record payload
   * @return header + payload + footer length in bytes
   */
  public int recordLength(byte[] data) {
    return HEADER_LEN + data.length + FOOTER_LEN;
  }

  /**
   * Reads one record from the channel, verifying both CRCs.
   *
   * @param inChannel channel positioned at a record boundary
   * @return the record payload, or {@code null} on clean EOF before any header byte
   * @throws IOException on truncated input or CRC mismatch
   */
  @Nullable
  byte[] read(ReadableByteChannel inChannel) throws IOException {
    header.clear();
    int firstRead = inChannel.read(header);
    if (firstRead <= 0) {
      // Clean EOF (or a zero-byte read on an empty channel) before any header byte:
      // there are no more records.
      return null;
    }
    // FIX: a single read() may legally return fewer bytes than requested (e.g. network
    // channels). The original treated any short first read as corruption; keep reading
    // until the 12-byte header is complete or the channel truly hits EOF.
    while (header.hasRemaining() && inChannel.read(header) >= 0) {}
    checkState(!header.hasRemaining(), "Not a valid TFRecord. Fewer than 12 bytes.");
    header.rewind();
    long length = header.getLong();
    long lengthHash = hashLong(length);
    int maskedCrc32OfLength = header.getInt();
    if (lengthHash != maskedCrc32OfLength) {
      throw new IOException(
          String.format(
              "Mismatch of length mask when reading a record. Expected %d but received %d.",
              maskedCrc32OfLength, lengthHash));
    }

    ByteBuffer data = ByteBuffer.allocate((int) length);
    while (data.hasRemaining() && inChannel.read(data) >= 0) {}
    if (data.hasRemaining()) {
      throw new IOException(
          String.format(
              "EOF while reading record of length %d. Read only %d bytes. Input might be truncated.",
              length, data.position()));
    }

    footer.clear();
    // FIX: the original issued a single unchecked read() here; a partial footer read would
    // then compute the CRC over stale buffer bytes and report a spurious mismatch. Read
    // until full and fail with a truncation error instead.
    while (footer.hasRemaining() && inChannel.read(footer) >= 0) {}
    if (footer.hasRemaining()) {
      throw new IOException(
          String.format(
              "EOF while reading footer of record of length %d. Input might be truncated.",
              length));
    }
    footer.rewind();

    int maskedCrc32OfData = footer.getInt();
    int dataHash = hashBytes(data.array());
    if (dataHash != maskedCrc32OfData) {
      throw new IOException(
          String.format(
              "Mismatch of data mask when reading a record. Expected %d but received %d.",
              maskedCrc32OfData, dataHash));
    }
    return data.array();
  }

  /**
   * Writes one record (header, payload, footer) to the channel.
   *
   * @param outChannel destination channel
   * @param data record payload
   * @throws IOException if the channel write fails
   */
  public void write(WritableByteChannel outChannel, byte[] data) throws IOException {
    int maskedCrc32OfLength = hashLong(data.length);
    int maskedCrc32OfData = hashBytes(data);

    header.clear();
    header.putLong(data.length).putInt(maskedCrc32OfLength);
    header.rewind();
    outChannel.write(header);

    outChannel.write(ByteBuffer.wrap(data));

    footer.clear();
    footer.putInt(maskedCrc32OfData);
    footer.rewind();
    outChannel.write(footer);
  }
}
apache-2.0
qhanam/Pangor
js/src/ca/ubc/ece/salt/pangor/js/analysis/scope/JavaScriptScope.java
2942
package ca.ubc.ece.salt.pangor.js.analysis.scope;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.mozilla.javascript.ast.AstNode;
import org.mozilla.javascript.ast.FunctionNode;
import org.mozilla.javascript.ast.ScriptNode;

import ca.ubc.ece.salt.gumtree.ast.ClassifiedASTNode;
import ca.ubc.ece.salt.pangor.analysis.scope.Scope;

/**
 * A node in a JavaScript scope tree. Each instance corresponds to one script or
 * function ({@link ScriptNode}); the root of the tree (the script scope) has a
 * {@code null} parent. Variable lookups walk up the parent chain.
 */
public final class JavaScriptScope implements Scope<AstNode> {

	/** The scope above this scope (i.e., the scope for this function's parent). **/
	public JavaScriptScope parent;

	/** The AST node of the script or function of this scope. **/
	public ScriptNode scope;

	/** The variables declared in the scope. **/
	public Map<String, AstNode> variables;

	/** The globals declared (implicitly if this is not the script scope) in the scope. **/
	public Map<String, AstNode> globals;

	/** The scopes of the child functions. **/
	public List<Scope<AstNode>> children;

	/** Uniquely identifies each function. **/
	public String identity;

	/**
	 * Creates a scope with empty variable/global tables and no children.
	 *
	 * @param parent the enclosing scope, or {@code null} for the script scope
	 * @param scope the script or function AST node this scope represents
	 * @param identity a unique identifier for the function
	 */
	public JavaScriptScope(JavaScriptScope parent, ScriptNode scope, String identity) {
		this.parent = parent;
		this.scope = scope;
		this.variables = new HashMap<String, AstNode>();
		this.globals = new HashMap<String, AstNode>();
		this.children = new LinkedList<Scope<AstNode>>();
		this.identity = identity;
	}

	@Override
	public Scope<AstNode> getParent() {
		return this.parent;
	}

	@Override
	public AstNode getScope() {
		return this.scope;
	}

	@Override
	public Map<String, AstNode> getVariables() {
		return this.variables;
	}

	@Override
	public Map<String, AstNode> getGlobals() {
		return this.globals;
	}

	@Override
	public List<Scope<AstNode>> getChildren() {
		return this.children;
	}

	@Override
	public String getIdentity() {
		return this.identity;
	}

	/**
	 * Resolves a variable to its declaring AST node by walking up the scope
	 * chain (innermost declaration wins). Returns {@code null} if the variable
	 * is not declared anywhere on the chain.
	 */
	@Override
	public AstNode getVariableDeclaration(String variable) {
		if(this.variables.containsKey(variable)) return this.variables.get(variable);
		if(this.parent == null) return null;
		return parent.getVariableDeclaration(variable);
	}

	/**
	 * Depth-first search of this subtree for the scope whose AST node is the
	 * given function. Returns {@code null} if no scope in this subtree matches.
	 *
	 * @throws IllegalArgumentException if the node is not a Rhino {@link FunctionNode}
	 */
	@Override
	public Scope<AstNode> getFunctionScope(ClassifiedASTNode function) {

		/* Sanity check. */
		if(!(function instanceof FunctionNode)) throw new IllegalArgumentException("The AST must be parsed from Apache Rhino.");

		if(this.scope == function) return this;

		for(Scope<AstNode> child : this.children) {
			Scope<AstNode> functionScope = child.getFunctionScope(function);
			if(functionScope != null) return functionScope;
		}

		return null;

	}

	/**
	 * True if the variable is declared in some function scope on the chain.
	 * Note the root (script) scope returns false before consulting its own
	 * variable table — variables declared at script level are apparently
	 * treated as globals, not locals (see {@link #isGlobal(String)}).
	 */
	@Override
	public boolean isLocal(String variable) {
		if(this.parent == null) return false;
		if(this.variables.containsKey(variable)) return true;
		return this.parent.isLocal(variable);
	}

	/**
	 * True if the variable is registered as a global anywhere on the chain, or
	 * — once the walk reaches the root — declared in the script scope's own
	 * variable table (script-level declarations count as globals).
	 */
	@Override
	public boolean isGlobal(String variable) {
		if(this.globals.containsKey(variable)) return true;
		if(this.parent == null) return this.variables.containsKey(variable);
		return this.parent.isGlobal(variable);
	}

}
apache-2.0
apache/pdfbox
pdfbox/src/main/java/org/apache/pdfbox/pdfwriter/compress/COSWriterCompressionPool.java
12826
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pdfbox.pdfwriter.compress;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map.Entry;

import org.apache.pdfbox.pdfparser.PDFXRefStream;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSDocument;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSObject;
import org.apache.pdfbox.cos.COSObjectKey;
import org.apache.pdfbox.cos.COSStream;

/**
 * An instance of this class compresses the contents of a given {@link PDDocument}.
 *
 * <p>The pool walks the document structure from the trailer's ROOT and INFO entries, registering
 * every reachable object exactly once and sorting each registered object into one of two buckets:
 * objects that must remain at the document's top level, and objects eligible for inclusion in a
 * compressed object stream.
 *
 * @author Christian Appl
 */
public class COSWriterCompressionPool
{
    /** Object streams require at least PDF 1.6. */
    public static final float MINIMUM_SUPPORTED_VERSION = 1.6f;

    private final PDDocument document;
    private final CompressParameters parameters;

    // Maps COSObjectKey <-> COSBase for every object registered in this pool.
    private final COSObjectPool objectPool;

    // A list containing all objects, that shall be directly appended to the document's top level container.
    private final List<COSObjectKey> topLevelObjects = new ArrayList<>();

    // A list containing all objects, that may be appended to an object stream.
    private final List<COSObjectKey> objectStreamObjects = new ArrayList<>();

    /**
     * <p>
     * Constructs an object that can be used to compress the contents of a given {@link PDDocument}. It provides the
     * means to:
     * </p>
     * <ul>
     * <li>Compress the COSStructure of the document, by streaming {@link COSBase}s to compressed
     * {@link COSWriterObjectStream}s</li>
     * </ul>
     *
     * @param document The document, that shall be compressed.
     * @param parameters The configuration of the compression operations, that shall be applied
     * (defaults are used when {@code null}).
     * @throws IOException Shall be thrown if a compression operation failed.
     */
    public COSWriterCompressionPool(PDDocument document, CompressParameters parameters)
            throws IOException
    {
        this.document = document;
        this.parameters = parameters != null ? parameters : new CompressParameters();
        // Seed the pool's key numbering above every object number already used by the document.
        objectPool = new COSObjectPool(document.getDocument().getHighestXRefObjectNumber());

        // Initialize object pool.
        // ROOT and INFO are the only trailer entries traversed; everything reachable from them
        // is registered transitively by addStructure.
        COSDocument cosDocument = document.getDocument();
        COSDictionary trailer = cosDocument.getTrailer();
        addStructure(new TraversedCOSElement(trailer.getItem(COSName.ROOT)));
        addStructure(new TraversedCOSElement(trailer.getItem(COSName.INFO)));

        // Sort both buckets by object key so objects are written in ascending number order.
        Collections.sort(objectStreamObjects);
        Collections.sort(topLevelObjects);
    }

    /**
     * Adds the given {@link COSBase} to this pool, using the given {@link COSObjectKey} as it's referencable ID. This
     * method shall determine an appropriate key, for yet unregistered objects, to register them. Depending on the type
     * of object, it shall either be appended as-is or shall be appended to a compressed {@link COSWriterObjectStream}.
     *
     * @param key The {@link COSObjectKey} that shall be used as the {@link COSBase}s ID, if possible.
     * @param element The {@link COSBase}, that shall be registered in this pool.
     * @return The (possibly dereferenced) object, so the caller can continue traversing it.
     */
    private COSBase addObjectToPool(COSObjectKey key, TraversedCOSElement element)
    {
        // Drop hollow objects.
        COSBase base = element.getCurrentObject();
        base = base instanceof COSObject ? ((COSObject) base).getObject() : base;

        // To avoid mixing up indirect COSInteger objects holding the same value, we must check
        // whether the given key matches the key that is stored for the "same" base object within
        // the object pool. (The same can occur for COSFloat, COSBoolean and COSName, and — under
        // certain circumstances — for the remaining types as well.)
        if (base == null
                || (key != null && objectPool.contains(key))
                || (key == null && objectPool.contains(base)))
        {
            return base;
        }

        // Check whether the object can not be appended to an object stream.
        // An objectStream shall only contain generation 0 objects.
        // It shall never contain the encryption dictionary.
        // It shall never contain the document's root dictionary. (relevant for document encryption)
        // It shall never contain other streams.
        if ((key != null && key.getGeneration() != 0) || base instanceof COSStream
                || (document.getEncryption() != null
                        && base == document.getEncryption().getCOSObject())
                || base == this.document.getDocument().getTrailer()
                        .getCOSDictionary(COSName.ROOT))
        {
            // Register as a top-level object; a null return from put() means the object was
            // already registered (or could not be), so nothing further is recorded.
            COSObjectKey actualKey = objectPool.put(key, base);
            if (actualKey == null)
            {
                return base;
            }
            topLevelObjects.add(actualKey);
            return base;
        }

        // Determine the object key.
        COSObjectKey actualKey = objectPool.put(key, base);
        if (actualKey == null)
        {
            return base;
        }
        // Append it to an object stream.
        this.objectStreamObjects.add(actualKey);
        return base;
    }

    /**
     * Attempts to find yet unregistered streams and dictionaries in the given structure.
     *
     * @param traversedObject A Collection of all objects, that have already been traversed, to avoid cycles.
     * @throws IOException Shall be thrown, if compressing the object failed.
     */
    private void addStructure(TraversedCOSElement traversedObject) throws IOException
    {
        COSBase current = traversedObject.getCurrentObject();
        COSBase base = current;
        // Register indirect dictionaries/streams and referenced objects; direct dictionaries and
        // arrays are not registered themselves, only traversed for registrable descendants.
        if (current instanceof COSStream
                || (current instanceof COSDictionary && !current.isDirect()))
        {
            base = addObjectToPool(base.getKey(), traversedObject);
        }
        else if (current instanceof COSObject)
        {
            base = ((COSObject) current).getObject();
            if (base != null)
            {
                base = addObjectToPool(current.getKey(), traversedObject);
            }
        }

        // Recurse into containers to pick up nested registrable objects.
        if (base instanceof COSArray)
        {
            addCOSArray(traversedObject, (COSArray) base);
        }
        else if (base instanceof COSDictionary)
        {
            addCOSDictionary(traversedObject, (COSDictionary) base);
        }
    }

    /**
     * Traverses the entries of an array, descending into containers and into indirect objects
     * whose keys are not yet registered in the pool.
     *
     * @param traversedObject the traversal trail (for cycle avoidance)
     * @param array the array whose entries shall be traversed
     * @throws IOException Shall be thrown, if compressing an object failed.
     */
    private void addCOSArray(TraversedCOSElement traversedObject, COSArray array)
            throws IOException
    {
        for (COSBase value : array)
        {
            if (value instanceof COSArray || value instanceof COSDictionary)
            {
                addStructure(traversedObject.appendTraversedElement(value));
            }
            else if (value instanceof COSObject)
            {
                COSObject cosObject = (COSObject) value;
                // Skip references whose target is already registered under this key.
                if (cosObject.getKey() != null && objectPool.contains(cosObject.getKey()))
                {
                    continue;
                }
                if (cosObject.getObject() != null)
                {
                    addStructure(traversedObject.appendTraversedElement(value));
                }
            }
        }
    }

    /**
     * Traverses the entries of a dictionary, descending into containers and into indirect objects
     * whose keys are not yet registered in the pool. Already-traversed dictionaries are skipped to
     * avoid cycles.
     *
     * @param traversedObject the traversal trail (for cycle avoidance)
     * @param dictionary the dictionary whose entries shall be traversed
     * @throws IOException Shall be thrown, if compressing an object failed.
     */
    private void addCOSDictionary(TraversedCOSElement traversedObject, COSDictionary dictionary)
            throws IOException
    {
        for (Entry<COSName, COSBase> entry : dictionary.entrySet())
        {
            COSBase value = entry.getValue();
            // Arrays are always descended into; dictionaries only when not already on the trail.
            if (value instanceof COSArray
                    || (value instanceof COSDictionary
                            && !traversedObject.getAllTraversedObjects().contains(value)))
            {
                addStructure(traversedObject.appendTraversedElement(value));
            }
            else if (value instanceof COSObject)
            {
                COSObject cosObject = (COSObject) value;
                // Skip references whose target is already registered under this key.
                if (cosObject.getKey() != null && objectPool.contains(cosObject.getKey()))
                {
                    continue;
                }
                if (cosObject.getObject() != null)
                {
                    addStructure(traversedObject.appendTraversedElement(value));
                }
            }
        }
    }

    /**
     * Returns all {@link COSBase}s, that must be added to the document's top level container. Those objects are not
     * valid to be added to an object stream.
     *
     * @return A list of all top level {@link COSBase}s.
     */
    public List<COSObjectKey> getTopLevelObjects()
    {
        return topLevelObjects;
    }

    /**
     * Returns all {@link COSBase}s that can be appended to an object stream. This list is only provided to enable
     * reflections. Contained objects should indeed be added to a compressed document via an object stream, as can be
     * created via calling: {@link COSWriterCompressionPool#createObjectStreams()}
     *
     * @return A list of all {@link COSBase}s, that can be added to an object stream.
     */
    public List<COSObjectKey> getObjectStreamObjects()
    {
        return objectStreamObjects;
    }

    /**
     * Returns true, if the given {@link COSBase} is a registered object of this compression pool.
     *
     * @param object The object, that shall be checked.
     * @return True, if the given {@link COSBase} is a registered object of this compression pool.
     */
    public boolean contains(COSBase object)
    {
        return objectPool.contains(object);
    }

    /**
     * Returns the {@link COSObjectKey}, that is registered for the given {@link COSBase} in this compression pool.
     *
     * @param object The {@link COSBase} a {@link COSObjectKey} is registered for in this compression pool.
     * @return The {@link COSObjectKey}, that is registered for the given {@link COSBase} in this compression pool, if
     * such an object is contained.
     */
    public COSObjectKey getKey(COSBase object)
    {
        return objectPool.getKey(object);
    }

    /**
     * Returns the {@link COSBase}, that is registered for the given {@link COSObjectKey} in this compression pool.
     *
     * @param key The {@link COSObjectKey} a {@link COSBase} is registered for in this compression pool.
     * @return The {@link COSBase}, that is registered for the given {@link COSObjectKey} in this compression pool, if
     * such an object is contained.
     */
    public COSBase getObject(COSObjectKey key)
    {
        return objectPool.getObject(key);
    }

    /**
     * Returns the highest object number, that is registered in this compression pool.
     *
     * @return The highest object number, that is registered in this compression pool.
     */
    public long getHighestXRefObjectNumber()
    {
        return objectPool.getHighestXRefObjectNumber();
    }

    /**
     * Creates {@link COSWriterObjectStream}s for all currently registered objects of this pool, that have been marked
     * as fit for being compressed in this manner. Such object streams may be added to a PDF document and shall be
     * declared in a document's {@link PDFXRefStream} accordingly. The objects contained in such a stream must not be
     * added to the document separately.
     *
     * @return The created {@link COSWriterObjectStream}s for all currently registered compressible objects.
     */
    public List<COSWriterObjectStream> createObjectStreams()
    {
        List<COSWriterObjectStream> objectStreams = new ArrayList<>();
        COSWriterObjectStream objectStream = null;
        for (int i = 0; i < objectStreamObjects.size(); i++)
        {
            COSObjectKey key = objectStreamObjects.get(i);
            // Start a new object stream every getObjectStreamSize() objects (and on the first
            // iteration, where objectStream is still null).
            if (objectStream == null || (i % parameters.getObjectStreamSize()) == 0)
            {
                objectStream = new COSWriterObjectStream(this);
                objectStreams.add(objectStream);
            }
            objectStream.prepareStreamObject(key, objectPool.getObject(key));
        }
        return objectStreams;
    }
}
apache-2.0
Haulmont/yarg
core/modules/core/src/com/haulmont/yarg/formatters/impl/doc/connector/OfficeIntegrationAPI.java
931
/*
 * Copyright 2013 Haulmont
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.haulmont.yarg.formatters.impl.doc.connector;

/**
 * Configuration and task-execution facade for the office-suite integration
 * (presumably OpenOffice/LibreOffice, given the package — confirm against the
 * implementing connector classes).
 */
public interface OfficeIntegrationAPI {

    /** @return path of the directory used for temporary files during office processing */
    String getTemporaryDirPath();

    /** @return timeout in seconds applied to office tasks (nullable Integer — callers should handle null) */
    Integer getTimeoutInSeconds();

    /** @return how many times a failed office task is retried */
    int getCountOfRetry();

    /**
     * @return whether a display device is available for the office process
     *         (NOTE(review): returns boxed Boolean — may be null; verify callers)
     */
    Boolean isDisplayDeviceAvailable();

    /**
     * Runs the given office task, aborting it if it exceeds the timeout.
     *
     * @param officeTask the task to execute
     * @param timeoutInSeconds maximum time the task may run
     * @throws NoFreePortsException if no free port is available to talk to the office process
     */
    void runTaskWithTimeout(OfficeTask officeTask, int timeoutInSeconds) throws NoFreePortsException;
}
apache-2.0
jianjianH/Order
app/src/main/java/jne/com/order/OrderApplication.java
407
package jne.com.order;

import android.app.Application;

/**
 * Application object customized for the whole program.
 * Created by JianHuang
 * Date: 2015/1/13
 * Time: 11:16
 */
public class OrderApplication extends Application{
    @Override
    public void onCreate() {
        super.onCreate();
        // Initialize the app-wide context holder exactly once; guarded so a
        // second onCreate (e.g. multi-process) does not re-initialize it.
        if (!OrderContext.isInitialized()) {
            OrderContext.init(getApplicationContext());
        }
    }
}
apache-2.0
leveyj/ignite
modules/core/src/main/java/org/apache/ignite/internal/processors/platform/messaging/PlatformMessaging.java
6300
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.platform.messaging;

import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteMessaging;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.binary.BinaryRawReaderEx;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.processors.platform.PlatformAbstractTarget;
import org.apache.ignite.internal.processors.platform.PlatformContext;
import org.apache.ignite.internal.processors.platform.message.PlatformMessageFilter;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;
import org.apache.ignite.internal.util.future.IgniteFutureImpl;

import java.util.UUID;

/**
 * Interop messaging.
 *
 * Dispatch target that exposes {@link IgniteMessaging} operations to the
 * platform (non-Java) side. Each OP_* constant is an operation code sent by
 * the native caller; arguments are decoded in a fixed order from the binary
 * reader, so read-call ordering below must not be changed.
 */
public class PlatformMessaging extends PlatformAbstractTarget {
    /** Register a local listener (filter pointer + topic). */
    public static final int OP_LOC_LISTEN = 1;

    /** Register a remote listener, returning its UUID. */
    public static final int OP_REMOTE_LISTEN = 2;

    /** Send a single message to a topic. */
    public static final int OP_SEND = 3;

    /** Send a collection of messages to a topic. */
    public static final int OP_SEND_MULTI = 4;

    /** Send an ordered message with a timeout. */
    public static final int OP_SEND_ORDERED = 5;

    /** Remove a local listener. */
    public static final int OP_STOP_LOC_LISTEN = 6;

    /** Remove a remote listener by UUID. */
    public static final int OP_STOP_REMOTE_LISTEN = 7;

    /** Obtain an async-enabled facade of this target. */
    public static final int OP_WITH_ASYNC = 8;

    /** Async variant of {@link #OP_REMOTE_LISTEN}. */
    public static final int OP_REMOTE_LISTEN_ASYNC = 9;

    /** Async variant of {@link #OP_STOP_REMOTE_LISTEN}. */
    public static final int OP_STOP_REMOTE_LISTEN_ASYNC = 10;

    /** Synchronous messaging facade. */
    private final IgniteMessaging messaging;

    /** Async facade derived from {@link #messaging}; used by the *_ASYNC ops. */
    private final IgniteMessaging messagingAsync;

    /**
     * Ctor.
     *
     * @param platformCtx Context.
     * @param messaging Ignite messaging.
     */
    public PlatformMessaging(PlatformContext platformCtx, IgniteMessaging messaging) {
        super(platformCtx);

        assert messaging != null;

        this.messaging = messaging;

        messagingAsync = messaging.withAsync();
    }

    /** {@inheritDoc} */
    @Override protected long processInStreamOutLong(int type, BinaryRawReaderEx reader)
        throws IgniteCheckedException {
        switch (type) {
            case OP_SEND:
                // Reader order: topic, then message.
                messaging.send(reader.readObjectDetached(), reader.readObjectDetached());

                return TRUE;

            case OP_SEND_MULTI:
                // Reader order: topic, then message collection.
                messaging.send(reader.readObjectDetached(), PlatformUtils.readCollection(reader));

                return TRUE;

            case OP_SEND_ORDERED:
                // Reader order: topic, message, timeout (long).
                messaging.sendOrdered(reader.readObjectDetached(), reader.readObjectDetached(), reader.readLong());

                return TRUE;

            case OP_LOC_LISTEN: {
                // The long read first is the native-side filter pointer.
                PlatformMessageLocalFilter filter = new PlatformMessageLocalFilter(reader.readLong(), platformCtx);

                Object topic = reader.readObjectDetached();

                messaging.localListen(topic, filter);

                return TRUE;
            }

            case OP_STOP_LOC_LISTEN: {
                // Filter equality is pointer-based, so rebuilding it here
                // matches the one registered in OP_LOC_LISTEN.
                PlatformMessageLocalFilter filter = new PlatformMessageLocalFilter(reader.readLong(), platformCtx);

                Object topic = reader.readObjectDetached();

                messaging.stopLocalListen(topic, filter);

                return TRUE;
            }

            case OP_STOP_REMOTE_LISTEN: {
                messaging.stopRemoteListen(reader.readUuid());

                return TRUE;
            }

            case OP_REMOTE_LISTEN_ASYNC: {
                startRemoteListen(reader, messagingAsync);

                // Hook the pending async future up to the native-side callback.
                return readAndListenFuture(reader);
            }

            case OP_STOP_REMOTE_LISTEN_ASYNC: {
                messagingAsync.stopRemoteListen(reader.readUuid());

                return readAndListenFuture(reader);
            }

            default:
                return super.processInStreamOutLong(type, reader);
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings({"IfMayBeConditional", "ConstantConditions", "unchecked"})
    @Override protected void processInStreamOutStream(int type, BinaryRawReaderEx reader, BinaryRawWriterEx writer)
        throws IgniteCheckedException {
        switch (type) {
            case OP_REMOTE_LISTEN:{
                // Synchronous remote listen: write the listener id back to the caller.
                writer.writeUuid(startRemoteListen(reader, messaging));

                break;
            }

            default:
                super.processInStreamOutStream(type, reader, writer);
        }
    }

    /**
     * Starts the remote listener.
     *
     * @param reader Reader positioned at: native filter object, filter pointer, topic.
     * @param messaging Messaging facade (sync or async) to register the listener on.
     * @return Listen id.
     */
    private UUID startRemoteListen(BinaryRawReaderEx reader, IgniteMessaging messaging) {
        Object nativeFilter = reader.readObjectDetached();

        long ptr = reader.readLong();  // interop pointer

        Object topic = reader.readObjectDetached();

        PlatformMessageFilter filter = platformCtx.createRemoteMessageFilter(nativeFilter, ptr);

        return messaging.remoteListen(topic, filter);
    }

    /** {@inheritDoc} */
    @Override protected IgniteInternalFuture currentFuture() throws IgniteCheckedException {
        return ((IgniteFutureImpl)messagingAsync.future()).internalFuture();
    }

    /** {@inheritDoc} */
    @Override protected Object processOutObject(int type) throws IgniteCheckedException {
        switch (type) {
            case OP_WITH_ASYNC:
                // Already async: hand the same target back instead of wrapping again.
                if (messaging.isAsync())
                    return this;

                return new PlatformMessaging (platformCtx, messaging.withAsync());
        }

        return super.processOutObject(type);
    }
}
apache-2.0
apache/httpcore
httpcore5-h2/src/test/java/org/apache/hc/core5/http2/examples/H2MultiStreamExecutionExample.java
7140
/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */
package org.apache.hc.core5.http2.examples;

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.apache.hc.core5.concurrent.FutureCallback;
import org.apache.hc.core5.http.Header;
import org.apache.hc.core5.http.HttpConnection;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.HttpResponse;
import org.apache.hc.core5.http.Message;
import org.apache.hc.core5.http.impl.bootstrap.HttpAsyncRequester;
import org.apache.hc.core5.http.nio.AsyncClientEndpoint;
import org.apache.hc.core5.http.nio.entity.StringAsyncEntityConsumer;
import org.apache.hc.core5.http.nio.support.AsyncRequestBuilder;
import org.apache.hc.core5.http.nio.support.BasicResponseConsumer;
import org.apache.hc.core5.http2.HttpVersionPolicy;
import org.apache.hc.core5.http2.config.H2Config;
import org.apache.hc.core5.http2.frame.RawFrame;
import org.apache.hc.core5.http2.impl.nio.H2StreamListener;
import org.apache.hc.core5.http2.impl.nio.bootstrap.H2RequesterBootstrap;
import org.apache.hc.core5.io.CloseMode;
import org.apache.hc.core5.reactor.IOReactorConfig;
import org.apache.hc.core5.util.Timeout;

/**
 * Example of HTTP/2 concurrent request execution using multiple streams.
 *
 * Opens a single connection to a public test server and issues several
 * requests over it concurrently, each on its own HTTP/2 stream, logging
 * header traffic per stream id.
 */
public class H2MultiStreamExecutionExample {

    public static void main(final String[] args) throws Exception {
        // Create and start requester
        final IOReactorConfig ioReactorConfig = IOReactorConfig.custom()
                .setSoTimeout(5, TimeUnit.SECONDS)
                .build();
        // Force HTTP/2 (no ALPN negotiation fallback) and allow up to 100 concurrent streams.
        final H2Config h2Config = H2Config.custom()
                .setPushEnabled(false)
                .setMaxConcurrentStreams(100)
                .build();
        final HttpAsyncRequester requester = H2RequesterBootstrap.bootstrap()
                .setIOReactorConfig(ioReactorConfig)
                .setVersionPolicy(HttpVersionPolicy.FORCE_HTTP_2)
                .setH2Config(h2Config)
                // Listener only logs header frames; flow-control and raw-frame callbacks are no-ops.
                .setStreamListener(new H2StreamListener() {

                    @Override
                    public void onHeaderInput(final HttpConnection connection, final int streamId, final List<? extends Header> headers) {
                        for (int i = 0; i < headers.size(); i++) {
                            System.out.println(connection.getRemoteAddress() + " (" + streamId + ") << " + headers.get(i));
                        }
                    }

                    @Override
                    public void onHeaderOutput(final HttpConnection connection, final int streamId, final List<? extends Header> headers) {
                        for (int i = 0; i < headers.size(); i++) {
                            System.out.println(connection.getRemoteAddress() + " (" + streamId + ") >> " + headers.get(i));
                        }
                    }

                    @Override
                    public void onFrameInput(final HttpConnection connection, final int streamId, final RawFrame frame) {
                    }

                    @Override
                    public void onFrameOutput(final HttpConnection connection, final int streamId, final RawFrame frame) {
                    }

                    @Override
                    public void onInputFlowControl(final HttpConnection connection, final int streamId, final int delta, final int actualSize) {
                    }

                    @Override
                    public void onOutputFlowControl(final HttpConnection connection, final int streamId, final int delta, final int actualSize) {
                    }

                })
                .create();

        // Ensure graceful shutdown of the I/O reactor if the JVM is terminated.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            System.out.println("HTTP requester shutting down");
            requester.close(CloseMode.GRACEFUL);
        }));
        requester.start();

        final HttpHost target = new HttpHost("nghttp2.org");
        final String[] requestUris = new String[] {"/httpbin/ip", "/httpbin/user-agent", "/httpbin/headers"};

        // One endpoint (connection); all requests below multiplex over it.
        final Future<AsyncClientEndpoint> future = requester.connect(target, Timeout.ofSeconds(5));
        final AsyncClientEndpoint clientEndpoint = future.get();

        // Latch counts down on completion, failure or cancellation of each request.
        final CountDownLatch latch = new CountDownLatch(requestUris.length);
        for (final String requestUri: requestUris) {
            clientEndpoint.execute(
                    AsyncRequestBuilder.get()
                            .setHttpHost(target)
                            .setPath(requestUri)
                            .build(),
                    new BasicResponseConsumer<>(new StringAsyncEntityConsumer()),
                    new FutureCallback<Message<HttpResponse, String>>() {

                        @Override
                        public void completed(final Message<HttpResponse, String> message) {
                            latch.countDown();
                            final HttpResponse response = message.getHead();
                            final String body = message.getBody();
                            System.out.println(requestUri + "->" + response.getCode());
                            System.out.println(body);
                        }

                        @Override
                        public void failed(final Exception ex) {
                            latch.countDown();
                            System.out.println(requestUri + "->" + ex);
                        }

                        @Override
                        public void cancelled() {
                            latch.countDown();
                            System.out.println(requestUri + " cancelled");
                        }

                    });
        }

        latch.await();

        // Manually release client endpoint when done !!!
        clientEndpoint.releaseAndDiscard();

        System.out.println("Shutting down I/O reactor");
        requester.initiateShutdown();
    }

}
apache-2.0
mdunker/usergrid
stack/core/src/main/java/org/apache/usergrid/corepersistence/pipeline/read/traverse/IdFilter.java
1814
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.usergrid.corepersistence.pipeline.read.traverse; import org.apache.usergrid.corepersistence.pipeline.read.AbstractFilter; import org.apache.usergrid.corepersistence.pipeline.read.FilterResult; import org.apache.usergrid.persistence.model.entity.Id; import com.google.inject.Inject; import com.google.inject.assistedinject.Assisted; import rx.Observable; import java.util.List; /** * This command is a stopgap to make migrating 1.0 code easier. Once full traversal has been implemented, this should * be removed */ public class IdFilter extends AbstractFilter<FilterResult<Id>, FilterResult<Id>>{ @Inject public IdFilter() {}; @Override public Observable<FilterResult<Id>> call( final Observable<FilterResult<Id>> filterValueObservable ) { //ignore what our input was, and simply emit the id specified return filterValueObservable.map( idFilterResult -> new FilterResult( idFilterResult.getValue(), idFilterResult.getPath() )); } }
apache-2.0
rozza/mongo-java-driver
bson/src/main/org/bson/codecs/pojo/ConventionSetPrivateFieldImpl.java
3549
/*
 * Copyright 2008-present MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.bson.codecs.pojo;

import org.bson.codecs.configuration.CodecConfigurationException;

import static java.lang.String.format;
import static java.lang.reflect.Modifier.isPrivate;

/**
 * Convention that allows deserialization to write directly into private fields
 * that have no usable setter, by swapping in an accessor that makes the field
 * accessible via reflection.
 */
final class ConventionSetPrivateFieldImpl implements Convention {
    @Override
    public void apply(final ClassModelBuilder<?> classModelBuilder) {
        for (PropertyModelBuilder<?> propertyModelBuilder : classModelBuilder.getPropertyModelBuilders()) {
            // This convention can only wrap the driver's default accessor; custom
            // PropertyAccessor implementations cannot be adapted safely.
            if (!(propertyModelBuilder.getPropertyAccessor() instanceof PropertyAccessorImpl)) {
                throw new CodecConfigurationException(format("The SET_PRIVATE_FIELDS_CONVENTION is not compatible with "
                        + "propertyModelBuilder instance that have custom implementations of org.bson.codecs.pojo.PropertyAccessor: %s",
                        propertyModelBuilder.getPropertyAccessor().getClass().getName()));
            }

            PropertyAccessorImpl<?> defaultAccessor = (PropertyAccessorImpl<?>) propertyModelBuilder.getPropertyAccessor();
            PropertyMetadata<?> propertyMetaData = defaultAccessor.getPropertyMetadata();
            // Only intervene when the property is not already deserializable
            // through normal means and is backed by a private field.
            if (!propertyMetaData.isDeserializable() && propertyMetaData.getField() != null
                    && isPrivate(propertyMetaData.getField().getModifiers())) {
                setPropertyAccessor(propertyModelBuilder);
            }
        }
    }

    // Replaces the builder's accessor with a field-setting wrapper; helper exists
    // to capture the builder's type parameter T for the unchecked cast.
    @SuppressWarnings("unchecked")
    private <T> void setPropertyAccessor(final PropertyModelBuilder<T> propertyModelBuilder) {
        propertyModelBuilder.propertyAccessor(new PrivatePropertyAccessor<T>(
                (PropertyAccessorImpl<T>) propertyModelBuilder.getPropertyAccessor()));
    }

    /**
     * Accessor that reads through the wrapped default accessor but writes by
     * reflective field assignment, after forcing the field accessible.
     */
    private static final class PrivatePropertyAccessor<T> implements PropertyAccessor<T> {
        private final PropertyAccessorImpl<T> wrapped;

        private PrivatePropertyAccessor(final PropertyAccessorImpl<T> wrapped) {
            this.wrapped = wrapped;
            try {
                // May fail under a security manager or (on newer JVMs) strong
                // module encapsulation; surfaced as a configuration error.
                wrapped.getPropertyMetadata().getField().setAccessible(true);
            } catch (Exception e) {
                throw new CodecConfigurationException(format("Unable to make private field accessible '%s' in %s",
                        wrapped.getPropertyMetadata().getName(), wrapped.getPropertyMetadata().getDeclaringClassName()), e);
            }
        }

        @Override
        public <S> T get(final S instance) {
            // Reads delegate to the default accessor unchanged.
            return wrapped.get(instance);
        }

        @Override
        public <S> void set(final S instance, final T value) {
            try {
                wrapped.getPropertyMetadata().getField().set(instance, value);
            } catch (Exception e) {
                throw new CodecConfigurationException(format("Unable to set value for property '%s' in %s",
                        wrapped.getPropertyMetadata().getName(), wrapped.getPropertyMetadata().getDeclaringClassName()), e);
            }
        }
    }
}
apache-2.0
bredy/ChromeRestClient
RestClient/src/org/rest/client/dom/worker/WorkerImpl.java
1156
package org.rest.client.dom.worker;

import com.google.gwt.core.client.JavaScriptObject;

/**
 * GWT JavaScript overlay type wrapping a browser Web Worker.
 * All behavior lives in the JSNI bodies; instances are created only from
 * JavaScript via {@link #get(String)}, hence the protected constructor.
 */
class WorkerImpl extends JavaScriptObject {
	protected WorkerImpl() {}
	
	// Creates a new Worker running the script at the given URL.
	static final native WorkerImpl get(String script) /*-{
		var worker = new Worker(script);
		return worker;
	}-*/;
	
	/**
	 * Starts the worker.
	 * @param message Message sent to worker.
	 */
	final native void postMessage(String message) /*-{
		this.postMessage(message);
	}-*/;
	
	// Overload for posting a structured (non-string) message to the worker.
	final native void postMessage(JavaScriptObject message) /*-{
		this.postMessage(message);
	}-*/;
	
	// Registers 'message' and 'error' listeners that forward into the given
	// Java handler. $entry preserves the GWT exception-handling context;
	// $wnd._lastWorker / $wnd._lastWorkerError stash the last payloads,
	// presumably for debugging from the browser console (TODO confirm).
	final native void onMessage(WorkerMessageHandler handler) /*-{
		this.addEventListener('message', $entry(function(e) {
			$wnd._lastWorker = e.data;
			handler.@org.rest.client.dom.worker.WorkerMessageHandler::onMessage(Ljava/lang/String;)(e.data);
		}), false);
		this.addEventListener('error', $entry(function(e){
			$wnd._lastWorkerError = ['ERROR: Line ', e.lineno, ' in ', e.filename, ': ', e.message].join('');
			handler.@org.rest.client.dom.worker.WorkerMessageHandler::onError(Lorg/rest/client/dom/worker/WebWorkerError;)(e);
		}), false);
	}-*/;
}
apache-2.0
emmanuelsantana/jquati
jquati-1.0/src/net/sf/jquati/adviceinspector/AdviceInspectorElement.java
323
package net.sf.jquati.adviceinspector;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marker annotation for fields that the advice inspector should process.
 * Retained at runtime so it can be discovered via reflection; applicable
 * to fields only.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface AdviceInspectorElement {

}
apache-2.0
maxml/sample-apps
common/src/main/java/org/kaaproject/kaa/examples/common/KaaDemoBuilder.java
978
/** * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.examples.common; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.ElementType; import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) public @interface KaaDemoBuilder { Class<?>[] dependsOnBuilders() default {}; }
apache-2.0
Jackygq1982/hbase_src
hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
33868
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.security.access; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; 
import org.apache.hadoop.hbase.util.TestTableName; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(MediumTests.class) public class TestCellACLWithMultipleVersions extends SecureTestUtil { private static final Log LOG = LogFactory.getLog(TestCellACLWithMultipleVersions.class); static { Logger.getLogger(AccessController.class).setLevel(Level.TRACE); Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); } @Rule public TestTableName TEST_TABLE = new TestTableName(); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte[] TEST_FAMILY1 = Bytes.toBytes("f1"); private static final byte[] TEST_FAMILY2 = Bytes.toBytes("f2"); private static final byte[] TEST_ROW = Bytes.toBytes("cellpermtest"); private static final byte[] TEST_Q1 = Bytes.toBytes("q1"); private static final byte[] TEST_Q2 = Bytes.toBytes("q2"); private static final byte[] ZERO = Bytes.toBytes(0L); private static final byte[] ONE = Bytes.toBytes(1L); private static final byte[] TWO = Bytes.toBytes(2L); private static Configuration conf; private static User USER_OWNER; private static User USER_OTHER; private static User USER_OTHER2; @BeforeClass public static void setupBeforeClass() throws Exception { // setup configuration conf = TEST_UTIL.getConfiguration(); // Enable security enableSecurity(conf); // Verify enableSecurity sets up what we require verifyConfiguration(conf); // We expect 0.98 cell ACL semantics conf.setBoolean(AccessControlConstants.CF_ATTRIBUTE_EARLY_OUT, false); TEST_UTIL.startMiniCluster(); MasterCoprocessorHost cpHost = TEST_UTIL.getMiniHBaseCluster() .getMaster().getCoprocessorHost(); cpHost.load(AccessController.class, 
Coprocessor.PRIORITY_HIGHEST, conf); AccessController ac = (AccessController) cpHost.findCoprocessor(AccessController.class.getName()); cpHost.createEnvironment(AccessController.class, ac, Coprocessor.PRIORITY_HIGHEST, 1, conf); RegionServerCoprocessorHost rsHost = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0) .getCoprocessorHost(); rsHost.createEnvironment(AccessController.class, ac, Coprocessor.PRIORITY_HIGHEST, 1, conf); // Wait for the ACL table to become available TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName()); // create a set of test users USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]); USER_OTHER = User.createUserForTesting(conf, "other", new String[0]); USER_OTHER2 = User.createUserForTesting(conf, "other2", new String[0]); } @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Before public void setUp() throws Exception { // Create the test table (owner added to the _acl_ table) HBaseAdmin admin = TEST_UTIL.getHBaseAdmin(); HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName()); HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY1); hcd.setMaxVersions(4); htd.setOwner(USER_OWNER); htd.addFamily(hcd); hcd = new HColumnDescriptor(TEST_FAMILY2); hcd.setMaxVersions(4); htd.setOwner(USER_OWNER); htd.addFamily(hcd); admin.createTable(htd, new byte[][] { Bytes.toBytes("s") }); TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName().getName()); } @Test public void testCellPermissionwithVersions() throws Exception { // store two sets of values, one store with a cell level ACL, and one // without verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; // with ro ACL p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.WRITE)); t.put(p); // with ro ACL p = new 
Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.READ)); t.put(p); p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.WRITE)); t.put(p); p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.READ)); t.put(p); p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.WRITE)); t.put(p); } finally { t.close(); } return null; } }, USER_OWNER); /* ---- Gets ---- */ AccessTestAction getQ1 = new AccessTestAction() { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW); get.setMaxVersions(10); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { t.close(); } } }; AccessTestAction get2 = new AccessTestAction() { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW); get.setMaxVersions(10); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { t.close(); } } }; // Confirm special read access set at cell level verifyAllowed(USER_OTHER, getQ1, 2); // store two sets of values, one store with a cell level ACL, and one // without verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.WRITE)); t.put(p); p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.READ)); t.put(p); p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.WRITE)); t.put(p); } finally { t.close(); } return null; } }, 
USER_OWNER); // Confirm special read access set at cell level verifyAllowed(USER_OTHER, get2, 1); } @Test public void testCellPermissionsWithDeleteMutipleVersions() throws Exception { // table/column/qualifier level permissions final byte[] TEST_ROW1 = Bytes.toBytes("r1"); final byte[] TEST_ROW2 = Bytes.toBytes("r2"); final byte[] TEST_Q1 = Bytes.toBytes("q1"); final byte[] TEST_Q2 = Bytes.toBytes("q2"); final byte[] ZERO = Bytes.toBytes(0L); // additional test user final User user1 = User.createUserForTesting(conf, "user1", new String[0]); final User user2 = User.createUserForTesting(conf, "user2", new String[0]); verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { // with rw ACL for "user1" Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, ZERO); p.add(TEST_FAMILY1, TEST_Q2, ZERO); p.setACL(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); t.put(p); // with rw ACL for "user1" p = new Put(TEST_ROW2); p.add(TEST_FAMILY1, TEST_Q1, ZERO); p.add(TEST_FAMILY1, TEST_Q2, ZERO); p.setACL(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); t.put(p); } finally { t.close(); } return null; } }, USER_OWNER); verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { // with rw ACL for "user1" and "user2" Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, ZERO); p.add(TEST_FAMILY1, TEST_Q2, ZERO); Map<String, Permission> perms = new HashMap<String, Permission>(); perms.put(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); perms.put(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); p.setACL(perms); t.put(p); // with rw ACL for "user1" and "user2" p = new Put(TEST_ROW2); p.add(TEST_FAMILY1, TEST_Q1, ZERO); p.add(TEST_FAMILY1, TEST_Q2, 
ZERO); p.setACL(perms); t.put(p); } finally { t.close(); } return null; } }, user1); // user1 should be allowed to delete TEST_ROW1 as he is having write permission on both // versions of the cells user1.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumns(TEST_FAMILY1, TEST_Q1); d.deleteColumns(TEST_FAMILY1, TEST_Q2); t.delete(d); } finally { t.close(); } return null; } }); // user2 should not be allowed to delete TEST_ROW2 as he is having write permission only on one // version of the cells. user2.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW2); d.deleteColumns(TEST_FAMILY1, TEST_Q1); d.deleteColumns(TEST_FAMILY1, TEST_Q2); t.delete(d); fail("user2 should not be allowed to delete the row"); } catch (Exception e) { } finally { t.close(); } return null; } }); // user1 should be allowed to delete the cf. 
(All data under cf for a row) user1.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW2); d.deleteFamily(TEST_FAMILY1); t.delete(d); } finally { t.close(); } return null; } }); } @Test public void testDeleteWithFutureTimestamp() throws Exception { // Store two values, one in the future verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { // Store read only ACL at a future time Put p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, EnvironmentEdgeManager.currentTimeMillis() + 1000000, ZERO); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.READ)); t.put(p); // Store a read write ACL without a timestamp, server will use current time p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q2, ONE); p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); t.put(p); } finally { t.close(); } return null; } }, USER_OWNER); // Confirm stores are visible AccessTestAction getQ1 = new AccessTestAction() { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { t.close(); } } }; AccessTestAction getQ2 = new AccessTestAction() { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q2); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { t.close(); } } }; verifyAllowed(getQ1, USER_OWNER, USER_OTHER); verifyAllowed(getQ2, USER_OWNER, USER_OTHER); // Issue a DELETE for the family, should succeed because the future ACL is // not considered AccessTestAction deleteFamily = new AccessTestAction() { @Override public Object run() throws Exception 
{ Delete delete = new Delete(TEST_ROW).deleteFamily(TEST_FAMILY1); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { t.delete(delete); } finally { t.close(); } return null; } }; verifyAllowed(deleteFamily, USER_OTHER); // The future put should still exist verifyAllowed(getQ1, USER_OWNER, USER_OTHER); // The other put should be covered by the tombstone verifyDenied(getQ2, USER_OTHER); } @Test public void testCellPermissionsWithDeleteWithUserTs() throws Exception { USER_OWNER.runAs(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { // This version (TS = 123) with rw ACL for USER_OTHER and USER_OTHER2 Put p = new Put(TEST_ROW); p.add(TEST_FAMILY1, TEST_Q1, 123L, ZERO); p.add(TEST_FAMILY1, TEST_Q2, 123L, ZERO); Map<String, Permission> perms = new HashMap<String, Permission>(); perms.put(USER_OTHER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); perms.put(USER_OTHER2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); p.setACL(perms); t.put(p); // This version (TS = 125) with rw ACL for USER_OTHER p = new Put(TEST_ROW); p.add(TEST_FAMILY1, TEST_Q1, 125L, ONE); p.add(TEST_FAMILY1, TEST_Q2, 125L, ONE); perms = new HashMap<String, Permission>(); perms.put(USER_OTHER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); p.setACL(perms); t.put(p); // This version (TS = 127) with rw ACL for USER_OTHER p = new Put(TEST_ROW); p.add(TEST_FAMILY1, TEST_Q1, 127L, TWO); p.add(TEST_FAMILY1, TEST_Q2, 127L, TWO); perms = new HashMap<String, Permission>(); perms.put(USER_OTHER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); p.setACL(perms); t.put(p); return null; } finally { t.close(); } } }); // USER_OTHER2 should be allowed to delete the column f1:q1 versions older than TS 124L USER_OTHER2.runAs(new AccessTestAction() { @Override public Object run() throws 
Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW, 124L); d.deleteColumns(TEST_FAMILY1, TEST_Q1); t.delete(d); } finally { t.close(); } return null; } }); // USER_OTHER2 should be allowed to delete the column f1:q2 versions older than TS 124L USER_OTHER2.runAs(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW); d.deleteColumns(TEST_FAMILY1, TEST_Q2, 124L); t.delete(d); } finally { t.close(); } return null; } }); } @Test public void testCellPermissionsWithDeleteExactVersion() throws Exception { final byte[] TEST_ROW1 = Bytes.toBytes("r1"); final byte[] TEST_Q1 = Bytes.toBytes("q1"); final byte[] TEST_Q2 = Bytes.toBytes("q2"); final byte[] ZERO = Bytes.toBytes(0L); final User user1 = User.createUserForTesting(conf, "user1", new String[0]); final User user2 = User.createUserForTesting(conf, "user2", new String[0]); verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Map<String, Permission> permsU1andOwner = new HashMap<String, Permission>(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU1andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Map<String, Permission> permsU2andOwner = new HashMap<String, Permission>(); permsU2andOwner.put(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU2andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO); p.setACL(permsU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY2, 
TEST_Q1, 123, ZERO); p.add(TEST_FAMILY2, TEST_Q2, 123, ZERO); p.setACL(permsU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY2, TEST_Q1, 125, ZERO); p.add(TEST_FAMILY2, TEST_Q2, 125, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY2, TEST_Q1, 129, ZERO); p.add(TEST_FAMILY2, TEST_Q2, 129, ZERO); p.setACL(permsU1andOwner); t.put(p); } finally { t.close(); } return null; } }, USER_OWNER); // user1 should be allowed to delete TEST_ROW1 as he is having write permission on both // versions of the cells user1.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumn(TEST_FAMILY1, TEST_Q1, 123); d.deleteColumn(TEST_FAMILY1, TEST_Q2); d.deleteFamilyVersion(TEST_FAMILY2, 125); t.delete(d); } finally { t.close(); } return null; } }); user2.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1, 127); d.deleteColumns(TEST_FAMILY1, TEST_Q1); d.deleteColumns(TEST_FAMILY1, TEST_Q2); d.deleteFamily(TEST_FAMILY2, 129); t.delete(d); fail("user2 can not do the delete"); } catch (Exception e) { } finally { t.close(); } return null; } }); } @Test public void testCellPermissionsForIncrementWithMultipleVersions() throws Exception { final byte[] TEST_ROW1 = Bytes.toBytes("r1"); final byte[] TEST_Q1 = Bytes.toBytes("q1"); final byte[] TEST_Q2 = Bytes.toBytes("q2"); final byte[] ZERO = Bytes.toBytes(0L); final User user1 = User.createUserForTesting(conf, "user1", new String[0]); final User user2 = User.createUserForTesting(conf, "user2", new String[0]); 
verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Map<String, Permission> permsU1andOwner = new HashMap<String, Permission>(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU1andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Map<String, Permission> permsU2andOwner = new HashMap<String, Permission>(); permsU2andOwner.put(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU2andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO); p.setACL(permsU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(permsU1andOwner); t.put(p); } finally { t.close(); } return null; } }, USER_OWNER); // Increment considers the TimeRange set on it. 
user1.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Increment inc = new Increment(TEST_ROW1); inc.setTimeRange(0, 123); inc.addColumn(TEST_FAMILY1, TEST_Q1, 2L); t.increment(inc); t.incrementColumnValue(TEST_ROW1, TEST_FAMILY1, TEST_Q2, 1L); } finally { t.close(); } return null; } }); user2.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Increment inc = new Increment(TEST_ROW1); inc.setTimeRange(0, 127); inc.addColumn(TEST_FAMILY1, TEST_Q2, 2L); t.increment(inc); fail(); } catch (Exception e) { } finally { t.close(); } return null; } }); } @Test public void testCellPermissionsForPutWithMultipleVersions() throws Exception { final byte[] TEST_ROW1 = Bytes.toBytes("r1"); final byte[] TEST_Q1 = Bytes.toBytes("q1"); final byte[] TEST_Q2 = Bytes.toBytes("q2"); final byte[] ZERO = Bytes.toBytes(0L); final User user1 = User.createUserForTesting(conf, "user1", new String[0]); final User user2 = User.createUserForTesting(conf, "user2", new String[0]); verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Map<String, Permission> permsU1andOwner = new HashMap<String, Permission>(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU1andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Map<String, Permission> permsU2andOwner = new HashMap<String, Permission>(); permsU2andOwner.put(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU2andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO); 
p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO); p.setACL(permsU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(permsU1andOwner); t.put(p); } finally { t.close(); } return null; } }, USER_OWNER); // new Put with TEST_Q1 column having TS=125. This covers old cell with TS 123 and user1 is // having RW permission. While TEST_Q2 is with latest TS and so it covers old cell with TS 127. // User1 is having RW permission on that too. user1.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 125, ZERO); p.add(TEST_FAMILY1, TEST_Q2, ZERO); p.setACL(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); t.put(p); } finally { t.close(); } return null; } }); // Should be denied. 
user2.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p = new Put(TEST_ROW1); // column Q1 covers version at 123 fr which user2 do not have permission p.add(TEST_FAMILY1, TEST_Q1, 124, ZERO); p.add(TEST_FAMILY1, TEST_Q2, ZERO); t.put(p); fail(); } catch (Exception e) { } finally { t.close(); } return null; } }); } @Test public void testCellPermissionsForCheckAndDelete() throws Exception { final byte[] TEST_ROW1 = Bytes.toBytes("r1"); final byte[] ZERO = Bytes.toBytes(0L); final User user1 = User.createUserForTesting(conf, "user1", new String[0]); final User user2 = User.createUserForTesting(conf, "user2", new String[0]); verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Map<String, Permission> permsU1andOwner = new HashMap<String, Permission>(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU1andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Map<String, Permission> permsU1andU2andOwner = new HashMap<String, Permission>(); permsU1andU2andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU1andU2andOwner.put(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU1andU2andOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Map<String, Permission> permsU1andU2 = new HashMap<String, Permission>(); permsU1andU2.put(user1.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); permsU1andU2.put(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 120, ZERO); p.add(TEST_FAMILY1, TEST_Q2, 
120, ZERO); p.setACL(permsU1andU2andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO); p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU1andU2); t.put(p); p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(user2.getShortName(), new Permission(Permission.Action.READ)); t.put(p); } finally { t.close(); } return null; } }, USER_OWNER); // user1 should be allowed to do the checkAndDelete. user1 having read permission on the latest // version cell and write permission on all versions user1.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumns(TEST_FAMILY1, TEST_Q1, 120); t.checkAndDelete(TEST_ROW1, TEST_FAMILY1, TEST_Q1, ZERO, d); } finally { t.close(); } return null; } }); // user2 shouldn't be allowed to do the checkAndDelete. user2 having RW permission on the latest // version cell but not on cell version TS=123 user2.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumns(TEST_FAMILY1, TEST_Q1); t.checkAndDelete(TEST_ROW1, TEST_FAMILY1, TEST_Q1, ZERO, d); fail("user2 should not be allowed to do checkAndDelete"); } catch (Exception e) { } finally { t.close(); } return null; } }); // user2 should be allowed to do the checkAndDelete when delete tries to delete the old version // TS=120. user2 having R permission on the latest version(no W permission) cell // and W permission on cell version TS=120. 
user2.runAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumn(TEST_FAMILY1, TEST_Q2, 120); t.checkAndDelete(TEST_ROW1, TEST_FAMILY1, TEST_Q2, ZERO, d); } finally { t.close(); } return null; } }); } @After public void tearDown() throws Exception { // Clean the _acl_ table try { TEST_UTIL.deleteTable(TEST_TABLE.getTableName()); } catch (TableNotFoundException ex) { // Test deleted the table, no problem LOG.info("Test deleted table " + TEST_TABLE.getTableName()); } assertEquals(0, AccessControlLists.getTablePermissions(conf, TEST_TABLE.getTableName()).size()); } }
apache-2.0
lastfm/moji
src/test/java/fm/last/moji/impl/GetDeviceStatusesCommandTest.java
4638
/** * Copyright (C) 2012-2017 Last.fm & The "mogilefs-moji" committers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fm.last.moji.impl; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import fm.last.moji.MojiDeviceStatus; import fm.last.moji.tracker.Tracker; import fm.last.moji.tracker.TrackerException; @RunWith(MockitoJUnitRunner.class) public class GetDeviceStatusesCommandTest { @Mock private Tracker mockTracker; private GetDeviceStatusesCommand command; @Before public void init() throws TrackerException { command = new GetDeviceStatusesCommand("domain"); Map<String, String> device1params = new HashMap<String, String>(); device1params.put("utilization", "0.5"); device1params.put("observed_state", "ok"); device1params.put("mb_total", "100"); device1params.put("mb_used", "51"); device1params.put("mb_free", "49"); device1params.put("reject_bad_md5", "1"); device1params.put("weight", "10"); device1params.put("devid", "1"); device1params.put("status", "failed"); device1params.put("hostid", "4"); Map<String, String> device2params = new HashMap<String, String>(); device2params.put("utilization", "0.1"); device2params.put("observed_state", 
"error"); device2params.put("mb_total", "200"); device2params.put("mb_used", "51"); device2params.put("mb_free", "149"); device2params.put("reject_bad_md5", "0"); device2params.put("weight", "5"); device2params.put("devid", "2"); device2params.put("status", "ok"); device2params.put("hostid", "8"); Map<String, Map<String, String>> statuses = new HashMap<String, Map<String, String>>(); statuses.put("dev1", device1params); statuses.put("dev2", device2params); when(mockTracker.getDeviceStatuses("domain")).thenReturn(statuses); } @Test public void typical() throws Exception { command.executeWithTracker(mockTracker); List<MojiDeviceStatus> statuses = command.getStatuses(); assertThat(statuses.size(), is(2)); MojiDeviceStatus device1Status = statuses.get(0); assertThat(device1Status.getCapacityFreeBytes(), is(51380224L)); assertThat(device1Status.getCapacityTotalBytes(), is(104857600L)); assertThat(device1Status.getCapacityUsedBytes(), is(53477376L)); assertThat(device1Status.getDeviceName(), is("dev1")); assertThat(device1Status.getHostId(), is(4)); assertThat(device1Status.getId(), is(1)); assertThat(device1Status.getObservedState(), is("ok")); assertThat(device1Status.getRejectBadMd5(), is(true)); assertThat(device1Status.getStatus(), is("failed")); assertThat(device1Status.getUtilization(), is(0.5f)); assertThat(device1Status.getWeight(), is(10)); MojiDeviceStatus device2Status = statuses.get(1); assertThat(device2Status.getCapacityFreeBytes(), is(156237824L)); assertThat(device2Status.getCapacityTotalBytes(), is(209715200L)); assertThat(device2Status.getCapacityUsedBytes(), is(53477376L)); assertThat(device2Status.getDeviceName(), is("dev2")); assertThat(device2Status.getHostId(), is(8)); assertThat(device2Status.getId(), is(2)); assertThat(device2Status.getObservedState(), is("error")); assertThat(device2Status.getRejectBadMd5(), is(false)); assertThat(device2Status.getStatus(), is("ok")); assertThat(device2Status.getUtilization(), is(0.1f)); 
assertThat(device2Status.getWeight(), is(5)); } @Test public void empty() throws Exception { Map<String, Map<String, String>> parameters = Collections.emptyMap(); when(mockTracker.getDeviceStatuses("domain")).thenReturn(parameters); command.executeWithTracker(mockTracker); List<MojiDeviceStatus> statuses = command.getStatuses(); assertThat(statuses.isEmpty(), is(true)); } }
apache-2.0
treeform/orekit
src/main/java/org/orekit/bodies/IAUPoleFactory.java
17513
/* Copyright 2002-2014 CS Systèmes d'Information * Licensed to CS Systèmes d'Information (CS) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * CS licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.orekit.bodies; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; import org.apache.commons.math3.util.FastMath; import org.orekit.time.AbsoluteDate; import org.orekit.utils.Constants; /** Factory class for IAU poles. * <p>The pole models provided here come from the <a * href="http://astropedia.astrogeology.usgs.gov/alfresco/d/d/workspace/SpacesStore/28fd9e81-1964-44d6-a58b-fbbf61e64e15/WGCCRE2009reprint.pdf"> * 2009 report</a> and the <a href="http://astropedia.astrogeology.usgs.gov/alfresco/d/d/workspace/SpacesStore/04d348b0-eb2b-46a2-abe9-6effacb37763/WGCCRE-Erratum-2011reprint.pdf"> * 2011 erratum</a> of the IAU/IAG Working Group on Cartographic Coordinates * and Rotational Elements of the Planets and Satellites (WGCCRE). Note that these value * differ from earliest reports (before 2005). *</p> * @author Luc Maisonobe * @since 5.1 */ class IAUPoleFactory { /** Private constructor. * <p>This class is a utility class, it should neither have a public * nor a default constructor. This private constructor prevents * the compiler from generating one automatically.</p> */ private IAUPoleFactory() { } /** Get an IAU pole. 
* @param body body for which the pole is requested * @return IAU pole for the body, or dummy GCRF aligned pole * for barycenters */ public static IAUPole getIAUPole(final JPLEphemeridesLoader.EphemerisType body) { switch (body) { case SUN: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = 5715331729495237139L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { return new Vector3D(FastMath.toRadians(286.13), FastMath.toRadians(63.87)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(84.176 + 14.1844000 * d(date)); } }; case MERCURY: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = -5769710119654037007L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { final double t = t(date); return new Vector3D(FastMath.toRadians(281.0097 - 0.0328 * t), FastMath.toRadians( 61.4143 - 0.0049 * t)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { final double[] m = computeMi(date); return FastMath.toRadians(329.5469 + 6.1385025 * d(date) + 0.00993822 * FastMath.sin(m[0]) - 0.00104581 * FastMath.sin(m[1]) - 0.00010280 * FastMath.sin(m[2]) - 0.00002364 * FastMath.sin(m[3]) - 0.00000532 * FastMath.sin(m[4])); } /** Compute the Mercury angles M<sub>i</sub>. * @param date date * @return array of Mercury angles, with M<sub>i</sub> stored at index i-1 */ private double[] computeMi(final AbsoluteDate date) { final double d = d(date); return new double[] { FastMath.toRadians(174.791096 + 4.092335 * d), // M1 FastMath.toRadians(349.582171 + 8.184670 * d), // M2 FastMath.toRadians(164.373257 + 12.277005 * d), // M3 FastMath.toRadians(339.164343 + 16.369340 * d), // M4 FastMath.toRadians(153.955429 + 20.461675 * d), // M5 }; } }; case VENUS: return new IAUPole() { /** Serializable UID. 
*/ private static final long serialVersionUID = 7030506277976648896L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { return new Vector3D(FastMath.toRadians(272.76), FastMath.toRadians(67.16)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(160.20 - 1.4813688 * d(date)); } }; case EARTH: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = 6912325697192667056L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { final double t = t(date); return new Vector3D(FastMath.toRadians( 0.00 - 0.641 * t), FastMath.toRadians(90.00 - 0.557 * t)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(190.147 + 360.9856235 * d(date)); } }; case MOON: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = -1310155975084976571L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { final double[] e = computeEi(date); final double t = t(date); return new Vector3D(FastMath.toRadians(269.9949 + 0.0031 * t - 3.8787 * FastMath.sin(e[0]) - 0.1204 * FastMath.sin(e[1]) + 0.0700 * FastMath.sin(e[2]) - 0.0172 * FastMath.sin(e[3]) + 0.0072 * FastMath.sin(e[5]) - 0.0052 * FastMath.sin(e[9]) + 0.0043 * FastMath.sin(e[12])), FastMath.toRadians( 66.5392 + 0.0130 * t + 1.5419 * FastMath.cos(e[0]) + 0.0239 * FastMath.cos(e[1]) - 0.0278 * FastMath.cos(e[2]) + 0.0068 * FastMath.cos(e[3]) - 0.0029 * FastMath.cos(e[5]) + 0.0009 * FastMath.cos(e[6]) + 0.0008 * FastMath.cos(e[9]) - 0.0009 * FastMath.cos(e[12]))); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { final double[] e = computeEi(date); final double d = d(date); return FastMath.toRadians(38.3213 + (13.17635815 - 1.4e-12 * d) * d + 3.5610 * FastMath.sin(e[0]) + 0.1208 * FastMath.sin(e[1]) - 0.0642 * FastMath.sin(e[2]) + 0.0158 * FastMath.sin(e[3]) + 
0.0252 * FastMath.sin(e[4]) - 0.0066 * FastMath.sin(e[5]) - 0.0047 * FastMath.sin(e[6]) - 0.0046 * FastMath.sin(e[7]) + 0.0028 * FastMath.sin(e[8]) + 0.0052 * FastMath.sin(e[9]) + 0.0040 * FastMath.sin(e[10]) + 0.0019 * FastMath.sin(e[11]) - 0.0044 * FastMath.sin(e[12])); } /** Compute the Moon angles E<sub>i</sub>. * @param date date * @return array of Moon angles, with E<sub>i</sub> stored at index i-1 */ private double[] computeEi(final AbsoluteDate date) { final double d = d(date); return new double[] { FastMath.toRadians(125.045 - 0.0529921 * d), // E1 FastMath.toRadians(250.089 - 0.1059842 * d), // E2 FastMath.toRadians(260.008 + 13.0120009 * d), // E3 FastMath.toRadians(176.625 + 13.3407154 * d), // E4 FastMath.toRadians(357.529 + 0.9856003 * d), // E5 FastMath.toRadians(311.589 + 26.4057084 * d), // E6 FastMath.toRadians(134.963 + 13.0649930 * d), // E7 FastMath.toRadians(276.617 + 0.3287146 * d), // E8 FastMath.toRadians( 34.226 + 1.7484877 * d), // E9 FastMath.toRadians( 15.134 - 0.1589763 * d), // E10 FastMath.toRadians(119.743 + 0.0036096 * d), // E11 FastMath.toRadians(239.961 + 0.1643573 * d), // E12 FastMath.toRadians( 25.053 + 12.9590088 * d) // E13 }; } }; case MARS: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = 1471983418540015411L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { final double t = t(date); return new Vector3D(FastMath.toRadians(317.68143 - 0.1061 * t), FastMath.toRadians( 52.88650 - 0.0609 * t)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(176.630 + 350.89198226 * d(date)); } }; case JUPITER: return new IAUPole() { /** Serializable UID. 
*/ private static final long serialVersionUID = 6959753758673537524L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { final double t = t(date); final double ja = FastMath.toRadians( 99.360714 + 4850.4046 * t); final double jb = FastMath.toRadians(175.895369 + 1191.9605 * t); final double jc = FastMath.toRadians(300.323162 + 262.5475 * t); final double jd = FastMath.toRadians(114.012305 + 6070.2476 * t); final double je = FastMath.toRadians( 49.511251 + 64.3000 * t); return new Vector3D(FastMath.toRadians(268.056595 - 0.006499 * t + 0.000117 * FastMath.sin(ja) + 0.000938 * FastMath.sin(jb) + 0.001432 * FastMath.sin(jc) + 0.000030 * FastMath.sin(jd) + 0.002150 * FastMath.sin(je)), FastMath.toRadians( 64.495303 + 0.002413 * t) + 0.000050 * FastMath.cos(ja) + 0.000404 * FastMath.cos(jb) + 0.000617 * FastMath.cos(jc) - 0.000013 * FastMath.cos(jd) + 0.000926 * FastMath.cos(je)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(284.95 + 870.5360000 * d(date)); } }; case SATURN: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = -1082211873912149774L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { final double t = t(date); return new Vector3D(FastMath.toRadians(40.589 - 0.036 * t), FastMath.toRadians(83.537 - 0.004 * t)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(38.90 + 810.7939024 * d(date)); } }; case URANUS: return new IAUPole() { /** Serializable UID. 
*/ private static final long serialVersionUID = 362792230470085154L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { return new Vector3D(FastMath.toRadians(257.311), FastMath.toRadians(-15.175)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(203.81 - 501.1600928 * d(date)); } }; case NEPTUNE: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = 560614555734665287L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { final double n = FastMath.toRadians(357.85 + 52.316 * t(date)); return new Vector3D(FastMath.toRadians(299.36 + 0.70 * FastMath.sin(n)), FastMath.toRadians( 43.46 - 0.51 * FastMath.cos(n))); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { final double n = FastMath.toRadians(357.85 + 52.316 * t(date)); return FastMath.toRadians(253.18 + 536.3128492 * d(date) - 0.48 * FastMath.sin(n)); } }; case PLUTO: return new IAUPole() { /** Serializable UID. */ private static final long serialVersionUID = -1277113129327018062L; /** {@inheritDoc }*/ public Vector3D getPole(final AbsoluteDate date) { return new Vector3D(FastMath.toRadians(132.993), FastMath.toRadians(-6.163)); } /** {@inheritDoc }*/ public double getPrimeMeridianAngle(final AbsoluteDate date) { return FastMath.toRadians(302.695 + 56.3625225 * d(date)); } }; default: return new GCRFAligned(); } } /** Compute the interval in julian centuries from standard epoch. * @param date date * @return interval between date and standard epoch in julian centuries */ private static double t(final AbsoluteDate date) { return date.durationFrom(AbsoluteDate.J2000_EPOCH) / Constants.JULIAN_CENTURY; } /** Compute the interval in julian days from standard epoch. 
* @param date date * @return interval between date and standard epoch in julian days */ private static double d(final AbsoluteDate date) { return date.durationFrom(AbsoluteDate.J2000_EPOCH) / Constants.JULIAN_DAY; } /** Default IAUPole implementation for barycenters. * <p> * This implementation defines directions such that the inertially oriented and body * oriented frames are identical and aligned with GCRF. It is used for example * to define the ICRF. * </p> */ private static class GCRFAligned implements IAUPole { /** Serializable UID. */ private static final long serialVersionUID = 20130327L; /** {@inheritDoc} */ public Vector3D getPole(final AbsoluteDate date) { return Vector3D.PLUS_K; } /** {@inheritDoc} */ public double getPrimeMeridianAngle(final AbsoluteDate date) { return 0; } } }
apache-2.0
etirelli/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-api/src/main/java/org/kie/workbench/common/stunner/bpmn/workitem/ServiceTaskFactory.java
4467
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.bpmn.workitem;

import java.util.function.Supplier;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;

import org.kie.workbench.common.stunner.core.definition.adapter.binding.BindableAdapterUtils;
import org.kie.workbench.common.stunner.core.definition.builder.Builder;
import org.kie.workbench.common.stunner.core.factory.definition.DefinitionFactory;

/**
 * Definition factory that builds {@link ServiceTask} instances from dynamic
 * identifiers of the form {@code <ServiceTask class name>|<work item name>}.
 * The work item name is resolved against the {@link WorkItemDefinitionRegistry}
 * and the matching definition's attributes are copied onto the new task.
 */
@ApplicationScoped
public class ServiceTaskFactory
        implements DefinitionFactory<ServiceTask> {

    // All identifiers accepted by this factory start with the generic class
    // name of ServiceTask, as produced by BindableAdapterUtils.
    private static final String PREFIX = BindableAdapterUtils.getGenericClassName(ServiceTask.class);

    // Supplier indirection so the registry can come either from CDI or be
    // provided programmatically (see the three constructors below).
    private final Supplier<WorkItemDefinitionRegistry> registry;

    // CDI proxy.
    protected ServiceTaskFactory() {
        this.registry = null;
    }

    /**
     * CDI constructor: defers registry resolution until first use via
     * {@code Instance#get()}.
     */
    @Inject
    public ServiceTaskFactory(final Instance<WorkItemDefinitionRegistry> registry) {
        this.registry = registry::get;
    }

    /** Programmatic constructor, mainly useful for tests. */
    public ServiceTaskFactory(final Supplier<WorkItemDefinitionRegistry> registry) {
        this.registry = registry;
    }

    /** Accepts any identifier scoped under the ServiceTask generic class name. */
    @Override
    public boolean accepts(final String identifier) {
        return identifier.startsWith(PREFIX);
    }

    /**
     * Builds a task for the given identifier. When the identifier carries a
     * dynamic work item name, the task is populated from the registry entry;
     * otherwise a default (empty) ServiceTask is returned.
     */
    @Override
    public ServiceTask build(final String identifier) {
        final String name = BindableAdapterUtils.getDynamicId(ServiceTask.class,
                                                              identifier);
        return null != name ?
                buildItem(name) :
                ServiceTaskBuilder.newInstance();
    }

    // Looks the work item up in the registry; a missing entry is a hard error
    // because the identifier explicitly named it.
    private ServiceTask buildItem(final String workItemName) {
        final WorkItemDefinition workItemDefinition = getRegistry().get(workItemName);
        if (null != workItemDefinition) {
            return new ServiceTaskBuilder(workItemDefinition)
                    .build();
        }
        throw new RuntimeException("No service task builder found for [" + workItemName + "]");
    }

    @SuppressWarnings("all")
    private WorkItemDefinitionRegistry getRegistry() {
        return registry.get();
    }

    /**
     * Builder that maps a {@link WorkItemDefinition}'s attributes onto a new
     * {@link ServiceTask}.
     */
    public static class ServiceTaskBuilder
            implements Builder<ServiceTask> {

        private final WorkItemDefinition workItemDefinition;

        public ServiceTaskBuilder(final WorkItemDefinition workItemDefinition) {
            this.workItemDefinition = workItemDefinition;
        }

        /** @return a ServiceTask with default attribute values. */
        public static ServiceTask newInstance() {
            return new ServiceTask();
        }

        /** Builds the task, copying name, documentation, description and
         * assignments from the work item definition. */
        @Override
        public ServiceTask build() {
            final ServiceTask serviceTask = newInstance();
            final String name = workItemDefinition.getName();
            setProperties(workItemDefinition,
                          serviceTask);
            serviceTask.getExecutionSet().getTaskName().setValue(name);
            serviceTask.getGeneral().getName().setValue(workItemDefinition.getDisplayName());
            serviceTask.getGeneral().getDocumentation().setValue(workItemDefinition.getDocumentation());
            serviceTask.setDescription(workItemDefinition.getDescription());
            // NOTE(review): assumes getParameters() and getResults() return
            // pre-encoded assignment strings that concatenate into a valid
            // assignments-info value, and that neither is null — confirm
            // against WorkItemDefinition.
            serviceTask.getDataIOSet()
                    .getAssignmentsinfo()
                    .setValue(workItemDefinition.getParameters() + workItemDefinition.getResults());
            return serviceTask;
        }

        /** Copies the identity properties (name, task type, category, default
         * handler) shared by all construction paths.
         * @return the same {@code serviceTask} instance, for chaining */
        public static ServiceTask setProperties(final WorkItemDefinition workItemDefinition,
                                                final ServiceTask serviceTask) {
            final String name = workItemDefinition.getName();
            serviceTask.setName(name);
            serviceTask.getTaskType().setRawType(name);
            serviceTask.setCategory(workItemDefinition.getCategory());
            serviceTask.setDefaultHandler(workItemDefinition.getDefaultHandler());
            return serviceTask;
        }
    }
}
apache-2.0
sekikn/ambari
ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/StompResponse.java
1377
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ambari.server.agent.stomp;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Default STOMP topic response. The minimal response carries
 * {@link StompResponseStatus#OK}; it is also used to return the correlation id
 * to the agent.
 */
public class StompResponse {

  /** Outcome reported back to the agent. */
  public enum StompResponseStatus {
    OK,
    FAILED
  }

  /** Serialized as the {@code "status"} JSON field; defaults to OK. */
  @JsonProperty("status")
  private StompResponseStatus responseStatus = StompResponseStatus.OK;

  public StompResponseStatus getStatus() {
    return responseStatus;
  }

  public void setStatus(StompResponseStatus newStatus) {
    this.responseStatus = newStatus;
  }
}
apache-2.0
tgroh/incubator-beam
runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/util/StructsTest.java
7179
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.dataflow.util;

import static org.apache.beam.runners.dataflow.util.Structs.addBoolean;
import static org.apache.beam.runners.dataflow.util.Structs.addDouble;
import static org.apache.beam.runners.dataflow.util.Structs.addList;
import static org.apache.beam.runners.dataflow.util.Structs.addLong;
import static org.apache.beam.runners.dataflow.util.Structs.addLongs;
import static org.apache.beam.runners.dataflow.util.Structs.addNull;
import static org.apache.beam.runners.dataflow.util.Structs.addString;
import static org.apache.beam.runners.dataflow.util.Structs.addStringList;
import static org.apache.beam.runners.dataflow.util.Structs.getBoolean;
import static org.apache.beam.runners.dataflow.util.Structs.getDictionary;
import static org.apache.beam.runners.dataflow.util.Structs.getInt;
import static org.apache.beam.runners.dataflow.util.Structs.getListOfMaps;
import static org.apache.beam.runners.dataflow.util.Structs.getLong;
import static org.apache.beam.runners.dataflow.util.Structs.getObject;
import static org.apache.beam.runners.dataflow.util.Structs.getString;
import static org.apache.beam.runners.dataflow.util.Structs.getStrings;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Tests for Structs.
 *
 * <p>Each test builds a dictionary via {@link #makeCloudDictionary()} and then
 * exercises one family of typed accessors, covering the present-key, the
 * missing-key, and the wrong-type cases. Error cases use the
 * try / {@code Assert.fail} / catch pattern and only check that the exception
 * message contains the expected fragment.</p>
 */
@RunWith(JUnit4.class)
public class StructsTest {

  /** Builds two small CloudObjects (one string entry, one long entry) used as
   * the value of {@code multipleObjectsKey} and as the expected value in
   * {@link #testGetListOfMaps()}. */
  private List<Map<String, Object>> makeCloudObjects() {
    List<Map<String, Object>> objects = new ArrayList<>();
    {
      CloudObject o = CloudObject.forClassName("string");
      addString(o, "singletonStringKey", "stringValue");
      objects.add(o);
    }
    {
      CloudObject o = CloudObject.forClassName("long");
      addLong(o, "singletonLongKey", 42L);
      objects.add(o);
    }
    return objects;
  }

  /** Builds the shared fixture dictionary with one entry per value shape:
   * empty list, null, singleton/multiple strings, longs, double, boolean,
   * and a list of nested objects. */
  private Map<String, Object> makeCloudDictionary() {
    Map<String, Object> o = new HashMap<>();
    addList(o, "emptyKey", Collections.emptyList());
    addNull(o, "noStringsKey");
    addString(o, "singletonStringKey", "stringValue");
    addStringList(o, "multipleStringsKey", Arrays.asList("hi", "there", "bob"));
    addLongs(o, "multipleLongsKey", 47L, 1L << 42, -5L);
    addLong(o, "singletonLongKey", 42L);
    addDouble(o, "singletonDoubleKey", 3.14);
    addBoolean(o, "singletonBooleanKey", true);
    addNull(o, "noObjectsKey");
    addList(o, "multipleObjectsKey", makeCloudObjects());
    return o;
  }

  /** getString: present key, default fallback, missing-key error, and
   * wrong-type errors (null, string list, empty list). Also checks that the
   * null entry reads back as empty via the collection-shaped accessors. */
  @Test
  public void testGetStringParameter() throws Exception {
    Map<String, Object> o = makeCloudDictionary();

    Assert.assertEquals(
        "stringValue",
        getString(o, "singletonStringKey"));
    Assert.assertEquals(
        "stringValue",
        getString(o, "singletonStringKey", "defaultValue"));
    Assert.assertEquals(
        "defaultValue",
        getString(o, "missingKey", "defaultValue"));

    try {
      getString(o, "missingKey");
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString(
                            "didn't find required parameter missingKey"));
    }

    try {
      getString(o, "noStringsKey");
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString("not a string"));
    }

    // A null entry is not a string, but reads back as an empty collection
    // through each of the collection-shaped accessors.
    Assert.assertThat(getStrings(o, "noStringsKey", null), Matchers.emptyIterable());
    Assert.assertThat(getObject(o, "noStringsKey").keySet(), Matchers.emptyIterable());
    Assert.assertThat(getDictionary(o, "noStringsKey").keySet(), Matchers.emptyIterable());
    Assert.assertThat(getDictionary(o, "noStringsKey", null).keySet(), Matchers.emptyIterable());

    try {
      getString(o, "multipleStringsKey");
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString("not a string"));
    }

    try {
      getString(o, "emptyKey");
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString("not a string"));
    }
  }

  /** getBoolean: present key, default for missing key, wrong-type error. */
  @Test
  public void testGetBooleanParameter() throws Exception {
    Map<String, Object> o = makeCloudDictionary();

    Assert.assertEquals(
        true,
        getBoolean(o, "singletonBooleanKey", false));
    Assert.assertEquals(
        false,
        getBoolean(o, "missingKey", false));

    try {
      getBoolean(o, "emptyKey", false);
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString("not a boolean"));
    }
  }

  /** getLong/getInt: a stored long reads back through both accessors,
   * defaults apply for missing keys, wrong-type errors differ per accessor. */
  @Test
  public void testGetLongParameter() throws Exception {
    Map<String, Object> o = makeCloudDictionary();

    Assert.assertEquals(
        (Long) 42L,
        getLong(o, "singletonLongKey", 666L));
    Assert.assertEquals(
        (Integer) 42,
        getInt(o, "singletonLongKey", 666));
    Assert.assertEquals(
        (Long) 666L,
        getLong(o, "missingKey", 666L));

    try {
      getLong(o, "emptyKey", 666L);
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString("not a long"));
    }

    try {
      getInt(o, "emptyKey", 666);
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString("not an int"));
    }
  }

  /** getListOfMaps: nested object list reads back equal to the fixture;
   * a scalar entry raises a "not a list" error. */
  @Test
  public void testGetListOfMaps() throws Exception {
    Map<String, Object> o = makeCloudDictionary();

    Assert.assertEquals(
        makeCloudObjects(),
        getListOfMaps(o, "multipleObjectsKey", null));

    try {
      getListOfMaps(o, "singletonLongKey", null);
      Assert.fail("should have thrown an exception");
    } catch (Exception exn) {
      Assert.assertThat(exn.toString(),
                        Matchers.containsString("not a list"));
    }
  }

  // TODO: Test builder operations.
}
apache-2.0
dennishuo/hadoop
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/StateStoreDFSCluster.java
6280
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.federation; import static org.apache.hadoop.hdfs.server.federation.store.FederationStateStoreTestUtils.createMockRegistrationForNamenode; import static org.apache.hadoop.hdfs.server.federation.store.FederationStateStoreTestUtils.getStateStoreConfiguration; import static org.apache.hadoop.hdfs.server.federation.store.FederationStateStoreTestUtils.synchronizeRecords; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.server.federation.resolver.ActiveNamenodeResolver; import org.apache.hadoop.hdfs.server.federation.resolver.FederationNamenodeServiceState; import org.apache.hadoop.hdfs.server.federation.resolver.FileSubclusterResolver; import org.apache.hadoop.hdfs.server.federation.resolver.MembershipNamenodeResolver; import org.apache.hadoop.hdfs.server.federation.resolver.MountTableResolver; import org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys; import org.apache.hadoop.hdfs.server.federation.store.StateStoreService; import org.apache.hadoop.hdfs.server.federation.store.records.MembershipState; 
import org.apache.hadoop.hdfs.server.federation.store.records.MountTable; /** * Test utility to mimic a federated HDFS cluster with a router and a state * store. */ public class StateStoreDFSCluster extends MiniRouterDFSCluster { private static final Class<?> DEFAULT_FILE_RESOLVER = MountTableResolver.class; private static final Class<?> DEFAULT_NAMENODE_RESOLVER = MembershipNamenodeResolver.class; public StateStoreDFSCluster(boolean ha, int numNameservices, int numNamenodes, long heartbeatInterval, long cacheFlushInterval) throws IOException, InterruptedException { this(ha, numNameservices, numNamenodes, heartbeatInterval, cacheFlushInterval, DEFAULT_FILE_RESOLVER); } public StateStoreDFSCluster(boolean ha, int numNameservices, int numNamenodes, long heartbeatInterval, long cacheFlushInterval, Class<?> fileResolver) throws IOException, InterruptedException { super(ha, numNameservices, numNamenodes, heartbeatInterval, cacheFlushInterval); // Attach state store and resolvers to router Configuration stateStoreConfig = getStateStoreConfiguration(); // Use state store backed resolvers stateStoreConfig.setClass( RBFConfigKeys.FEDERATION_NAMENODE_RESOLVER_CLIENT_CLASS, DEFAULT_NAMENODE_RESOLVER, ActiveNamenodeResolver.class); stateStoreConfig.setClass( RBFConfigKeys.FEDERATION_FILE_RESOLVER_CLIENT_CLASS, fileResolver, FileSubclusterResolver.class); this.addRouterOverrides(stateStoreConfig); } public StateStoreDFSCluster(boolean ha, int numNameservices, Class<?> fileResolver) throws IOException, InterruptedException { this(ha, numNameservices, 2, DEFAULT_HEARTBEAT_INTERVAL_MS, DEFAULT_CACHE_INTERVAL_MS, fileResolver); } public StateStoreDFSCluster(boolean ha, int numNameservices) throws IOException, InterruptedException { this(ha, numNameservices, 2, DEFAULT_HEARTBEAT_INTERVAL_MS, DEFAULT_CACHE_INTERVAL_MS); } public StateStoreDFSCluster(boolean ha, int numNameservices, int numNamnodes) throws IOException, InterruptedException { this(ha, numNameservices, numNamnodes, 
DEFAULT_HEARTBEAT_INTERVAL_MS, DEFAULT_CACHE_INTERVAL_MS); } ///////////////////////////////////////////////////////////////////////////// // State Store Test Fixtures ///////////////////////////////////////////////////////////////////////////// /** * Adds test fixtures for NN registation for each NN nameservice -> NS * namenode -> NN rpcAddress -> 0.0.0.0:0 webAddress -> 0.0.0.0:0 state -> * STANDBY safeMode -> false blockPool -> test. * * @param stateStore State Store. * @throws IOException If it cannot register. */ public void createTestRegistration(StateStoreService stateStore) throws IOException { List<MembershipState> entries = new ArrayList<MembershipState>(); for (NamenodeContext nn : this.getNamenodes()) { MembershipState entry = createMockRegistrationForNamenode( nn.getNameserviceId(), nn.getNamenodeId(), FederationNamenodeServiceState.STANDBY); entries.add(entry); } synchronizeRecords( stateStore, entries, MembershipState.class); } public void createTestMountTable(StateStoreService stateStore) throws IOException { List<MountTable> mounts = generateMockMountTable(); synchronizeRecords(stateStore, mounts, MountTable.class); stateStore.refreshCaches(); } public List<MountTable> generateMockMountTable() throws IOException { // create table entries List<MountTable> entries = new ArrayList<>(); for (String ns : this.getNameservices()) { Map<String, String> destMap = new HashMap<>(); destMap.put(ns, getNamenodePathForNS(ns)); // Direct path String fedPath = getFederatedPathForNS(ns); MountTable entry = MountTable.newInstance(fedPath, destMap); entries.add(entry); } // Root path goes to nameservice 1 Map<String, String> destMap = new HashMap<>(); String ns0 = this.getNameservices().get(0); destMap.put(ns0, "/"); MountTable entry = MountTable.newInstance("/", destMap); entries.add(entry); return entries; } }
apache-2.0
john9x/jdbi
core/src/main/java/org/jdbi/v3/core/Jdbi.java
20676
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jdbi.v3.core; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.util.Objects; import java.util.Properties; import java.util.ServiceLoader; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicReference; import javax.sql.DataSource; import org.jdbi.v3.core.config.ConfigRegistry; import org.jdbi.v3.core.config.Configurable; import org.jdbi.v3.core.extension.ExtensionCallback; import org.jdbi.v3.core.extension.ExtensionConsumer; import org.jdbi.v3.core.extension.ExtensionFactory; import org.jdbi.v3.core.extension.Extensions; import org.jdbi.v3.core.extension.HandleSupplier; import org.jdbi.v3.core.extension.NoSuchExtensionException; import org.jdbi.v3.core.internal.OnDemandExtensions; import org.jdbi.v3.core.internal.exceptions.Unchecked; import org.jdbi.v3.core.spi.JdbiPlugin; import org.jdbi.v3.core.statement.DefaultStatementBuilder; import org.jdbi.v3.core.statement.StatementBuilder; import org.jdbi.v3.core.statement.StatementBuilderFactory; import org.jdbi.v3.core.transaction.LocalTransactionHandler; import org.jdbi.v3.core.transaction.TransactionHandler; import org.jdbi.v3.core.transaction.TransactionIsolationLevel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.NANOSECONDS; /** * Main entry point; configurable wrapper 
around a JDBC {@link DataSource}. * Use it to obtain Handle instances and provide configuration * for all handles obtained from it. */ public class Jdbi implements Configurable<Jdbi> { private static final Logger LOG = LoggerFactory.getLogger(Jdbi.class); private final ConfigRegistry config = new ConfigRegistry(); private final ConnectionFactory connectionFactory; private final AtomicReference<TransactionHandler> transactionhandler = new AtomicReference<>(LocalTransactionHandler.binding()); private final AtomicReference<StatementBuilderFactory> statementBuilderFactory = new AtomicReference<>(DefaultStatementBuilder.FACTORY); private final CopyOnWriteArrayList<JdbiPlugin> plugins = new CopyOnWriteArrayList<>(); private final ThreadLocal<HandleSupplier> threadHandleSupplier = new ThreadLocal<>(); private Jdbi(ConnectionFactory connectionFactory) { Objects.requireNonNull(connectionFactory, "null connectionFactory"); this.connectionFactory = connectionFactory; } /** * @param connection db connection * * @return a Jdbi which works on single connection */ public static Jdbi create(Connection connection) { return create(new SingleConnectionFactory(connection)); } /** * @param dataSource the data source. * * @return a Jdbi which uses the given data source as a connection factory. */ public static Jdbi create(DataSource dataSource) { return create(dataSource::getConnection); } /** * Factory used to allow for obtaining a Connection in a customized manner. * * <p> * The {@link ConnectionFactory#openConnection()} method will be invoked to obtain a connection instance * whenever a Handle is opened. * </p> * * @param connectionFactory Provides JDBC connections to Handle instances * * @return a Jdbi which uses the given connection factory. */ public static Jdbi create(ConnectionFactory connectionFactory) { return new Jdbi(connectionFactory); } /** * @param url JDBC URL for connections * * @return a Jdbi which uses {@link DriverManager} as a connection factory. 
*/ public static Jdbi create(final String url) { Objects.requireNonNull(url, "null url"); return create(() -> DriverManager.getConnection(url)); } /** * @param url JDBC URL for connections * @param properties Properties to pass to DriverManager.getConnection(url, props) for each new handle * * @return a Jdbi which uses {@link DriverManager} as a connection factory. */ public static Jdbi create(final String url, final Properties properties) { Objects.requireNonNull(url, "null url"); Objects.requireNonNull(properties, "null properties"); return create(() -> DriverManager.getConnection(url, properties)); } /** * @param url JDBC URL for connections * @param username User name for connection authentication * @param password Password for connection authentication * * @return a Jdbi which uses {@link DriverManager} as a connection factory. */ public static Jdbi create(final String url, final String username, final String password) { Objects.requireNonNull(url, "null url"); Objects.requireNonNull(username, "null username"); Objects.requireNonNull(password, "null password"); return create(() -> DriverManager.getConnection(url, username, password)); } /** * Convenience method used to obtain a handle from a specific data source * * @param dataSource the JDBC data source. * * @return Handle using a Connection obtained from the provided DataSource */ public static Handle open(DataSource dataSource) { return create(dataSource).open(); } /** * Convenience method used to obtain a handle from a {@link ConnectionFactory}. 
* * @param connectionFactory the connection factory * * @return Handle using a Connection obtained from the provided connection factory */ public static Handle open(ConnectionFactory connectionFactory) { return create(connectionFactory).open(); } /** * Create a Handle wrapping a particular JDBC Connection * * @param connection the JDBC connection * * @return Handle bound to connection */ public static Handle open(final Connection connection) { Objects.requireNonNull(connection, "null connection"); return create(() -> connection).open(); } /** * Obtain a handle with just a JDBC URL * * @param url JDBC Url * * @return newly opened Handle */ public static Handle open(final String url) { return create(url).open(); } /** * Obtain a handle with just a JDBC URL * * @param url JDBC Url * @param username JDBC username for authentication * @param password JDBC password for authentication * * @return newly opened Handle */ public static Handle open(final String url, final String username, final String password) { return create(url, username, password).open(); } /** * Obtain a handle with just a JDBC URL * * @param url JDBC Url * @param props JDBC properties * * @return newly opened Handle */ public static Handle open(final String url, final Properties props) { return create(url, props).open(); } /** * Use the {@link ServiceLoader} API to detect and install plugins automagically. * Some people consider this feature dangerous; some consider it essential -- * use at your own risk. * @return this */ public Jdbi installPlugins() { ServiceLoader.load(JdbiPlugin.class).forEach(this::installPlugin); LOG.debug("Automatically installed plugins {}", plugins); return this; } /** * Install a given {@link JdbiPlugin} instance that will configure any * provided {@link Handle} instances. 
* @param plugin the plugin to install * @return this */ public Jdbi installPlugin(JdbiPlugin plugin) { if (plugins.addIfAbsent(plugin)) { Unchecked.consumer(plugin::customizeJdbi).accept(this); } return this; } /** * Allows customization of how prepared statements are created. When a Handle is created * against this Jdbi instance the factory will be used to create a StatementBuilder for * that specific handle. When the handle is closed, the StatementBuilder's close method * will be invoked. * * @param factory the new statement builder factory. * @return this */ public Jdbi setStatementBuilderFactory(StatementBuilderFactory factory) { this.statementBuilderFactory.set(factory); return this; } /** * @return the current {@link StatementBuilderFactory} */ public StatementBuilderFactory getStatementBuilderFactory() { return this.statementBuilderFactory.get(); } @Override public ConfigRegistry getConfig() { return config; } /** * Specify the TransactionHandler instance to use. This allows overriding * transaction semantics, or mapping into different transaction * management systems. * <p> * The default version uses local transactions on the database Connection * instances obtained. * </p> * * @param handler The TransactionHandler to use for all Handle instances obtained * from this Jdbi * @return this */ public Jdbi setTransactionHandler(TransactionHandler handler) { Objects.requireNonNull(handler, "null transaction handler"); this.transactionhandler.set(handler); return this; } /** * @return the {@link TransactionHandler} */ public TransactionHandler getTransactionHandler() { return this.transactionhandler.get(); } /** * Obtain a Handle to the data source wrapped by this Jdbi instance. * You own this expensive resource and are required to close it or * risk leaks. Using a {@code try-with-resources} block is recommended. 
* * @return an open Handle instance * @see #useHandle(HandleConsumer) * @see #withHandle(HandleCallback) */ public Handle open() { try { final long start = System.nanoTime(); @SuppressWarnings("PMD.CloseResource") Connection conn = Objects.requireNonNull(connectionFactory.openConnection(), () -> "Connection factory " + connectionFactory + " returned a null connection"); final long stop = System.nanoTime(); for (JdbiPlugin p : plugins) { conn = p.customizeConnection(conn); } StatementBuilder cache = statementBuilderFactory.get().createStatementBuilder(conn); Handle h = new Handle(this, config.createCopy(), connectionFactory::closeConnection, transactionhandler.get(), cache, conn); for (JdbiPlugin p : plugins) { h = p.customizeHandle(h); } LOG.trace("Jdbi [{}] obtain handle [{}] in {}ms", this, h, MILLISECONDS.convert(stop - start, NANOSECONDS)); return h; } catch (SQLException e) { throw new ConnectionException(e); } } /** * A convenience function which manages the lifecycle of a handle and yields it to a callback * for use by clients. * * @param callback A callback which will receive an open Handle * @param <R> type returned by the callback * @param <X> exception type thrown by the callback, if any. * * @return the value returned by callback * * @throws X any exception thrown by the callback */ public <R, X extends Exception> R withHandle(HandleCallback<R, X> callback) throws X { if (threadHandleSupplier.get() != null) { return callback.withHandle(threadHandleSupplier.get().getHandle()); } try (Handle h = this.open()) { threadHandleSupplier.set(new ConstantHandleSupplier(h)); return callback.withHandle(h); } finally { threadHandleSupplier.remove(); } } /** * A convenience function which manages the lifecycle of a handle and yields it to a callback * for use by clients. * * @param consumer A callback which will receive an open Handle * @param <X> exception type thrown by the callback, if any. 
* * @throws X any exception thrown by the callback */ public <X extends Exception> void useHandle(final HandleConsumer<X> consumer) throws X { withHandle(consumer.asCallback()); } /** * A convenience function which manages the lifecycle of a handle and yields it to a callback * for use by clients. The handle will be in a transaction when the callback is invoked, and * that transaction will be committed if the callback finishes normally, or rolled back if the * callback raises an exception. * * @param callback A callback which will receive an open Handle, in a transaction * @param <R> type returned by the callback * @param <X> exception type thrown by the callback, if any. * * @return the value returned by callback * * @throws X any exception thrown by the callback */ public <R, X extends Exception> R inTransaction(final HandleCallback<R, X> callback) throws X { return withHandle(handle -> handle.<R, X>inTransaction(callback)); } /** * A convenience function which manages the lifecycle of a handle and yields it to a callback * for use by clients. The handle will be in a transaction when the callback is invoked, and * that transaction will be committed if the callback finishes normally, or rolled back if the * callback raises an exception. * * @param callback A callback which will receive an open Handle, in a transaction * @param <X> exception type thrown by the callback, if any. * * @throws X any exception thrown by the callback */ public <X extends Exception> void useTransaction(final HandleConsumer<X> callback) throws X { useHandle(handle -> handle.useTransaction(callback)); } /** * A convenience function which manages the lifecycle of a handle and yields it to a callback * for use by clients. The handle will be in a transaction when the callback is invoked, and * that transaction will be committed if the callback finishes normally, or rolled back if the * callback raises an exception. 
* * <p> * This form accepts a transaction isolation level which will be applied to the connection * for the scope of this transaction, after which the original isolation level will be restored. * </p> * * @param level the transaction isolation level which will be applied to the connection for the scope of this * transaction, after which the original isolation level will be restored. * @param callback A callback which will receive an open Handle, in a transaction * @param <R> type returned by the callback * @param <X> exception type thrown by the callback, if any. * * @return the value returned by callback * * @throws X any exception thrown by the callback */ public <R, X extends Exception> R inTransaction(final TransactionIsolationLevel level, final HandleCallback<R, X> callback) throws X { return withHandle(handle -> handle.inTransaction(level, callback)); } /** * A convenience function which manages the lifecycle of a handle and yields it to a callback * for use by clients. The handle will be in a transaction when the callback is invoked, and * that transaction will be committed if the callback finishes normally, or rolled back if the * callback raises an exception. * * <p> * This form accepts a transaction isolation level which will be applied to the connection * for the scope of this transaction, after which the original isolation level will be restored. * </p> * * @param level the transaction isolation level which will be applied to the connection for the scope of this * transaction, after which the original isolation level will be restored. * @param callback A callback which will receive an open Handle, in a transaction * @param <X> exception type thrown by the callback, if any. 
* * @throws X any exception thrown by the callback */ public <X extends Exception> void useTransaction(final TransactionIsolationLevel level, final HandleConsumer<X> callback) throws X { useHandle(handle -> handle.useTransaction(level, callback)); } /** * A convenience method which opens an extension of the given type, yields it to a callback, and returns the result * of the callback. A handle is opened if needed by the extension, and closed before returning to the caller. * * @param extensionType the type of extension. * @param callback a callback which will receive the extension. * @param <R> the return type * @param <E> the extension type * @param <X> the exception type optionally thrown by the callback * @return the value returned by the callback. * @throws NoSuchExtensionException if no {@link ExtensionFactory} is registered which supports the given extension * type. * @throws X if thrown by the callback. */ public <R, E, X extends Exception> R withExtension(Class<E> extensionType, ExtensionCallback<R, E, X> callback) throws X { if (threadHandleSupplier.get() != null) { return callWithExtension(extensionType, callback, threadHandleSupplier.get()); } try (LazyHandleSupplier handleSupplier = new LazyHandleSupplier(this, config)) { threadHandleSupplier.set(handleSupplier); return callWithExtension(extensionType, callback, handleSupplier); } finally { threadHandleSupplier.remove(); } } private <R, E, X extends Exception> R callWithExtension(Class<E> extensionType, ExtensionCallback<R, E, X> callback, HandleSupplier handle) throws X { E extension = getConfig(Extensions.class) .findFor(extensionType, handle) .orElseThrow(() -> new NoSuchExtensionException(extensionType)); return callback.withExtension(extension); } /** * A convenience method which opens an extension of the given type, and yields it to a callback. A handle is opened * if needed by the extention, and closed before returning to the caller. 
* * @param extensionType the type of extension * @param callback a callback which will receive the extension * @param <E> the extension type * @param <X> the exception type optionally thrown by the callback * @throws NoSuchExtensionException if no {@link ExtensionFactory} is registered which supports the given extension type. * @throws X if thrown by the callback. */ public <E, X extends Exception> void useExtension(Class<E> extensionType, ExtensionConsumer<E, X> callback) throws X { withExtension(extensionType, extension -> { callback.useExtension(extension); return null; }); } /** * @param extensionType the type of extension. Must be a public interface type. * @param <E> the extension type * * @return an extension which opens and closes handles (as needed) for individual method calls. Only public * interface types may be used as on-demand extensions. */ public <E> E onDemand(Class<E> extensionType) { if (!extensionType.isInterface()) { throw new IllegalArgumentException("On-demand extensions are only supported for interfaces."); } if (!getConfig(Extensions.class).hasExtensionFor(extensionType)) { throw new NoSuchExtensionException("Extension not found: " + extensionType); } return getConfig(OnDemandExtensions.class).create(this, extensionType); } }
apache-2.0
ronny-macmaster/hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl.java
24086
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.api.protocolrecords.impl.pb; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.AMCommand; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NMToken; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.PreemptionMessage; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.UpdateContainerError; import org.apache.hadoop.yarn.api.records.UpdatedContainer; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerStatusPBImpl; import 
org.apache.hadoop.yarn.api.records.impl.pb.NMTokenPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.NodeReportPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionMessagePBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.PriorityPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils; import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.TokenPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.UpdatedContainerPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto; import org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto; import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto; import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto; import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder; import org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto; import com.google.protobuf.TextFormat; @Private @Unstable public class AllocateResponsePBImpl extends AllocateResponse { AllocateResponseProto proto = AllocateResponseProto.getDefaultInstance(); AllocateResponseProto.Builder builder = null; boolean viaProto = false; Resource limit; private List<Container> allocatedContainers = null; private List<NMToken> nmTokens = null; private List<ContainerStatus> completedContainersStatuses = null; private List<UpdatedContainer> updatedContainers = null; private List<NodeReport> updatedNodes = null; private List<UpdateContainerError> updateErrors = null; private PreemptionMessage preempt; private Token amrmToken = null; private Priority appPriority = null; public AllocateResponsePBImpl() { builder = AllocateResponseProto.newBuilder(); } 
public AllocateResponsePBImpl(AllocateResponseProto proto) { this.proto = proto; viaProto = true; } public synchronized AllocateResponseProto getProto() { mergeLocalToProto(); proto = viaProto ? proto : builder.build(); viaProto = true; return proto; } @Override public int hashCode() { return getProto().hashCode(); } @Override public boolean equals(Object other) { if (other == null) return false; if (other.getClass().isAssignableFrom(this.getClass())) { return this.getProto().equals(this.getClass().cast(other).getProto()); } return false; } @Override public String toString() { return TextFormat.shortDebugString(getProto()); } private synchronized void mergeLocalToBuilder() { if (this.allocatedContainers != null) { builder.clearAllocatedContainers(); Iterable<ContainerProto> iterable = getContainerProtoIterable(this.allocatedContainers); builder.addAllAllocatedContainers(iterable); } if (nmTokens != null) { builder.clearNmTokens(); Iterable<NMTokenProto> iterable = getTokenProtoIterable(nmTokens); builder.addAllNmTokens(iterable); } if (this.completedContainersStatuses != null) { builder.clearCompletedContainerStatuses(); Iterable<ContainerStatusProto> iterable = getContainerStatusProtoIterable(this.completedContainersStatuses); builder.addAllCompletedContainerStatuses(iterable); } if (this.updatedNodes != null) { builder.clearUpdatedNodes(); Iterable<NodeReportProto> iterable = getNodeReportProtoIterable(this.updatedNodes); builder.addAllUpdatedNodes(iterable); } if (this.limit != null) { builder.setLimit(convertToProtoFormat(this.limit)); } if (this.preempt != null) { builder.setPreempt(convertToProtoFormat(this.preempt)); } if (this.updatedContainers != null) { builder.clearUpdatedContainers(); Iterable<YarnServiceProtos.UpdatedContainerProto> iterable = getUpdatedContainerProtoIterable(this.updatedContainers); builder.addAllUpdatedContainers(iterable); } if (this.updateErrors != null) { builder.clearUpdateErrors(); 
Iterable<YarnServiceProtos.UpdateContainerErrorProto> iterable = getUpdateErrorsIterable(this.updateErrors); builder.addAllUpdateErrors(iterable); } if (this.amrmToken != null) { builder.setAmRmToken(convertToProtoFormat(this.amrmToken)); } if (this.appPriority != null) { builder.setApplicationPriority(convertToProtoFormat(this.appPriority)); } } private synchronized void mergeLocalToProto() { if (viaProto) maybeInitBuilder(); mergeLocalToBuilder(); proto = builder.build(); viaProto = true; } private synchronized void maybeInitBuilder() { if (viaProto || builder == null) { builder = AllocateResponseProto.newBuilder(proto); } viaProto = false; } @Override public synchronized AMCommand getAMCommand() { AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; if (!p.hasAMCommand()) { return null; } return ProtoUtils.convertFromProtoFormat(p.getAMCommand()); } @Override public synchronized void setAMCommand(AMCommand command) { maybeInitBuilder(); if (command == null) { builder.clearAMCommand(); return; } builder.setAMCommand(ProtoUtils.convertToProtoFormat(command)); } @Override public synchronized int getResponseId() { AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; return (p.getResponseId()); } @Override public synchronized void setResponseId(int responseId) { maybeInitBuilder(); builder.setResponseId((responseId)); } @Override public synchronized Resource getAvailableResources() { if (this.limit != null) { return this.limit; } AllocateResponseProtoOrBuilder p = viaProto ? 
proto : builder; if (!p.hasLimit()) { return null; } this.limit = convertFromProtoFormat(p.getLimit()); return this.limit; } @Override public synchronized void setAvailableResources(Resource limit) { maybeInitBuilder(); if (limit == null) builder.clearLimit(); this.limit = limit; } @Override public synchronized List<NodeReport> getUpdatedNodes() { initLocalNewNodeReportList(); return this.updatedNodes; } @Override public synchronized void setUpdatedNodes( final List<NodeReport> updatedNodes) { if (updatedNodes == null) { this.updatedNodes.clear(); return; } this.updatedNodes = new ArrayList<NodeReport>(updatedNodes.size()); this.updatedNodes.addAll(updatedNodes); } @Override public synchronized List<UpdateContainerError> getUpdateErrors() { initLocalUpdateErrorsList(); return this.updateErrors; } @Override public synchronized void setUpdateErrors( List<UpdateContainerError> updateErrors) { if (updateErrors == null) { if (this.updateErrors != null) { this.updateErrors.clear(); } return; } this.updateErrors = new ArrayList<>( updateErrors.size()); this.updateErrors.addAll(updateErrors); } @Override public synchronized List<Container> getAllocatedContainers() { initLocalNewContainerList(); return this.allocatedContainers; } @Override public synchronized void setAllocatedContainers( final List<Container> containers) { if (containers == null) return; initLocalNewContainerList(); allocatedContainers.clear(); allocatedContainers.addAll(containers); } @Override public synchronized List<UpdatedContainer> getUpdatedContainers() { initLocalUpdatedContainerList(); return this.updatedContainers; } @Override public synchronized void setUpdatedContainers( final List<UpdatedContainer> containers) { if (containers == null) return; initLocalUpdatedContainerList(); updatedContainers.clear(); updatedContainers.addAll(containers); } //// Finished containers @Override public synchronized List<ContainerStatus> getCompletedContainersStatuses() { initLocalFinishedContainerList(); return 
this.completedContainersStatuses; } @Override public synchronized void setCompletedContainersStatuses( final List<ContainerStatus> containers) { if (containers == null) return; initLocalFinishedContainerList(); completedContainersStatuses.clear(); completedContainersStatuses.addAll(containers); } @Override public synchronized void setNMTokens(List<NMToken> nmTokens) { if (nmTokens == null || nmTokens.isEmpty()) { if (this.nmTokens != null) { this.nmTokens.clear(); } builder.clearNmTokens(); return; } // Implementing it as an append rather than set for consistency initLocalNewNMTokenList(); this.nmTokens.addAll(nmTokens); } @Override public synchronized List<NMToken> getNMTokens() { initLocalNewNMTokenList(); return nmTokens; } @Override public synchronized int getNumClusterNodes() { AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; return p.getNumClusterNodes(); } @Override public synchronized void setNumClusterNodes(int numNodes) { maybeInitBuilder(); builder.setNumClusterNodes(numNodes); } @Override public synchronized PreemptionMessage getPreemptionMessage() { AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; if (this.preempt != null) { return this.preempt; } if (!p.hasPreempt()) { return null; } this.preempt = convertFromProtoFormat(p.getPreempt()); return this.preempt; } @Override public synchronized void setPreemptionMessage(PreemptionMessage preempt) { maybeInitBuilder(); if (null == preempt) { builder.clearPreempt(); } this.preempt = preempt; } @Override public synchronized Token getAMRMToken() { AllocateResponseProtoOrBuilder p = viaProto ? 
proto : builder; if (amrmToken != null) { return amrmToken; } if (!p.hasAmRmToken()) { return null; } this.amrmToken = convertFromProtoFormat(p.getAmRmToken()); return amrmToken; } @Override public synchronized void setAMRMToken(Token amRMToken) { maybeInitBuilder(); if (amRMToken == null) { builder.clearAmRmToken(); } this.amrmToken = amRMToken; } @Override public synchronized String getCollectorAddr() { AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; return p.getCollectorAddr(); } @Override public synchronized void setCollectorAddr(String collectorAddr) { maybeInitBuilder(); if (collectorAddr == null) { builder.clearCollectorAddr(); return; } builder.setCollectorAddr(collectorAddr); } @Override public synchronized Priority getApplicationPriority() { AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; if (this.appPriority != null) { return this.appPriority; } if (!p.hasApplicationPriority()) { return null; } this.appPriority = convertFromProtoFormat(p.getApplicationPriority()); return this.appPriority; } @Override public synchronized void setApplicationPriority(Priority priority) { maybeInitBuilder(); if (priority == null) builder.clearApplicationPriority(); this.appPriority = priority; } private synchronized void initLocalUpdatedContainerList() { if (this.updatedContainers != null) { return; } AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; List<YarnServiceProtos.UpdatedContainerProto> list = p.getUpdatedContainersList(); updatedContainers = new ArrayList<>(); for (YarnServiceProtos.UpdatedContainerProto c : list) { updatedContainers.add(convertFromProtoFormat(c)); } } // Once this is called. updatedNodes will never be null - until a getProto is // called. private synchronized void initLocalNewNodeReportList() { if (this.updatedNodes != null) { return; } AllocateResponseProtoOrBuilder p = viaProto ? 
proto : builder; List<NodeReportProto> list = p.getUpdatedNodesList(); updatedNodes = new ArrayList<NodeReport>(list.size()); for (NodeReportProto n : list) { updatedNodes.add(convertFromProtoFormat(n)); } } // Once this is called. containerList will never be null - until a getProto // is called. private synchronized void initLocalNewContainerList() { if (this.allocatedContainers != null) { return; } AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; List<ContainerProto> list = p.getAllocatedContainersList(); allocatedContainers = new ArrayList<Container>(); for (ContainerProto c : list) { allocatedContainers.add(convertFromProtoFormat(c)); } } private synchronized void initLocalNewNMTokenList() { if (nmTokens != null) { return; } AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; List<NMTokenProto> list = p.getNmTokensList(); nmTokens = new ArrayList<NMToken>(); for (NMTokenProto t : list) { nmTokens.add(convertFromProtoFormat(t)); } } private synchronized void initLocalUpdateErrorsList() { if (updateErrors != null) { return; } AllocateResponseProtoOrBuilder p = viaProto ? 
proto : builder; List<YarnServiceProtos.UpdateContainerErrorProto> list = p.getUpdateErrorsList(); this.updateErrors = new ArrayList<UpdateContainerError>(); for (YarnServiceProtos.UpdateContainerErrorProto t : list) { updateErrors.add(ProtoUtils.convertFromProtoFormat(t)); } } private synchronized Iterable<YarnServiceProtos.UpdateContainerErrorProto> getUpdateErrorsIterable( final List<UpdateContainerError> updateErrorsList) { maybeInitBuilder(); return new Iterable<YarnServiceProtos.UpdateContainerErrorProto>() { @Override public synchronized Iterator<YarnServiceProtos .UpdateContainerErrorProto> iterator() { return new Iterator<YarnServiceProtos.UpdateContainerErrorProto>() { private Iterator<UpdateContainerError> iter = updateErrorsList.iterator(); @Override public synchronized boolean hasNext() { return iter.hasNext(); } @Override public synchronized YarnServiceProtos.UpdateContainerErrorProto next() { return ProtoUtils.convertToProtoFormat(iter.next()); } @Override public synchronized void remove() { throw new UnsupportedOperationException(); } }; } }; } private synchronized Iterable<ContainerProto> getContainerProtoIterable( final List<Container> newContainersList) { maybeInitBuilder(); return new Iterable<ContainerProto>() { @Override public synchronized Iterator<ContainerProto> iterator() { return new Iterator<ContainerProto>() { Iterator<Container> iter = newContainersList.iterator(); @Override public synchronized boolean hasNext() { return iter.hasNext(); } @Override public synchronized ContainerProto next() { return convertToProtoFormat(iter.next()); } @Override public synchronized void remove() { throw new UnsupportedOperationException(); } }; } }; } private synchronized Iterable<YarnServiceProtos.UpdatedContainerProto> getUpdatedContainerProtoIterable( final List<UpdatedContainer> newUpdatedContainersList) { maybeInitBuilder(); return new Iterable<YarnServiceProtos.UpdatedContainerProto>() { @Override public synchronized 
Iterator<YarnServiceProtos.UpdatedContainerProto> iterator() { return new Iterator<YarnServiceProtos.UpdatedContainerProto>() { private Iterator<UpdatedContainer> iter = newUpdatedContainersList.iterator(); @Override public synchronized boolean hasNext() { return iter.hasNext(); } @Override public synchronized YarnServiceProtos.UpdatedContainerProto next() { return convertToProtoFormat(iter.next()); } @Override public synchronized void remove() { throw new UnsupportedOperationException(); } }; } }; } private synchronized Iterable<NMTokenProto> getTokenProtoIterable( final List<NMToken> nmTokenList) { maybeInitBuilder(); return new Iterable<NMTokenProto>() { @Override public synchronized Iterator<NMTokenProto> iterator() { return new Iterator<NMTokenProto>() { Iterator<NMToken> iter = nmTokenList.iterator(); @Override public boolean hasNext() { return iter.hasNext(); } @Override public NMTokenProto next() { return convertToProtoFormat(iter.next()); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } private synchronized Iterable<ContainerStatusProto> getContainerStatusProtoIterable( final List<ContainerStatus> newContainersList) { maybeInitBuilder(); return new Iterable<ContainerStatusProto>() { @Override public synchronized Iterator<ContainerStatusProto> iterator() { return new Iterator<ContainerStatusProto>() { Iterator<ContainerStatus> iter = newContainersList.iterator(); @Override public synchronized boolean hasNext() { return iter.hasNext(); } @Override public synchronized ContainerStatusProto next() { return convertToProtoFormat(iter.next()); } @Override public synchronized void remove() { throw new UnsupportedOperationException(); } }; } }; } private synchronized Iterable<NodeReportProto> getNodeReportProtoIterable( final List<NodeReport> newNodeReportsList) { maybeInitBuilder(); return new Iterable<NodeReportProto>() { @Override public synchronized Iterator<NodeReportProto> iterator() { return new 
Iterator<NodeReportProto>() { Iterator<NodeReport> iter = newNodeReportsList.iterator(); @Override public synchronized boolean hasNext() { return iter.hasNext(); } @Override public synchronized NodeReportProto next() { return convertToProtoFormat(iter.next()); } @Override public synchronized void remove() { throw new UnsupportedOperationException(); } }; } }; } // Once this is called. containerList will never be null - until a getProto // is called. private synchronized void initLocalFinishedContainerList() { if (this.completedContainersStatuses != null) { return; } AllocateResponseProtoOrBuilder p = viaProto ? proto : builder; List<ContainerStatusProto> list = p.getCompletedContainerStatusesList(); completedContainersStatuses = new ArrayList<ContainerStatus>(); for (ContainerStatusProto c : list) { completedContainersStatuses.add(convertFromProtoFormat(c)); } } private synchronized NodeReportPBImpl convertFromProtoFormat( NodeReportProto p) { return new NodeReportPBImpl(p); } private synchronized NodeReportProto convertToProtoFormat(NodeReport t) { return ((NodeReportPBImpl)t).getProto(); } private synchronized ContainerPBImpl convertFromProtoFormat( ContainerProto p) { return new ContainerPBImpl(p); } private synchronized ContainerProto convertToProtoFormat( Container t) { return ((ContainerPBImpl)t).getProto(); } private synchronized UpdatedContainerPBImpl convertFromProtoFormat( YarnServiceProtos.UpdatedContainerProto p) { return new UpdatedContainerPBImpl(p); } private synchronized YarnServiceProtos.UpdatedContainerProto convertToProtoFormat(UpdatedContainer t) { return ((UpdatedContainerPBImpl)t).getProto(); } private synchronized ContainerStatusPBImpl convertFromProtoFormat( ContainerStatusProto p) { return new ContainerStatusPBImpl(p); } private synchronized ContainerStatusProto convertToProtoFormat( ContainerStatus t) { return ((ContainerStatusPBImpl)t).getProto(); } private synchronized ResourcePBImpl convertFromProtoFormat(ResourceProto p) { return new 
ResourcePBImpl(p); } private synchronized ResourceProto convertToProtoFormat(Resource r) { return ProtoUtils.convertToProtoFormat(r); } private synchronized PreemptionMessagePBImpl convertFromProtoFormat(PreemptionMessageProto p) { return new PreemptionMessagePBImpl(p); } private synchronized PreemptionMessageProto convertToProtoFormat(PreemptionMessage r) { return ((PreemptionMessagePBImpl)r).getProto(); } private synchronized NMTokenProto convertToProtoFormat(NMToken token) { return ((NMTokenPBImpl)token).getProto(); } private synchronized NMToken convertFromProtoFormat(NMTokenProto proto) { return new NMTokenPBImpl(proto); } private TokenPBImpl convertFromProtoFormat(TokenProto p) { return new TokenPBImpl(p); } private TokenProto convertToProtoFormat(Token t) { return ((TokenPBImpl)t).getProto(); } private PriorityPBImpl convertFromProtoFormat(PriorityProto p) { return new PriorityPBImpl(p); } private PriorityProto convertToProtoFormat(Priority t) { return ((PriorityPBImpl)t).getProto(); } }
apache-2.0
drlebedev/nd4j
nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/shape/loop/one/LoopFunction1.java
712
package org.nd4j.linalg.api.shape.loop.one;

import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.shape.loop.four.RawArrayIterationInformation4;

/**
 * Callback used for raw iteration in loops over four data buffers.
 *
 * <p>Despite living in the {@code loop.one} package, this callback receives the
 * iteration bookkeeping of a four-array raw iteration
 * ({@link RawArrayIterationInformation4}) and a buffer/offset pair for each of
 * the four arrays involved.</p>
 *
 * @author Adam Gibson
 */
public interface LoopFunction1 {
    /**
     * Perform an operation on the elements of four buffers at the given offsets.
     *
     * @param i       the current iteration index
     * @param info    raw iteration information for the four arrays being traversed
     * @param a       the first buffer
     * @param aOffset the offset into the first buffer
     * @param b       the second buffer
     * @param bOffset the offset into the second buffer
     * @param c       the third buffer
     * @param cOffset the offset into the third buffer
     * @param d       the fourth buffer
     * @param dOffset the offset into the fourth buffer
     */
    void perform(int i, RawArrayIterationInformation4 info,
                 DataBuffer a, int aOffset,
                 DataBuffer b, int bOffset,
                 DataBuffer c, int cOffset,
                 DataBuffer d, int dOffset);
}
apache-2.0
pjain1/druid
server/src/main/java/org/apache/druid/metadata/SqlSegmentsMetadataManager.java
47644
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.metadata; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.util.concurrent.Futures; import com.google.errorprone.annotations.concurrent.GuardedBy; import com.google.inject.Inject; import org.apache.druid.client.DataSourcesSnapshot; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.guice.ManageLifecycle; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.java.util.common.MapUtils; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.jackson.JacksonUtils; import 
org.apache.druid.java.util.common.lifecycle.LifecycleStart; import org.apache.druid.java.util.common.lifecycle.LifecycleStop; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.Partitions; import org.apache.druid.timeline.SegmentId; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Interval; import org.skife.jdbi.v2.BaseResultSetMapper; import org.skife.jdbi.v2.Batch; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.Query; import org.skife.jdbi.v2.StatementContext; import org.skife.jdbi.v2.TransactionCallback; import org.skife.jdbi.v2.TransactionStatus; import org.skife.jdbi.v2.tweak.ResultSetMapper; import javax.annotation.Nullable; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.stream.Collectors; import java.util.stream.StreamSupport; /** * */ @ManageLifecycle public class SqlSegmentsMetadataManager implements SegmentsMetadataManager { private static final EmittingLogger log = new EmittingLogger(SqlSegmentsMetadataManager.class); /** * Marker interface for objects stored in {@link #latestDatabasePoll}. See the comment for that field for details. */ private interface DatabasePoll {} /** Represents periodic {@link #poll}s happening from {@link #exec}. 
*/ @VisibleForTesting static class PeriodicDatabasePoll implements DatabasePoll { /** * This future allows to wait until {@link #dataSourcesSnapshot} is initialized in the first {@link #poll()} * happening since {@link #startPollingDatabasePeriodically()} is called for the first time, or since the last * visible (in happens-before terms) call to {@link #startPollingDatabasePeriodically()} in case of Coordinator's * leadership changes. */ final CompletableFuture<Void> firstPollCompletionFuture = new CompletableFuture<>(); long lastPollStartTimestampInMs = -1; } /** * Represents on-demand {@link #poll} initiated at periods of time when SqlSegmentsMetadataManager doesn't poll the database * periodically. */ @VisibleForTesting static class OnDemandDatabasePoll implements DatabasePoll { final long initiationTimeNanos = System.nanoTime(); final CompletableFuture<Void> pollCompletionFuture = new CompletableFuture<>(); long nanosElapsedFromInitiation() { return System.nanoTime() - initiationTimeNanos; } } /** * Use to synchronize {@link #startPollingDatabasePeriodically}, {@link #stopPollingDatabasePeriodically}, {@link * #poll}, and {@link #isPollingDatabasePeriodically}. These methods should be synchronized to prevent from being * called at the same time if two different threads are calling them. This might be possible if Coordinator gets and * drops leadership repeatedly in quick succession. * * This lock is also used to synchronize {@link #useLatestIfWithinDelayOrPerformNewDatabasePoll} for times when SqlSegmentsMetadataManager * is not polling the database periodically (in other words, when the Coordinator is not the leader). */ private final ReentrantReadWriteLock startStopPollLock = new ReentrantReadWriteLock(); /** * Used to ensure that {@link #poll()} is never run concurrently. 
It should already be so (at least in production * code), where {@link #poll()} is called only from the task created in {@link #createPollTaskForStartOrder} and is * scheduled in a single-threaded {@link #exec}, so this lock is an additional safety net in case there are bugs in * the code, and for tests, where {@link #poll()} is called from the outside code. * * Not using {@link #startStopPollLock}.writeLock() in order to still be able to run {@link #poll()} concurrently * with {@link #isPollingDatabasePeriodically()}. */ private final Object pollLock = new Object(); private final ObjectMapper jsonMapper; private final Duration periodicPollDelay; private final Supplier<MetadataStorageTablesConfig> dbTables; private final SQLMetadataConnector connector; /** * This field is made volatile to avoid "ghost secondary reads" that may result in NPE, see * https://github.com/code-review-checklists/java-concurrency#safe-local-dcl (note that dataSourcesSnapshot resembles * a lazily initialized field). Alternative is to always read the field in a snapshot local variable, but it's too * easy to forget to do. * * This field may be updated from {@link #exec}, or from whatever thread calling {@link #doOnDemandPoll} via {@link * #useLatestIfWithinDelayOrPerformNewDatabasePoll()} via one of the public methods of SqlSegmentsMetadataManager. 
*/ private volatile @MonotonicNonNull DataSourcesSnapshot dataSourcesSnapshot = null; /** * The latest {@link DatabasePoll} represent {@link #poll()} calls which update {@link #dataSourcesSnapshot}, either * periodically (see {@link PeriodicDatabasePoll}, {@link #startPollingDatabasePeriodically}, {@link * #stopPollingDatabasePeriodically}) or "on demand" (see {@link OnDemandDatabasePoll}), when one of the methods that * accesses {@link #dataSourcesSnapshot}'s state (such as {@link #getImmutableDataSourceWithUsedSegments}) is * called when the Coordinator is not the leader and therefore SqlSegmentsMetadataManager isn't polling the database * periodically. * * Note that if there is a happens-before relationship between a call to {@link #startPollingDatabasePeriodically()} * (on Coordinators' leadership change) and one of the methods accessing the {@link #dataSourcesSnapshot}'s state in * this class the latter is guaranteed to await for the initiated periodic poll. This is because when the latter * method calls to {@link #useLatestSnapshotIfWithinDelay()} via {@link #useLatestIfWithinDelayOrPerformNewDatabasePoll}, they will * see the latest {@link PeriodicDatabasePoll} value (stored in this field, latestDatabasePoll, in {@link * #startPollingDatabasePeriodically()}) and to await on its {@link PeriodicDatabasePoll#firstPollCompletionFuture}. * * However, the guarantee explained above doesn't make any actual semantic difference, because on both periodic and * on-demand database polls the same invariant is maintained that the results not older than {@link * #periodicPollDelay} are used. The main difference is in performance: since on-demand polls are irregular and happen * in the context of the thread wanting to access the {@link #dataSourcesSnapshot}, that may cause delays in the * logic. On the other hand, periodic polls are decoupled into {@link #exec} and {@link * #dataSourcesSnapshot}-accessing methods should be generally "wait free" for database polls. 
* * The notion and the complexity of "on demand" database polls was introduced to simplify the interface of {@link * SegmentsMetadataManager} and guarantee that it always returns consistent and relatively up-to-date data from methods * like {@link #getImmutableDataSourceWithUsedSegments}, while avoiding excessive repetitive polls. The last part * is achieved via "hooking on" other polls by awaiting on {@link PeriodicDatabasePoll#firstPollCompletionFuture} or * {@link OnDemandDatabasePoll#pollCompletionFuture}, see {@link #useLatestIfWithinDelayOrPerformNewDatabasePoll} method * implementation for details. * * Note: the overall implementation of periodic/on-demand polls is not completely optimal: for example, when the * Coordinator just stopped leading, the latest periodic {@link #poll} (which is still "fresh") is not considered * and a new on-demand poll is always initiated. This is done to simplify the implementation, while the efficiency * during Coordinator leadership switches is not a priority. * * This field is {@code volatile} because it's checked and updated in a double-checked locking manner in {@link * #useLatestIfWithinDelayOrPerformNewDatabasePoll()}. */ private volatile @Nullable DatabasePoll latestDatabasePoll = null; /** Used to cancel periodic poll task in {@link #stopPollingDatabasePeriodically}. */ @GuardedBy("startStopPollLock") private @Nullable Future<?> periodicPollTaskFuture = null; /** The number of times {@link #startPollingDatabasePeriodically} was called. */ @GuardedBy("startStopPollLock") private long startPollingCount = 0; /** * Equal to the current {@link #startPollingCount} value if the SqlSegmentsMetadataManager is currently started; -1 if * currently stopped. 
* * This field is used to implement a simple stamp mechanism instead of just a boolean "started" flag to prevent * the theoretical situation of two or more tasks scheduled in {@link #startPollingDatabasePeriodically()} calling * {@link #isPollingDatabasePeriodically()} and {@link #poll()} concurrently, if the sequence of {@link * #startPollingDatabasePeriodically()} - {@link #stopPollingDatabasePeriodically()} - {@link * #startPollingDatabasePeriodically()} actions occurs quickly. * * {@link SQLMetadataRuleManager} also has a similar issue. */ @GuardedBy("startStopPollLock") private long currentStartPollingOrder = -1; @GuardedBy("startStopPollLock") private @Nullable ScheduledExecutorService exec = null; @Inject public SqlSegmentsMetadataManager( ObjectMapper jsonMapper, Supplier<SegmentsMetadataManagerConfig> config, Supplier<MetadataStorageTablesConfig> dbTables, SQLMetadataConnector connector ) { this.jsonMapper = jsonMapper; this.periodicPollDelay = config.get().getPollDuration().toStandardDuration(); this.dbTables = dbTables; this.connector = connector; } /** * Don't confuse this method with {@link #startPollingDatabasePeriodically}. This is a lifecycle starting method to * be executed just once for an instance of SqlSegmentsMetadataManager. */ @LifecycleStart public void start() { ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock(); lock.lock(); try { if (exec != null) { return; // Already started } exec = Execs.scheduledSingleThreaded(getClass().getName() + "-Exec--%d"); } finally { lock.unlock(); } } /** * Don't confuse this method with {@link #stopPollingDatabasePeriodically}. This is a lifecycle stopping method to * be executed just once for an instance of SqlSegmentsMetadataManager. 
*/ @LifecycleStop public void stop() { ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock(); lock.lock(); try { exec.shutdownNow(); exec = null; } finally { lock.unlock(); } } @Override public void startPollingDatabasePeriodically() { ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock(); lock.lock(); try { if (exec == null) { throw new IllegalStateException(getClass().getName() + " is not started"); } if (isPollingDatabasePeriodically()) { return; } PeriodicDatabasePoll periodicDatabasePoll = new PeriodicDatabasePoll(); latestDatabasePoll = periodicDatabasePoll; startPollingCount++; currentStartPollingOrder = startPollingCount; final long localStartOrder = currentStartPollingOrder; periodicPollTaskFuture = exec.scheduleWithFixedDelay( createPollTaskForStartOrder(localStartOrder, periodicDatabasePoll), 0, periodicPollDelay.getMillis(), TimeUnit.MILLISECONDS ); } finally { lock.unlock(); } } private Runnable createPollTaskForStartOrder(long startOrder, PeriodicDatabasePoll periodicDatabasePoll) { return () -> { // If latest poll was an OnDemandDatabasePoll that started less than periodicPollDelay, // We will wait for (periodicPollDelay - currentTime - LatestOnDemandDatabasePollStartTime) then check again. 
try { long periodicPollDelayNanos = TimeUnit.MILLISECONDS.toNanos(periodicPollDelay.getMillis()); while (latestDatabasePoll != null && latestDatabasePoll instanceof OnDemandDatabasePoll && ((OnDemandDatabasePoll) latestDatabasePoll).nanosElapsedFromInitiation() < periodicPollDelayNanos) { long sleepNano = periodicPollDelayNanos - ((OnDemandDatabasePoll) latestDatabasePoll).nanosElapsedFromInitiation(); TimeUnit.NANOSECONDS.sleep(sleepNano); } } catch (Exception e) { log.debug(e, "Exception found while waiting for next periodic poll"); } // poll() is synchronized together with startPollingDatabasePeriodically(), stopPollingDatabasePeriodically() and // isPollingDatabasePeriodically() to ensure that when stopPollingDatabasePeriodically() exits, poll() won't // actually run anymore after that (it could only enter the synchronized section and exit immediately because the // localStartedOrder doesn't match the new currentStartPollingOrder). It's needed to avoid flakiness in // SqlSegmentsMetadataManagerTest. See https://github.com/apache/druid/issues/6028 ReentrantReadWriteLock.ReadLock lock = startStopPollLock.readLock(); lock.lock(); try { if (startOrder == currentStartPollingOrder) { periodicDatabasePoll.lastPollStartTimestampInMs = System.currentTimeMillis(); poll(); periodicDatabasePoll.firstPollCompletionFuture.complete(null); latestDatabasePoll = periodicDatabasePoll; } else { log.debug("startOrder = currentStartPollingOrder = %d, skipping poll()", startOrder); } } catch (Throwable t) { log.makeAlert(t, "Uncaught exception in %s's polling thread", SqlSegmentsMetadataManager.class).emit(); // Swallow the exception, so that scheduled polling goes on. Leave firstPollFutureSinceLastStart uncompleted // for now, so that it may be completed during the next poll. if (!(t instanceof Exception)) { // Don't try to swallow a Throwable which is not an Exception (that is, a Error). 
periodicDatabasePoll.firstPollCompletionFuture.completeExceptionally(t); throw t; } } finally { lock.unlock(); } }; } @Override public boolean isPollingDatabasePeriodically() { // isPollingDatabasePeriodically() is synchronized together with startPollingDatabasePeriodically(), // stopPollingDatabasePeriodically() and poll() to ensure that the latest currentStartPollingOrder is always // visible. readLock should be used to avoid unexpected performance degradation of DruidCoordinator. ReentrantReadWriteLock.ReadLock lock = startStopPollLock.readLock(); lock.lock(); try { return currentStartPollingOrder >= 0; } finally { lock.unlock(); } } @Override public void stopPollingDatabasePeriodically() { ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock(); lock.lock(); try { if (!isPollingDatabasePeriodically()) { return; } periodicPollTaskFuture.cancel(false); latestDatabasePoll = null; // NOT nulling dataSourcesSnapshot, allowing to query the latest polled data even when this SegmentsMetadataManager // object is stopped. currentStartPollingOrder = -1; } finally { lock.unlock(); } } private void useLatestIfWithinDelayOrPerformNewDatabasePoll() { // Double-checked locking with useLatestSnapshotIfWithinDelay() call playing the role of the "check". if (useLatestSnapshotIfWithinDelay()) { return; } ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock(); lock.lock(); try { if (useLatestSnapshotIfWithinDelay()) { return; } OnDemandDatabasePoll onDemandDatabasePoll = new OnDemandDatabasePoll(); this.latestDatabasePoll = onDemandDatabasePoll; doOnDemandPoll(onDemandDatabasePoll); } finally { lock.unlock(); } } /** * This method returns true without waiting for database poll if the latest {@link DatabasePoll} is a * {@link PeriodicDatabasePoll} that has completed it's first poll, or an {@link OnDemandDatabasePoll} that is * made not longer than {@link #periodicPollDelay} from current time. 
* This method does wait until completion if the latest {@link DatabasePoll} is a * {@link PeriodicDatabasePoll} that has not completed its first poll, or an {@link OnDemandDatabasePoll} that is * already in the process of polling the database. * This means that any method using this check can read from a snapshot that is * up to {@link SqlSegmentsMetadataManager#periodicPollDelay} old. */ @VisibleForTesting boolean useLatestSnapshotIfWithinDelay() { DatabasePoll latestDatabasePoll = this.latestDatabasePoll; if (latestDatabasePoll instanceof PeriodicDatabasePoll) { Futures.getUnchecked(((PeriodicDatabasePoll) latestDatabasePoll).firstPollCompletionFuture); return true; } if (latestDatabasePoll instanceof OnDemandDatabasePoll) { long periodicPollDelayNanos = TimeUnit.MILLISECONDS.toNanos(periodicPollDelay.getMillis()); OnDemandDatabasePoll latestOnDemandPoll = (OnDemandDatabasePoll) latestDatabasePoll; boolean latestDatabasePollIsFresh = latestOnDemandPoll.nanosElapsedFromInitiation() < periodicPollDelayNanos; if (latestDatabasePollIsFresh) { Futures.getUnchecked(latestOnDemandPoll.pollCompletionFuture); return true; } // Latest on-demand poll is not fresh. Fall through to return false from this method. } else { assert latestDatabasePoll == null; // No periodic database polls and no on-demand poll have been done yet, nothing to await for. } return false; } /** * This method will always force a database poll if there is no ongoing database poll. This method will then * wait for the new poll or the ongoing poll to complete before returning. * This means that any method using this check can be sure that the latest poll for the snapshot was completed after * this method was called.
*/ @VisibleForTesting void forceOrWaitOngoingDatabasePoll() { long checkStartTime = System.currentTimeMillis(); ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock(); lock.lock(); try { DatabasePoll latestDatabasePoll = this.latestDatabasePoll; try { //Verify if there was a periodic poll completed while we were waiting for the lock if (latestDatabasePoll instanceof PeriodicDatabasePoll && ((PeriodicDatabasePoll) latestDatabasePoll).lastPollStartTimestampInMs > checkStartTime) { return; } // Verify if there was a on-demand poll completed while we were waiting for the lock if (latestDatabasePoll instanceof OnDemandDatabasePoll) { long checkStartTimeNanos = TimeUnit.MILLISECONDS.toNanos(checkStartTime); OnDemandDatabasePoll latestOnDemandPoll = (OnDemandDatabasePoll) latestDatabasePoll; if (latestOnDemandPoll.initiationTimeNanos > checkStartTimeNanos) { return; } } } catch (Exception e) { // Latest poll was unsuccessful, try to do a new poll log.debug(e, "Latest poll was unsuccessful. 
Starting a new poll..."); } // Force a database poll OnDemandDatabasePoll onDemandDatabasePoll = new OnDemandDatabasePoll(); this.latestDatabasePoll = onDemandDatabasePoll; doOnDemandPoll(onDemandDatabasePoll); } finally { lock.unlock(); } } private void doOnDemandPoll(OnDemandDatabasePoll onDemandPoll) { try { poll(); onDemandPoll.pollCompletionFuture.complete(null); } catch (Throwable t) { onDemandPoll.pollCompletionFuture.completeExceptionally(t); throw t; } } @Override public boolean markSegmentAsUsed(final String segmentId) { try { int numUpdatedDatabaseEntries = connector.getDBI().withHandle( (Handle handle) -> handle .createStatement(StringUtils.format("UPDATE %s SET used=true WHERE id = :id", getSegmentsTable())) .bind("id", segmentId) .execute() ); // Unlike bulk markAsUsed methods: markAsUsedAllNonOvershadowedSegmentsInDataSource(), // markAsUsedNonOvershadowedSegmentsInInterval(), and markAsUsedNonOvershadowedSegments() we don't put the marked // segment into the respective data source, because we don't have it fetched from the database. It's probably not // worth complicating the implementation and making two database queries just to add the segment because it will // be anyway fetched during the next poll(). Segment putting that is done in the bulk markAsUsed methods is a nice // to have thing, but doesn't formally affects the external guarantees of SegmentsMetadataManager class. 
return numUpdatedDatabaseEntries > 0; } catch (RuntimeException e) { log.error(e, "Exception marking segment %s as used", segmentId); throw e; } } @Override public int markAsUsedAllNonOvershadowedSegmentsInDataSource(final String dataSource) { return doMarkAsUsedNonOvershadowedSegments(dataSource, null); } @Override public int markAsUsedNonOvershadowedSegmentsInInterval(final String dataSource, final Interval interval) { Preconditions.checkNotNull(interval); return doMarkAsUsedNonOvershadowedSegments(dataSource, interval); } /** * Implementation for both {@link #markAsUsedAllNonOvershadowedSegmentsInDataSource} (if the given interval is null) * and {@link #markAsUsedNonOvershadowedSegmentsInInterval}. */ private int doMarkAsUsedNonOvershadowedSegments(String dataSourceName, @Nullable Interval interval) { List<DataSegment> usedSegmentsOverlappingInterval = new ArrayList<>(); List<DataSegment> unusedSegmentsInInterval = new ArrayList<>(); connector.inReadOnlyTransaction( (handle, status) -> { String queryString = StringUtils.format("SELECT used, payload FROM %1$s WHERE dataSource = :dataSource", getSegmentsTable()); if (interval != null) { queryString += StringUtils.format(" AND start < :end AND %1$send%1$s > :start", connector.getQuoteString()); } Query<?> query = handle .createQuery(queryString) .setFetchSize(connector.getStreamingFetchSize()) .bind("dataSource", dataSourceName); if (interval != null) { query = query .bind("start", interval.getStart().toString()) .bind("end", interval.getEnd().toString()); } query = query .map((int index, ResultSet resultSet, StatementContext context) -> { DataSegment segment = JacksonUtils.readValue(jsonMapper, resultSet.getBytes("payload"), DataSegment.class); if (resultSet.getBoolean("used")) { usedSegmentsOverlappingInterval.add(segment); } else { if (interval == null || interval.contains(segment.getInterval())) { unusedSegmentsInInterval.add(segment); } } //noinspection ReturnOfNull: intentional, consume() call below doesn't 
use the results. return null; }); // Consume the query results to ensure usedSegmentsOverlappingInterval and unusedSegmentsInInterval are // populated. consume(query.iterator()); return null; } ); VersionedIntervalTimeline<String, DataSegment> versionedIntervalTimeline = VersionedIntervalTimeline.forSegments( Iterators.concat(usedSegmentsOverlappingInterval.iterator(), unusedSegmentsInInterval.iterator()) ); return markNonOvershadowedSegmentsAsUsed(unusedSegmentsInInterval, versionedIntervalTimeline); } private static void consume(Iterator<?> iterator) { while (iterator.hasNext()) { iterator.next(); } } private int markNonOvershadowedSegmentsAsUsed( List<DataSegment> unusedSegments, VersionedIntervalTimeline<String, DataSegment> timeline ) { List<String> segmentIdsToMarkAsUsed = new ArrayList<>(); for (DataSegment segment : unusedSegments) { if (timeline.isOvershadowed(segment.getInterval(), segment.getVersion(), segment)) { continue; } segmentIdsToMarkAsUsed.add(segment.getId().toString()); } return markSegmentsAsUsed(segmentIdsToMarkAsUsed); } @Override public int markAsUsedNonOvershadowedSegments(final String dataSource, final Set<String> segmentIds) throws UnknownSegmentIdsException { try { Pair<List<DataSegment>, VersionedIntervalTimeline<String, DataSegment>> unusedSegmentsAndTimeline = connector .inReadOnlyTransaction( (handle, status) -> { List<DataSegment> unusedSegments = retrieveUnusedSegments(dataSource, segmentIds, handle); List<Interval> unusedSegmentsIntervals = JodaUtils.condenseIntervals( unusedSegments.stream().map(DataSegment::getInterval).collect(Collectors.toList()) ); Iterator<DataSegment> usedSegmentsOverlappingUnusedSegmentsIntervals = retrieveUsedSegmentsOverlappingIntervals(dataSource, unusedSegmentsIntervals, handle); VersionedIntervalTimeline<String, DataSegment> timeline = VersionedIntervalTimeline.forSegments( Iterators.concat(usedSegmentsOverlappingUnusedSegmentsIntervals, unusedSegments.iterator()) ); return new 
Pair<>(unusedSegments, timeline); } ); List<DataSegment> unusedSegments = unusedSegmentsAndTimeline.lhs; VersionedIntervalTimeline<String, DataSegment> timeline = unusedSegmentsAndTimeline.rhs; return markNonOvershadowedSegmentsAsUsed(unusedSegments, timeline); } catch (Exception e) { Throwable rootCause = Throwables.getRootCause(e); if (rootCause instanceof UnknownSegmentIdsException) { throw (UnknownSegmentIdsException) rootCause; } else { throw e; } } } private List<DataSegment> retrieveUnusedSegments( final String dataSource, final Set<String> segmentIds, final Handle handle ) throws UnknownSegmentIdsException { List<String> unknownSegmentIds = new ArrayList<>(); List<DataSegment> segments = segmentIds .stream() .map( segmentId -> { Iterator<DataSegment> segmentResultIterator = handle .createQuery( StringUtils.format( "SELECT used, payload FROM %1$s WHERE dataSource = :dataSource AND id = :id", getSegmentsTable() ) ) .bind("dataSource", dataSource) .bind("id", segmentId) .map((int index, ResultSet resultSet, StatementContext context) -> { try { if (!resultSet.getBoolean("used")) { return jsonMapper.readValue(resultSet.getBytes("payload"), DataSegment.class); } else { // We emit nulls for used segments. They are filtered out below in this method. return null; } } catch (IOException e) { throw new RuntimeException(e); } }) .iterator(); if (!segmentResultIterator.hasNext()) { unknownSegmentIds.add(segmentId); return null; } else { @Nullable DataSegment segment = segmentResultIterator.next(); if (segmentResultIterator.hasNext()) { log.error( "There is more than one row corresponding to segment id [%s] in data source [%s] in the database", segmentId, dataSource ); } return segment; } } ) .filter(Objects::nonNull) // Filter nulls corresponding to used segments. 
.collect(Collectors.toList()); if (!unknownSegmentIds.isEmpty()) { throw new UnknownSegmentIdsException(unknownSegmentIds); } return segments; } private Iterator<DataSegment> retrieveUsedSegmentsOverlappingIntervals( final String dataSource, final Collection<Interval> intervals, final Handle handle ) { return intervals .stream() .flatMap(interval -> { Iterable<DataSegment> segmentResultIterable = () -> handle .createQuery( StringUtils.format( "SELECT payload FROM %1$s " + "WHERE dataSource = :dataSource AND start < :end AND %2$send%2$s > :start AND used = true", getSegmentsTable(), connector.getQuoteString() ) ) .setFetchSize(connector.getStreamingFetchSize()) .bind("dataSource", dataSource) .bind("start", interval.getStart().toString()) .bind("end", interval.getEnd().toString()) .map((int index, ResultSet resultSet, StatementContext context) -> JacksonUtils.readValue(jsonMapper, resultSet.getBytes("payload"), DataSegment.class) ) .iterator(); return StreamSupport.stream(segmentResultIterable.spliterator(), false); }) .iterator(); } private int markSegmentsAsUsed(final List<String> segmentIds) { if (segmentIds.isEmpty()) { log.info("No segments found to update!"); return 0; } return connector.getDBI().withHandle(handle -> { Batch batch = handle.createBatch(); segmentIds.forEach(segmentId -> batch.add( StringUtils.format("UPDATE %s SET used=true WHERE id = '%s'", getSegmentsTable(), segmentId) )); int[] segmentChanges = batch.execute(); return computeNumChangedSegments(segmentIds, segmentChanges); }); } @Override public int markAsUnusedAllSegmentsInDataSource(final String dataSource) { try { final int numUpdatedDatabaseEntries = connector.getDBI().withHandle( (Handle handle) -> handle .createStatement( StringUtils.format("UPDATE %s SET used=false WHERE dataSource = :dataSource", getSegmentsTable()) ) .bind("dataSource", dataSource) .execute() ); return numUpdatedDatabaseEntries; } catch (RuntimeException e) { log.error(e, "Exception marking all segments as unused in 
data source [%s]", dataSource);
      throw e;
    }
  }

  /**
   * Marks a single segment as unused (used=false) directly in the metadata store.
   *
   * This method does not update {@link #dataSourcesSnapshot}, see the comments in {@link #doPoll()} about
   * snapshot update. The update of the segment's state will be reflected after the next {@link DatabasePoll}.
   */
  @Override
  public boolean markSegmentAsUnused(final String segmentId)
  {
    try {
      return markSegmentAsUnusedInDatabase(segmentId);
    }
    catch (RuntimeException e) {
      log.error(e, "Exception marking segment [%s] as unused", segmentId);
      throw e;
    }
  }

  /**
   * Marks every given segment id of the given data source as unused in a single JDBC batch.
   * Returns the number of segments whose state actually changed (rows updated), not the number of ids given.
   */
  @Override
  public int markSegmentsAsUnused(String dataSourceName, Set<String> segmentIds)
  {
    // Fast path: avoid opening a database handle for an empty request.
    if (segmentIds.isEmpty()) {
      return 0;
    }
    final List<String> segmentIdList = new ArrayList<>(segmentIds);
    try {
      return connector.getDBI().withHandle(handle -> {
        Batch batch = handle.createBatch();
        // NOTE(review): the data source name and segment ids are spliced into the SQL text instead of being
        // bound as parameters (JDBI Batch takes raw statements). These values are presumably internally
        // generated identifiers rather than user input — confirm before exposing this path to callers.
        segmentIdList.forEach(segmentId -> batch.add(
            StringUtils.format(
                "UPDATE %s SET used=false WHERE datasource = '%s' AND id = '%s'",
                getSegmentsTable(),
                dataSourceName,
                segmentId
            )
        ));
        final int[] segmentChanges = batch.execute();
        // Translate per-statement update counts into "number of segments actually changed", with logging
        // of anomalous counts.
        return computeNumChangedSegments(segmentIdList, segmentChanges);
      });
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Marks as unused every segment of the data source that lies entirely inside the given interval
   * (start >= :start AND end <= :end — segments merely overlapping the interval are NOT touched).
   * Returns the number of updated rows.
   */
  @Override
  public int markAsUnusedSegmentsInInterval(String dataSourceName, Interval interval)
  {
    try {
      Integer numUpdatedDatabaseEntries = connector.getDBI().withHandle(
          handle -> handle
              .createStatement(
                  StringUtils
                      .format(
                          // "end" must be quoted (%2$s is the DB-specific quote string) because it is a
                          // reserved word in some databases.
                          "UPDATE %s SET used=false WHERE datasource = :datasource "
                          + "AND start >= :start AND %2$send%2$s <= :end",
                          getSegmentsTable(),
                          connector.getQuoteString()
                      ))
              .bind("datasource", dataSourceName)
              .bind("start", interval.getStart().toString())
              .bind("end", interval.getEnd().toString())
              .execute()
      );
      return numUpdatedDatabaseEntries;
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Executes the single-row UPDATE behind {@link #markSegmentAsUnused}. Returns true iff at least one
   * row changed; anomalous update counts (negative, or more than one row for a supposedly unique id)
   * are logged but not thrown.
   */
  private boolean markSegmentAsUnusedInDatabase(String segmentId)
  {
    final int numUpdatedRows = connector.getDBI().withHandle(
        handle -> handle
            .createStatement(StringUtils.format("UPDATE %s SET used=false WHERE id = :segmentID",
                                                getSegmentsTable()))
            .bind("segmentID", segmentId)
            .execute()
    );
    if (numUpdatedRows < 0) {
      // A negative update count would indicate a JDBC driver bug; surface it loudly.
      log.assertionError(
          "Negative number of rows updated for segment id [%s]: %d",
          segmentId,
          numUpdatedRows
      );
    } else if (numUpdatedRows > 1) {
      // The id column should be unique; more than one updated row implies duplicate rows in the table.
      log.error(
          "More than one row updated for segment id [%s]: %d, "
          + "there may be more than one row for the segment id in the database",
          segmentId,
          numUpdatedRows
      );
    }
    return numUpdatedRows > 0;
  }

  /**
   * Counts how many statements in a batch changed at least one row, logging anomalies (negative counts,
   * or multiple rows for one segment id) the same way as {@link #markSegmentAsUnusedInDatabase}.
   */
  private static int computeNumChangedSegments(List<String> segmentIds, int[] segmentChanges)
  {
    int numChangedSegments = 0;
    for (int i = 0; i < segmentChanges.length; i++) {
      int numUpdatedRows = segmentChanges[i];
      if (numUpdatedRows < 0) {
        log.assertionError(
            "Negative number of rows updated for segment id [%s]: %d",
            segmentIds.get(i),
            numUpdatedRows
        );
      } else if (numUpdatedRows > 1) {
        log.error(
            "More than one row updated for segment id [%s]: %d, "
            + "there may be more than one row for the segment id in the database",
            segmentIds.get(i),
            numUpdatedRows
        );
      }
      if (numUpdatedRows > 0) {
        numChangedSegments += 1;
      }
    }
    return numChangedSegments;
  }

  @Override
  public @Nullable ImmutableDruidDataSource getImmutableDataSourceWithUsedSegments(String dataSourceName)
  {
    return getSnapshotOfDataSourcesWithAllUsedSegments().getDataSource(dataSourceName);
  }

  @Override
  public Collection<ImmutableDruidDataSource> getImmutableDataSourcesWithAllUsedSegments()
  {
    return getSnapshotOfDataSourcesWithAllUsedSegments().getDataSourcesWithAllUsedSegments();
  }

  @Override
  public Set<SegmentId> getOvershadowedSegments()
  {
    return getSnapshotOfDataSourcesWithAllUsedSegments().getOvershadowedSegments();
  }

  @Override
  public DataSourcesSnapshot getSnapshotOfDataSourcesWithAllUsedSegments()
  {
    // May trigger a fresh database poll if the cached snapshot is too old.
    useLatestIfWithinDelayOrPerformNewDatabasePoll();
    return dataSourcesSnapshot;
  }

  @VisibleForTesting
  DataSourcesSnapshot getDataSourcesSnapshot()
  {
    return dataSourcesSnapshot;
  }

  @VisibleForTesting
  DatabasePoll getLatestDatabasePoll()
  {
    return latestDatabasePoll;
  }

  @Override
  public Iterable<DataSegment>
iterateAllUsedSegments() { useLatestIfWithinDelayOrPerformNewDatabasePoll(); return dataSourcesSnapshot.iterateAllUsedSegmentsInSnapshot(); } @Override public Optional<Iterable<DataSegment>> iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(String datasource, Interval interval, boolean requiresLatest) { if (requiresLatest) { forceOrWaitOngoingDatabasePoll(); } else { useLatestIfWithinDelayOrPerformNewDatabasePoll(); } VersionedIntervalTimeline<String, DataSegment> usedSegmentsTimeline = dataSourcesSnapshot.getUsedSegmentsTimelinesPerDataSource().get(datasource); return Optional.fromNullable(usedSegmentsTimeline) .transform(timeline -> timeline.findNonOvershadowedObjectsInInterval(interval, Partitions.ONLY_COMPLETE)); } @Override public Set<String> retrieveAllDataSourceNames() { return connector.getDBI().withHandle( handle -> handle .createQuery(StringUtils.format("SELECT DISTINCT(datasource) FROM %s", getSegmentsTable())) .fold( new HashSet<>(), (Set<String> druidDataSources, Map<String, Object> stringObjectMap, FoldController foldController, StatementContext statementContext) -> { druidDataSources.add(MapUtils.getString(stringObjectMap, "datasource")); return druidDataSources; } ) ); } @Override public void poll() { // See the comment to the pollLock field, explaining this synchronized block synchronized (pollLock) { doPoll(); } } /** This method is extracted from {@link #poll()} solely to reduce code nesting. 
*/ @GuardedBy("pollLock") private void doPoll() { log.debug("Starting polling of segment table"); // some databases such as PostgreSQL require auto-commit turned off // to stream results back, enabling transactions disables auto-commit // // setting connection to read-only will allow some database such as MySQL // to automatically use read-only transaction mode, further optimizing the query final List<DataSegment> segments = connector.inReadOnlyTransaction( new TransactionCallback<List<DataSegment>>() { @Override public List<DataSegment> inTransaction(Handle handle, TransactionStatus status) { return handle .createQuery(StringUtils.format("SELECT payload FROM %s WHERE used=true", getSegmentsTable())) .setFetchSize(connector.getStreamingFetchSize()) .map( new ResultSetMapper<DataSegment>() { @Override public DataSegment map(int index, ResultSet r, StatementContext ctx) throws SQLException { try { DataSegment segment = jsonMapper.readValue(r.getBytes("payload"), DataSegment.class); return replaceWithExistingSegmentIfPresent(segment); } catch (IOException e) { log.makeAlert(e, "Failed to read segment from db.").emit(); // If one entry in database is corrupted doPoll() should continue to work overall. See // filter by `Objects::nonNull` below in this method. return null; } } } ) .list(); } } ); Preconditions.checkNotNull( segments, "Unexpected 'null' when polling segments from the db, aborting snapshot update." ); // dataSourcesSnapshot is updated only here and the DataSourcesSnapshot object is immutable. If data sources or // segments are marked as used or unused directly (via markAs...() methods in SegmentsMetadataManager), the // dataSourcesSnapshot can become invalid until the next database poll. // DataSourcesSnapshot computes the overshadowed segments, which makes it an expensive operation if the // snapshot was invalidated on each segment mark as unused or used, especially if a user issues a lot of single // segment mark calls in rapid succession. 
So the snapshot update is not done outside of database poll at this time. // Updates outside of database polls were primarily for the user experience, so users would immediately see the // effect of a segment mark call reflected in MetadataResource API calls. ImmutableMap<String, String> dataSourceProperties = createDefaultDataSourceProperties(); if (segments.isEmpty()) { log.info("No segments found in the database!"); } else { log.info("Polled and found %,d segments in the database", segments.size()); } dataSourcesSnapshot = DataSourcesSnapshot.fromUsedSegments( Iterables.filter(segments, Objects::nonNull), // Filter corrupted entries (see above in this method). dataSourceProperties ); } private static ImmutableMap<String, String> createDefaultDataSourceProperties() { return ImmutableMap.of("created", DateTimes.nowUtc().toString()); } /** * For the garbage collector in Java, it's better to keep new objects short-living, but once they are old enough * (i. e. promoted to old generation), try to keep them alive. In {@link #poll()}, we fetch and deserialize all * existing segments each time, and then replace them in {@link #dataSourcesSnapshot}. This method allows to use * already existing (old) segments when possible, effectively interning them a-la {@link String#intern} or {@link * com.google.common.collect.Interner}, aiming to make the majority of {@link DataSegment} objects garbage soon after * they are deserialized and to die in young generation. It allows to avoid fragmentation of the old generation and * full GCs. 
*/ private DataSegment replaceWithExistingSegmentIfPresent(DataSegment segment) { @MonotonicNonNull DataSourcesSnapshot dataSourcesSnapshot = this.dataSourcesSnapshot; if (dataSourcesSnapshot == null) { return segment; } @Nullable ImmutableDruidDataSource dataSource = dataSourcesSnapshot.getDataSource(segment.getDataSource()); if (dataSource == null) { return segment; } DataSegment alreadyExistingSegment = dataSource.getSegment(segment.getId()); return alreadyExistingSegment != null ? alreadyExistingSegment : segment; } private String getSegmentsTable() { return dbTables.get().getSegmentsTable(); } @Override public List<Interval> getUnusedSegmentIntervals(final String dataSource, final DateTime maxEndTime, final int limit) { return connector.inReadOnlyTransaction( new TransactionCallback<List<Interval>>() { @Override public List<Interval> inTransaction(Handle handle, TransactionStatus status) { Iterator<Interval> iter = handle .createQuery( StringUtils.format( "SELECT start, %2$send%2$s FROM %1$s WHERE dataSource = :dataSource AND " + "%2$send%2$s <= :end AND used = false ORDER BY start, %2$send%2$s", getSegmentsTable(), connector.getQuoteString() ) ) .setFetchSize(connector.getStreamingFetchSize()) .setMaxRows(limit) .bind("dataSource", dataSource) .bind("end", maxEndTime.toString()) .map( new BaseResultSetMapper<Interval>() { @Override protected Interval mapInternal(int index, Map<String, Object> row) { return new Interval( DateTimes.of((String) row.get("start")), DateTimes.of((String) row.get("end")) ); } } ) .iterator(); List<Interval> result = Lists.newArrayListWithCapacity(limit); for (int i = 0; i < limit && iter.hasNext(); i++) { try { result.add(iter.next()); } catch (Exception e) { throw new RuntimeException(e); } } return result; } } ); } }
apache-2.0
alien4cloud/alien4cloud
alien4cloud-common/src/main/java/alien4cloud/exception/ReferencedResourceException.java
450
package alien4cloud.exception;

import alien4cloud.model.common.Usage;

/**
 * Exception to be thrown when an operation cannot be done because a resource is used (referenced).
 */
public class ReferencedResourceException extends TechnicalException {
    private static final long serialVersionUID = 1L;

    /** The usages that reference the resource and therefore block the operation. */
    private final Usage[] usages;

    /**
     * @param message description of the refused operation.
     * @param usages the usages that reference the resource; defensively copied so later mutation of the
     *               caller's array cannot alter this exception's state.
     */
    public ReferencedResourceException(String message, Usage[] usages) {
        super(message);
        this.usages = usages == null ? null : usages.clone();
    }

    /**
     * @return a copy of the referencing usages (the internal array is never exposed).
     */
    public Usage[] getUsages() {
        return usages == null ? null : usages.clone();
    }
}
apache-2.0
yanjunh/elasticsearch
plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java
7374
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.script.javascript; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecutableNoRuntimeParams() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); final Object compiled = se.compile(null, "x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[between(3, 12)]; final CountDownLatch latch = new CountDownLatch(threads.length); final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread(new Runnable() { @Override public 
void run() { try { barrier.await(); long x = Randomness.get().nextInt(); long y = Randomness.get().nextInt(); long addition = x + y; Map<String, Object> vars = new HashMap<String, Object>(); vars.put("x", x); vars.put("y", y); ExecutableScript script = se.executable(new CompiledScript(ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), vars); for (int i = 0; i < between(100, 1000); i++) { long result = ((Number) script.run()).longValue(); assertThat(result, equalTo(addition)); } } catch (Exception e) { failed.set(true); logger.error("failed", e); } finally { latch.countDown(); } } }); } for (int i = 0; i < threads.length; i++) { threads[i].start(); } barrier.await(); latch.await(); assertThat(failed.get(), equalTo(false)); } public void testExecutableWithRuntimeParams() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); final Object compiled = se.compile(null, "x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[between(3, 12)]; final CountDownLatch latch = new CountDownLatch(threads.length); final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread(new Runnable() { @Override public void run() { try { barrier.await(); long x = Randomness.get().nextInt(); Map<String, Object> vars = new HashMap<String, Object>(); vars.put("x", x); ExecutableScript script = se.executable(new CompiledScript(ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), vars); for (int i = 0; i < between(100, 1000); i++) { long y = Randomness.get().nextInt(); long addition = x + y; script.setNextVar("y", y); long result = ((Number) script.run()).longValue(); assertThat(result, equalTo(addition)); } } catch (Exception e) { failed.set(true); logger.error("failed", e); } finally { latch.countDown(); } } }); } for (int i = 0; i < threads.length; i++) { 
threads[i].start(); } barrier.await(); latch.await(); assertThat(failed.get(), equalTo(false)); } public void testExecute() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); final Object compiled = se.compile(null, "x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[between(3, 12)]; final CountDownLatch latch = new CountDownLatch(threads.length); final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread(new Runnable() { @Override public void run() { try { barrier.await(); Map<String, Object> runtimeVars = new HashMap<String, Object>(); for (int i = 0; i < between(100, 1000); i++) { long x = Randomness.get().nextInt(); long y = Randomness.get().nextInt(); long addition = x + y; runtimeVars.put("x", x); runtimeVars.put("y", y); long result = ((Number) se.executable(new CompiledScript(ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), runtimeVars).run()).longValue(); assertThat(result, equalTo(addition)); } } catch (Exception e) { failed.set(true); logger.error("failed", e); } finally { latch.countDown(); } } }); } for (int i = 0; i < threads.length; i++) { threads[i].start(); } barrier.await(); latch.await(); assertThat(failed.get(), equalTo(false)); } }
apache-2.0
Rajith90/carbon-apimgt
components/apimgt/org.wso2.carbon.apimgt.rest.api.store.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/store/v1/impl/ApisApiServiceImpl.java
56430
/* * Copyright (c) 2019 WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.rest.api.store.v1.impl; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.cxf.jaxrs.ext.MessageContext; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.wso2.carbon.apimgt.api.APIConsumer; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.ExceptionCodes; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIRating; import org.wso2.carbon.apimgt.api.model.ApiTypeWrapper; import org.wso2.carbon.apimgt.api.model.Comment; import org.wso2.carbon.apimgt.api.model.Documentation; import org.wso2.carbon.apimgt.api.model.DocumentationContent; import org.wso2.carbon.apimgt.api.model.Identifier; import org.wso2.carbon.apimgt.api.model.ResourceFile; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.graphql.queryanalysis.GraphqlComplexityInfo; import org.wso2.carbon.apimgt.api.model.graphql.queryanalysis.GraphqlSchemaType; import org.wso2.carbon.apimgt.api.model.webhooks.Topic; import org.wso2.carbon.apimgt.impl.APIClientGenerationException; import org.wso2.carbon.apimgt.impl.APIClientGenerationManager; import 
org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.definitions.GraphQLSchemaDefinition; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil; import org.wso2.carbon.apimgt.rest.api.store.v1.ApisApiService; import java.io.File; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.HashMap; import org.wso2.carbon.apimgt.rest.api.store.v1.dto.*; import org.wso2.carbon.apimgt.rest.api.store.v1.mappings.APIMappingUtil; import org.wso2.carbon.apimgt.rest.api.store.v1.mappings.CommentMappingUtil; import org.wso2.carbon.apimgt.rest.api.store.v1.mappings.DocumentationMappingUtil; import org.wso2.carbon.apimgt.rest.api.store.v1.mappings.GraphqlQueryAnalysisMappingUtil; import org.wso2.carbon.apimgt.rest.api.common.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.store.v1.mappings.AsyncAPIMappingUtil; import org.wso2.carbon.apimgt.rest.api.util.utils.RestAPIStoreUtils; import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil; import org.wso2.carbon.user.api.UserStoreException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; public class ApisApiServiceImpl implements ApisApiService { private static final Log log = LogFactory.getLog(ApisApiServiceImpl.class); @Override public Response apisGet(Integer limit, Integer offset, String xWSO2Tenant, String query, String ifNoneMatch, MessageContext messageContext) { limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT; offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT; query = query == null ? 
"" : query; String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); APIListDTO apiListDTO = new APIListDTO(); try { String username = RestApiCommonUtil.getLoggedInUsername(); APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } //revert content search back to normal search by name to avoid doc result complexity and to comply with REST api practices if (query.startsWith(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + ":")) { query = query .replace(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + ":", APIConstants.NAME_TYPE_PREFIX + ":"); } Map allMatchedApisMap = apiConsumer.searchPaginatedAPIs(query, requestedTenantDomain, offset, limit); Set<Object> sortedSet = (Set<Object>) allMatchedApisMap.get("apis"); // This is a SortedSet ArrayList<Object> allMatchedApis = new ArrayList<>(sortedSet); apiListDTO = APIMappingUtil.fromAPIListToDTO(allMatchedApis); //Add pagination section in the response Object totalLength = allMatchedApisMap.get("length"); Integer totalAvailableAPis = 0; if (totalLength != null) { totalAvailableAPis = (Integer) totalLength; } APIMappingUtil .setPaginationParams(apiListDTO, query, offset, limit, totalAvailableAPis); return Response.ok().entity(apiListDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.rootCauseMessageMatches(e, "start index seems to be greater than the limit count")) { //this is not an error of the user as he does not know the total number of apis available. 
Thus sends // an empty response apiListDTO.setCount(0); apiListDTO.setPagination(new PaginationDTO()); return Response.ok().entity(apiListDTO).build(); } else { String errorMessage = "Error while retrieving APIs"; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response apisApiIdGet(String apiId, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) { return Response.ok().entity(getAPIByAPIId(apiId, xWSO2Tenant)).build(); } /** * Get complexity details of a given API * * @param apiId apiId * @param messageContext message context * @return Response with complexity details of the GraphQL API */ @Override public Response apisApiIdGraphqlPoliciesComplexityGet(String apiId, MessageContext messageContext) { try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain(); APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain); API api = apiConsumer.getLightweightAPIByUUID(apiId, tenantDomain); if (APIConstants.GRAPHQL_API.equals(api.getType())) { GraphqlComplexityInfo graphqlComplexityInfo = apiConsumer.getComplexityDetails(apiIdentifier); GraphQLQueryComplexityInfoDTO graphQLQueryComplexityInfoDTO = GraphqlQueryAnalysisMappingUtil.fromGraphqlComplexityInfotoDTO(graphqlComplexityInfo); return Response.ok().entity(graphQLQueryComplexityInfoDTO).build(); } else { throw new APIManagementException(ExceptionCodes.API_NOT_GRAPHQL); } } catch (APIManagementException e) { //Auth failure occurs when cross tenant accessing APIs. 
Sends 404, since we don't need // to expose the existence of the resource if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (isAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure( "Authorization failure while retrieving complexity details of API : " + apiId, e, log); } else { String msg = "Error while retrieving complexity details of API " + apiId; RestApiUtil.handleInternalServerError(msg, e, log); } } return null; } @Override public Response apisApiIdGraphqlPoliciesComplexityTypesGet(String apiId, MessageContext messageContext) throws APIManagementException { GraphQLSchemaDefinition graphql = new GraphQLSchemaDefinition(); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain(); APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain); API api = apiConsumer.getLightweightAPIByUUID(apiId, tenantDomain); if (APIConstants.GRAPHQL_API.equals(api.getType())) { String schemaContent = apiConsumer.getGraphqlSchema(apiIdentifier); List<GraphqlSchemaType> typeList = graphql.extractGraphQLTypeList(schemaContent); GraphQLSchemaTypeListDTO graphQLSchemaTypeListDTO = GraphqlQueryAnalysisMappingUtil.fromGraphqlSchemaTypeListtoDTO(typeList); return Response.ok().entity(graphQLSchemaTypeListDTO).build(); } else { throw new APIManagementException(ExceptionCodes.API_NOT_GRAPHQL); } } catch (APIManagementException e) { //Auth failure occurs when cross tenant accessing APIs. 
Sends 404, since we don't need // to expose the existence of the resource if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (isAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure( "Authorization failure while retrieving types and fields of API : " + apiId, e, log); } else { String msg = "Error while retrieving types and fields of the schema of API " + apiId; RestApiUtil.handleInternalServerError(msg, e, log); } } return null; } @Override public Response apisApiIdGraphqlSchemaGet(String apiId, String ifNoneMatch, String xWSO2Tenant, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, requestedTenantDomain); String graphQLSchema = apiConsumer.getGraphqlSchemaDefinition(apiId, requestedTenantDomain); return Response.ok().entity(graphQLSchema).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = "Error while retrieving API : " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } return null; } @Override public Response addCommentToAPI(String apiId, CommentDTO body, MessageContext messageContext) { String username = RestApiCommonUtil.getLoggedInUsername(); String requestedTenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain(); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); ApiTypeWrapper apiTypeWrapper = apiConsumer.getAPIorAPIProductByUUID(apiId, 
requestedTenantDomain); Identifier identifier; if (apiTypeWrapper.isAPIProduct()) { identifier = apiTypeWrapper.getApiProduct().getId(); } else { identifier = apiTypeWrapper.getApi().getId(); } Comment comment = CommentMappingUtil.fromDTOToComment(body, username, apiId); String createdCommentId = apiConsumer.addComment(identifier, comment, username); Comment createdComment = apiConsumer.getComment(identifier, createdCommentId); CommentDTO commentDTO = CommentMappingUtil.fromCommentToDTO(createdComment); String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId + RestApiConstants.RESOURCE_PATH_COMMENTS + "/" + createdCommentId; URI uri = new URI(uriString); return Response.created(uri).entity(commentDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { RestApiUtil.handleInternalServerError("Failed to add comment to the API " + apiId, e, log); } } catch (URISyntaxException e) { String errorMessage = "Error while retrieving comment content location for API " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response getAllCommentsOfAPI(String apiId, String xWSO2Tenant, Integer limit, Integer offset, Boolean includeCommenterInfo, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); ApiTypeWrapper apiTypeWrapper = apiConsumer.getAPIorAPIProductByUUID(apiId, requestedTenantDomain); Comment[] comments = apiConsumer.getComments(apiTypeWrapper); CommentListDTO commentDTO = CommentMappingUtil.fromCommentListToDTO(comments, limit, offset, includeCommenterInfo); String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId + RestApiConstants.RESOURCE_PATH_COMMENTS; URI uri = new 
URI(uriString); return Response.created(uri).entity(commentDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { RestApiUtil.handleInternalServerError("Failed to get comments of API " + apiId, e, log); } } catch (URISyntaxException e) { String errorMessage = "Error while retrieving comments content location for API " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response getCommentOfAPI(String commentId, String apiId, String xWSO2Tenant, String ifNoneMatch, Boolean includeCommenterInfo, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); ApiTypeWrapper apiTypeWrapper = apiConsumer.getAPIorAPIProductByUUID(apiId, requestedTenantDomain); Identifier identifier; if (apiTypeWrapper.isAPIProduct()) { identifier = apiTypeWrapper.getApiProduct().getId(); } else { identifier = apiTypeWrapper.getApi().getId(); } Comment comment = apiConsumer.getComment(identifier, commentId); if (comment != null) { CommentDTO commentDTO; if (includeCommenterInfo) { Map<String, Map<String, String>> userClaimsMap = CommentMappingUtil .retrieveUserClaims(comment.getUser(), new HashMap<>()); commentDTO = CommentMappingUtil.fromCommentToDTOWithUserInfo(comment, userClaimsMap); } else { commentDTO = CommentMappingUtil.fromCommentToDTO(comment); } String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId + RestApiConstants.RESOURCE_PATH_COMMENTS + "/" + commentId; URI uri = new URI(uriString); return Response.created(uri).entity(commentDTO).build(); } else { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_COMMENTS, String.valueOf(commentId), log); } } catch (APIManagementException e) { if 
(RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = "Error while retrieving comment for API : " + apiId + "with comment ID " + commentId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (URISyntaxException e) { String errorMessage = "Error while retrieving comment content location : " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response deleteComment(String commentId, String apiId, String ifMatch, MessageContext messageContext) throws APIManagementException { String requestedTenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain(); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, requestedTenantDomain); apiConsumer.deleteComment(apiIdentifier, commentId); return Response.ok("The comment has been deleted").build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = "Error while deleting comment " + commentId + "for API " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } return null; } @Override public Response apisApiIdDocumentsDocumentIdContentGet(String apiId, String documentId, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); if 
(!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } DocumentationContent docContent = apiConsumer.getDocumentationContent(apiId, documentId, requestedTenantDomain); if (docContent == null) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log); return null; } // gets the content depending on the type of the document if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.FILE)) { String contentType = docContent.getResourceFile().getContentType(); contentType = contentType == null ? RestApiConstants.APPLICATION_OCTET_STREAM : contentType; String name = docContent.getResourceFile().getName(); return Response.ok(docContent.getResourceFile().getContent()) .header(RestApiConstants.HEADER_CONTENT_TYPE, contentType) .header(RestApiConstants.HEADER_CONTENT_DISPOSITION, "attachment; filename=\"" + name + "\"") .build(); } else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.INLINE) || docContent.getSourceType().equals(DocumentationContent.ContentSourceType.MARKDOWN)) { String content = docContent.getTextContent(); return Response.ok(content) .header(RestApiConstants.HEADER_CONTENT_TYPE, APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE) .build(); } else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.URL)) { String sourceUrl = docContent.getTextContent(); return Response.seeOther(new URI(sourceUrl)).build(); } } catch (APIManagementException e) { if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = "Error while retrieving document " + documentId + " of the API " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (URISyntaxException e) { String errorMessage = "Error while 
retrieving source URI location of " + documentId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response apisApiIdDocumentsDocumentIdGet(String apiId, String documentId, String xWSO2Tenant, String ifModifiedSince, MessageContext messageContext) { Documentation documentation; String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { String username = RestApiCommonUtil.getLoggedInUsername(); APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } if (!RestAPIStoreUtils.isUserAccessAllowedForAPIByUUID(apiId, requestedTenantDomain)) { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, log); } documentation = apiConsumer.getDocumentation(apiId, documentId, requestedTenantDomain); if (null != documentation) { DocumentDTO documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentation); return Response.ok().entity(documentDTO).build(); } else { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log); } } catch (APIManagementException e) { if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { RestApiUtil.handleInternalServerError("Error while getting API " + apiId, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response apisApiIdDocumentsGet(String apiId, 
Integer limit, Integer offset, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) { //pre-processing //setting default limit and offset values if they are not set limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT; offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT; String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { String username = RestApiCommonUtil.getLoggedInUsername(); APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } //this will fail if user doesn't have access to the API or the API does not exist //APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, requestedTenantDomain); //List<Documentation> documentationList = apiConsumer.getAllDocumentation(apiIdentifier, username); List<Documentation> documentationList = apiConsumer.getAllDocumentation(apiId, requestedTenantDomain); DocumentListDTO documentListDTO = DocumentationMappingUtil .fromDocumentationListToDTO(documentationList, offset, limit); //todo : set total count properly DocumentationMappingUtil .setPaginationParams(documentListDTO, apiId, offset, limit, documentationList.size()); return Response.ok().entity(documentListDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { RestApiUtil.handleInternalServerError("Error while getting API " + apiId, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; 
RestApiUtil.handleInternalServerError(errorMessage, e, log); } /*catch (UnsupportedEncodingException e) { String errorMessage = "Error while Decoding apiId" + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); }*/ return null; } @Override public Response apisApiIdRatingsGet(String id, Integer limit, Integer offset, String xWSO2Tenant, MessageContext messageContext) { //pre-processing //setting default limit and offset values if they are not set limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT; offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT; String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { String username = RestApiCommonUtil.getLoggedInUsername(); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); ApiTypeWrapper apiTypeWrapper = apiConsumer.getAPIorAPIProductByUUID(id, requestedTenantDomain); Identifier identifier; if (apiTypeWrapper.isAPIProduct()) { identifier = apiTypeWrapper.getApiProduct().getId(); } else { identifier = apiTypeWrapper.getApi().getId(); } float avgRating = apiConsumer.getAverageAPIRating(identifier); int userRating = 0; if (!APIConstants.WSO2_ANONYMOUS_USER.equals(username)) { userRating = apiConsumer.getUserRating(identifier, username); } List<RatingDTO> ratingDTOList = new ArrayList<>(); JSONArray array = apiConsumer.getAPIRatings(identifier); for (int i = 0; i < array.size(); i++) { JSONObject obj = (JSONObject) array.get(i); RatingDTO ratingDTO = APIMappingUtil.fromJsonToRatingDTO(obj); ratingDTO.setApiId(id); ratingDTOList.add(ratingDTO); } RatingListDTO ratingListDTO = APIMappingUtil.fromRatingListToDTO(ratingDTOList, offset, limit); ratingListDTO.setUserRating(userRating); 
ratingListDTO.setAvgRating(String.valueOf(avgRating)); APIMappingUtil.setRatingPaginationParams(ratingListDTO, id, offset, limit, ratingDTOList.size()); return Response.ok().entity(ratingListDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError( RestApiConstants.RESOURCE_RATING + " for " + RestApiConstants.RESOURCE_API, id, e, log); } else { RestApiUtil.handleInternalServerError("Error while retrieving ratings for API " + id, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } /** * Rest api implementation to downloading the client sdk for given api in given sdk language. * * @param apiId : The id of the api. * @param language : Preferred sdk language. * @param messageContext : messageContext * @return : The sdk as a zip archive. */ @Override public Response apisApiIdSdksLanguageGet(String apiId, String language, String xWSO2Tenant, MessageContext messageContext) { if (StringUtils.isEmpty(apiId) || StringUtils.isEmpty(language)) { String message = "Error generating the SDK. API id or language should not be empty"; RestApiUtil.handleBadRequest(message, log); } String tenant = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); APIDTO api = getAPIByAPIId(apiId, tenant); APIClientGenerationManager apiClientGenerationManager = new APIClientGenerationManager(); Map<String, String> sdkArtifacts; if (api != null) { String apiProvider = api.getProvider(); try { sdkArtifacts = apiClientGenerationManager.generateSDK(language, api.getName(), api.getVersion(), apiProvider, RestApiCommonUtil.getLoggedInUsername()); //Create the sdk response. 
File sdkFile = new File(sdkArtifacts.get("zipFilePath")); return Response.ok(sdkFile, MediaType.APPLICATION_OCTET_STREAM_TYPE).header("Content-Disposition", "attachment; filename=\"" + sdkArtifacts.get("zipFileName") + "\"" ).build(); } catch (APIClientGenerationException e) { String message = "Error generating client sdk for api: " + api.getName() + " for language: " + language; RestApiUtil.handleInternalServerError(message, e, log); } } String message = "Could not find an API for ID " + apiId; RestApiUtil.handleResourceNotFoundError(message, log); return null; } /** * Retrieves the swagger document of an API * * @param apiId API identifier * @param labelName name of the gateway label * @param environmentName name of the gateway environment * @param clusterName name of the container managed cluster * @param ifNoneMatch If-None-Match header value * @param xWSO2Tenant requested tenant domain for cross tenant invocations * @param messageContext CXF message context * @return Swagger document of the API for the given label or gateway environment */ @Override public Response apisApiIdSwaggerGet(String apiId, String labelName, String environmentName, String clusterName, String ifNoneMatch, String xWSO2Tenant, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); if (StringUtils.isNotEmpty(labelName) ? 
StringUtils.isNotEmpty(environmentName) || StringUtils.isNotEmpty(clusterName) : StringUtils.isNotEmpty(environmentName) && StringUtils.isNotEmpty(clusterName)) { RestApiUtil.handleBadRequest( "Only one of 'labelName', 'environmentName' or 'clusterName' can be provided", log ); } API api = apiConsumer.getLightweightAPIByUUID(apiId, requestedTenantDomain); if (api.getUuid() == null) { api.setUuid(apiId); } if (api.getSwaggerDefinition() != null) { api.setSwaggerDefinition(APIUtil.removeXMediationScriptsFromSwagger(api.getSwaggerDefinition())); } else { api.setSwaggerDefinition(apiConsumer.getOpenAPIDefinition(apiId, requestedTenantDomain)); } // gets the first available environment if any of label, environment or cluster name is not provided if (StringUtils.isEmpty(labelName) && StringUtils.isEmpty(environmentName) && StringUtils.isEmpty(clusterName)) { Map<String, Environment> existingEnvironments = APIUtil.getEnvironments(); // find a valid environment name from API // gateway environment may be invalid due to inconsistent state of the API // example: publish an API and later rename gateway environment from configurations // then the old gateway environment name becomes invalid for (String environmentNameOfApi : api.getEnvironments()) { if (existingEnvironments.get(environmentNameOfApi) != null) { environmentName = environmentNameOfApi; break; } } // if all environment of API are invalid or there are no environments (i.e. empty) if (StringUtils.isEmpty(environmentName)) { // if there are no environments in the API, take a random environment from the existing ones. 
// This is to make sure the swagger doesn't have invalid endpoints if (!existingEnvironments.keySet().isEmpty()) { environmentName = existingEnvironments.keySet().iterator().next(); } } } if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } String apiSwagger = null; if (StringUtils.isNotEmpty(environmentName)) { try { apiSwagger = apiConsumer.getOpenAPIDefinitionForEnvironment(api, environmentName); } catch (APIManagementException e) { // handle gateway not found exception otherwise pass it if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError( "Gateway environment '" + environmentName + "' not found", e, log); return null; } throw e; } } else if (StringUtils.isNotEmpty(labelName)) { apiSwagger = apiConsumer.getOpenAPIDefinitionForLabel(api, labelName); } else if (StringUtils.isNotEmpty(clusterName)) { apiSwagger = apiConsumer.getOpenAPIDefinitionForClusterName(api, clusterName); } else { apiSwagger = api.getSwaggerDefinition(); } return Response.ok().entity(apiSwagger).header("Content-Disposition", "attachment; filename=\"" + "swagger.json" + "\"" ).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = "Error while retrieving swagger of API : " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response apisApiIdThumbnailGet(String apiId, String xWSO2Tenant, 
String ifNoneMatch, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } //this will fail if user does not have access to the API or the API does not exist apiConsumer.getLightweightAPIByUUID(apiId, requestedTenantDomain); ResourceFile thumbnailResource = apiConsumer.getIcon(apiId, requestedTenantDomain); if (thumbnailResource != null) { return Response .ok(thumbnailResource.getContent(), MediaType.valueOf(thumbnailResource.getContentType())) .build(); } else { return Response.noContent().build(); } } catch (APIManagementException e) { //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the // existence of the resource if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = "Error while retrieving thumbnail of API : " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response apisApiIdTopicsGet(String apiId, String xWSO2Tenant, MessageContext messageContext) throws APIManagementException { if (org.apache.commons.lang.StringUtils.isNotEmpty(apiId)) { String username = RestApiCommonUtil.getLoggedInUsername(); String tenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); Set<Topic> topics; try { APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); ApiTypeWrapper apiTypeWrapper 
= apiConsumer.getAPIorAPIProductByUUID(apiId, tenantDomain); TopicListDTO topicListDTO; if (apiTypeWrapper.isAPIProduct()) { topics = apiConsumer.getTopics(apiTypeWrapper.getApiProduct().getUuid()); } else { topics = apiConsumer.getTopics(apiTypeWrapper.getApi().getUuid()); } topicListDTO = AsyncAPIMappingUtil.fromTopicListToDTO(topics); return Response.ok().entity(topicListDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { RestApiUtil.handleInternalServerError("Failed to get topics of Async API " + apiId, e, log); } } } else { RestApiUtil.handleBadRequest("API Id is missing in request", log); } return null; } @Override public Response apisApiIdUserRatingPut(String id, RatingDTO body, String xWSO2Tenant, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { int rating = 0; String username = RestApiCommonUtil.getLoggedInUsername(); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); //this will fail if user doesn't have access to the API or the API does not exist ApiTypeWrapper apiTypeWrapper = apiConsumer.getAPIorAPIProductByUUID(id, requestedTenantDomain); Identifier identifier; if (apiTypeWrapper.isAPIProduct()) { identifier = apiTypeWrapper.getApiProduct().getId(); } else { identifier = apiTypeWrapper.getApi().getId(); } if (body != null) { rating = body.getRating(); } switch (rating) { //Below case 0[Rate 0] - is to remove ratings from a user case 0: { apiConsumer.rateAPI(identifier, APIRating.RATING_ZERO, username); break; } case 1: { apiConsumer.rateAPI(identifier, APIRating.RATING_ONE, username); 
break; } case 2: { apiConsumer.rateAPI(identifier, APIRating.RATING_TWO, username); break; } case 3: { apiConsumer.rateAPI(identifier, APIRating.RATING_THREE, username); break; } case 4: { apiConsumer.rateAPI(identifier, APIRating.RATING_FOUR, username); break; } case 5: { apiConsumer.rateAPI(identifier, APIRating.RATING_FIVE, username); break; } default: { RestApiUtil.handleBadRequest("Provided API Rating is not in the range from 1 to 5", log); } } JSONObject obj = apiConsumer.getUserRatingInfo(identifier, username); RatingDTO ratingDTO = new RatingDTO(); if (obj != null && !obj.isEmpty()) { ratingDTO = APIMappingUtil.fromJsonToRatingDTO(obj); ratingDTO.setApiId(id); } return Response.ok().entity(ratingDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure( RestApiConstants.RESOURCE_RATING + " for " + RestApiConstants.RESOURCE_API, id, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError( RestApiConstants.RESOURCE_RATING + " for " + RestApiConstants.RESOURCE_API, id, e, log); } else { RestApiUtil .handleInternalServerError("Error while adding/updating user rating for API " + id, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response apisApiIdUserRatingGet(String id, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { String username = RestApiCommonUtil.getLoggedInUsername(); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); //this 
will fail if user doesn't have access to the API or the API does not exist ApiTypeWrapper apiTypeWrapper = apiConsumer.getAPIorAPIProductByUUID(id, requestedTenantDomain); Identifier identifier; if (apiTypeWrapper.isAPIProduct()) { identifier = apiTypeWrapper.getApiProduct().getId(); } else { identifier = apiTypeWrapper.getApi().getId(); } JSONObject obj = apiConsumer.getUserRatingInfo(identifier, username); RatingDTO ratingDTO = new RatingDTO(); if (obj != null && !obj.isEmpty()) { ratingDTO = APIMappingUtil.fromJsonToRatingDTO(obj); ratingDTO.setApiId(id); } return Response.ok().entity(ratingDTO).build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure( RestApiConstants.RESOURCE_RATING + " for " + RestApiConstants.RESOURCE_API, id, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError( RestApiConstants.RESOURCE_RATING + " for " + RestApiConstants.RESOURCE_API, id, e, log); } else { RestApiUtil.handleInternalServerError("Error while retrieving user rating for API " + id, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response apisApiIdUserRatingDelete(String apiId, String xWSO2Tenant, String ifMatch, MessageContext messageContext) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { String username = RestApiCommonUtil.getLoggedInUsername(); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } APIConsumer apiConsumer = RestApiCommonUtil.getConsumer(username); //this will fail if user doesn't have access to the API or the API does not exist ApiTypeWrapper apiTypeWrapper = 
apiConsumer.getAPIorAPIProductByUUID(apiId, requestedTenantDomain); Identifier identifier; if (apiTypeWrapper.isAPIProduct()) { identifier = apiTypeWrapper.getApiProduct().getId(); } else { identifier = apiTypeWrapper.getApi().getId(); } apiConsumer.removeAPIRating(identifier, username); return Response.ok().build(); } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure( RestApiConstants.RESOURCE_RATING + " for " + RestApiConstants.RESOURCE_API, apiId, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError( RestApiConstants.RESOURCE_RATING + " for " + RestApiConstants.RESOURCE_API, apiId, e, log); } else { RestApiUtil.handleInternalServerError("Error while deleting user rating for API " + apiId, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } @Override public Response getWSDLOfAPI(String apiId, String labelName, String environmentName, String ifNoneMatch, String xWSO2Tenant, MessageContext messageContext) throws APIManagementException { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); API api = apiConsumer.getLightweightAPIByUUID(apiId, requestedTenantDomain); APIIdentifier apiIdentifier = api.getId(); List<Environment> environments = APIUtil.getEnvironmentsOfAPI(api); if (environments != null && environments.size() > 0) { if (StringUtils.isEmpty(labelName) && StringUtils.isEmpty(environmentName)) { environmentName = api.getEnvironments().iterator().next(); } Environment selectedEnvironment = null; for (Environment environment: environments) { if (environment.getName().equals(environmentName)) { selectedEnvironment = environment; break; } } if (selectedEnvironment == null) { throw 
new APIManagementException(ExceptionCodes.from(ExceptionCodes.GATEWAY_ENVIRONMENT_NOT_FOUND, environmentName)); } ResourceFile wsdl = apiConsumer.getWSDL(api, selectedEnvironment.getName(), selectedEnvironment.getType(), requestedTenantDomain); return RestApiUtil.getResponseFromResourceFile(apiIdentifier.toString(), wsdl); } else { throw new APIManagementException(ExceptionCodes.from(ExceptionCodes.NO_GATEWAY_ENVIRONMENTS_ADDED, apiIdentifier.toString())); } } @Override public Response apisApiIdSubscriptionPoliciesGet(String apiId, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) { APIDTO apiInfo = getAPIByAPIId(apiId, xWSO2Tenant); List<Tier> availableThrottlingPolicyList = new ThrottlingPoliciesApiServiceImpl() .getThrottlingPolicyList(ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString(), xWSO2Tenant); if (apiInfo != null ) { List<APITiersDTO> apiTiers = apiInfo.getTiers(); if (apiTiers != null && !apiTiers.isEmpty()) { List<Tier> apiThrottlingPolicies = new ArrayList<>(); for (Tier policy : availableThrottlingPolicyList) { for (APITiersDTO apiTier :apiTiers) { if (apiTier.getTierName().equalsIgnoreCase(policy.getName())) { apiThrottlingPolicies.add(policy); } } } return Response.ok().entity(apiThrottlingPolicies).build(); } } return null; } private APIDTO getAPIByAPIId(String apiId, String xWSO2Tenant) { String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant); try { APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); if (!APIUtil.isTenantAvailable(requestedTenantDomain)) { RestApiUtil.handleBadRequest("Provided tenant domain '" + xWSO2Tenant + "' is invalid", ExceptionCodes.INVALID_TENANT.getErrorCode(), log); } ApiTypeWrapper api = apiConsumer.getAPIorAPIProductByUUID(apiId, requestedTenantDomain); String status = api.getStatus(); // Extracting clicked API name by the user, for the recommendation system String userName = RestApiCommonUtil.getLoggedInUsername(); 
apiConsumer.publishClickedAPI(api, userName); if (APIConstants.PUBLISHED.equals(status) || APIConstants.PROTOTYPED.equals(status) || APIConstants.DEPRECATED.equals(status)) { return APIMappingUtil.fromAPItoDTO(api, requestedTenantDomain); } else { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, log); } } catch (APIManagementException e) { if (RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = "Error while retrieving API : " + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (UserStoreException e) { String errorMessage = "Error while checking availability of tenant " + requestedTenantDomain; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } /** * To check whether a particular exception is due to access control restriction. * * @param e Exception object. * @return true if the the exception is caused due to authorization failure. */ private boolean isAuthorizationFailure(Exception e) { String errorMessage = e.getMessage(); return errorMessage != null && errorMessage.contains(APIConstants.UN_AUTHORIZED_ERROR_MESSAGE); } }
apache-2.0
bocon13/buck
src/com/facebook/buck/intellij/plugin/src/com/facebook/buck/intellij/plugin/actions/BuckBuildAction.java
2289
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.intellij.plugin.actions; import com.facebook.buck.intellij.plugin.build.BuckBuildCommandHandler; import com.facebook.buck.intellij.plugin.build.BuckBuildManager; import com.facebook.buck.intellij.plugin.build.BuckCommand; import com.facebook.buck.intellij.plugin.config.BuckModule; import com.facebook.buck.intellij.plugin.ui.BuckEventsConsumer; import com.intellij.icons.AllIcons; import com.intellij.openapi.actionSystem.AnActionEvent; /** * Run buck build command. 
*/ public class BuckBuildAction extends BuckBaseAction { public static final String ACTION_TITLE = "Run buck build"; public static final String ACTION_DESCRIPTION = "Run buck build command"; public BuckBuildAction() { super(ACTION_TITLE, ACTION_DESCRIPTION, AllIcons.Actions.Download); } @Override public void actionPerformed(AnActionEvent e) { BuckBuildManager buildManager = BuckBuildManager.getInstance(e.getProject()); String target = buildManager.getCurrentSavedTarget(e.getProject()); if (target == null) { buildManager.showNoTargetMessage(e.getProject()); return; } // Initiate a buck build BuckEventsConsumer buckEventsConsumer = new BuckEventsConsumer(e.getProject()); BuckModule buckModule = e.getProject().getComponent(BuckModule.class); buckModule.attach(buckEventsConsumer, target); BuckBuildCommandHandler handler = new BuckBuildCommandHandler( e.getProject(), e.getProject().getBaseDir(), BuckCommand.BUILD, buckEventsConsumer); handler.command().addParameter(target); buildManager.runBuckCommandWhileConnectedToBuck(handler, ACTION_TITLE, buckModule); } }
apache-2.0
lovemomia/momia
course/api-course/src/main/java/cn/momia/api/course/expert/ExpertHear.java
811
package cn.momia.api.course.expert; /** * Created by hoze on 16/6/15. */ public class ExpertHear { private int id; private long questionId; private long userId; private int status; public int getId() { return id; } public void setId(int id) { this.id = id; } public long getQuestionId() { return questionId; } public void setQuestionId(long questionId) { this.questionId = questionId; } public long getUserId() { return userId; } public void setUserId(long userId) { this.userId = userId; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public boolean exists() { return id > 0; } }
apache-2.0
fanlehai/CodePractice
java/src/main/java/com/fanlehai/java/container/ForEachTest.java
2809
package com.fanlehai.java.container;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Random;

/**
 * An {@link Iterable} view over a backing list that yields exactly
 * {@code list.size()} elements picked at random (with replacement).
 *
 * <p>BUGFIX: the original iterator's {@code hasNext()} unconditionally
 * returned {@code true}, so any for-each over it never terminated, and
 * {@code next()} on an empty list threw {@link IllegalArgumentException}
 * from {@code Random.nextInt(0)}. The iterator is now bounded and throws
 * the conventional {@link NoSuchElementException} when exhausted.
 */
class DataView<T> implements Iterable<T> {

    List<T> list = new LinkedList<>();
    // Hoisted: the original allocated a new Random on every next() call.
    private final Random random = new Random();

    @Override
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            private int returned = 0; // number of elements handed out so far

            @Override
            public boolean hasNext() {
                return returned < list.size();
            }

            @Override
            public T next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                returned++;
                return list.get(random.nextInt(list.size()));
            }
        };
    }

    void add(T data) {
        list.add(data);
    }
}

/**
 * A partial {@link Collection} implementation with the same bounded
 * random-order iteration as {@link DataView}. Only {@code add}, {@code size},
 * {@code isEmpty} and iteration are implemented; the remaining operations are
 * inert stubs kept with their original return values.
 */
class DataViewCol<T> implements Collection<T> {

    List<T> list = new LinkedList<>();
    private final Random random = new Random();

    @Override
    public int size() {
        return list.size();
    }

    @Override
    public boolean isEmpty() {
        return list.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return false; // not implemented (demo stub)
    }

    @Override
    public Iterator<T> iterator() {
        // Same bounded random iterator as DataView (see class note).
        return new Iterator<T>() {
            private int returned = 0;

            @Override
            public boolean hasNext() {
                return returned < list.size();
            }

            @Override
            public T next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                returned++;
                return list.get(random.nextInt(list.size()));
            }
        };
    }

    @Override
    public Object[] toArray() {
        return null; // not implemented (demo stub)
    }

    @Override
    public <T> T[] toArray(T[] a) {
        // Type parameter shadows the class's T: this is the exact signature
        // Collection requires, so the shadowing is unavoidable here.
        return null; // not implemented (demo stub)
    }

    @Override
    public boolean add(T e) {
        list.add(e);
        return true;
    }

    @Override
    public boolean remove(Object o) {
        return false; // not implemented (demo stub)
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return false; // not implemented (demo stub)
    }

    @Override
    public boolean addAll(Collection<? extends T> c) {
        return false; // not implemented (demo stub)
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        return false; // not implemented (demo stub)
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        return false; // not implemented (demo stub)
    }

    @Override
    public void clear() {
        // not implemented (demo stub)
    }
}

/**
 * Demonstrates for-each over a custom Iterable and a custom Collection.
 * With the bounded iterators above, both loops now terminate after printing
 * one random sample per stored element.
 */
public class ForEachTest {

    public static void main(String[] args) {
        DataView<String> dataView = new DataView<>();
        dataView.add("1");
        dataView.add("2");
        dataView.add("3");
        for (String string : dataView) {
            System.out.println(string);
        }

        DataViewCol<String> dataViewCol = new DataViewCol<>();
        dataViewCol.add("a");
        dataViewCol.add("b");
        dataViewCol.add("c");
        for (String string : dataViewCol) {
            System.out.println(string);
        }
    }
}
apache-2.0
iominh/ThreadWeaver
main/src/main/java/com/google/testing/threadtester/TestThread.java
3779
/* * Copyright 2009 Weaver authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.testing.threadtester; /** * Implementation of Thread that catches and stores any Exception thrown by the * {@link Thread#run} method. * * @author alasdair.mackintosh@gmail.com (Alasdair Mackintosh) */ public class TestThread extends Thread { private volatile Throwable threadException; private class ExceptionHandler implements UncaughtExceptionHandler { @Override public void uncaughtException(Thread t, Throwable e) { threadException = e; } } private static class TestRunnableWrapper implements Runnable { private ThrowingRunnable target; public TestRunnableWrapper(ThrowingRunnable target) { this.target = target; } public void run() { try { target.run(); } catch (Exception e) { // This is a bit hacky, but because the TestRunnableWrapper is created // in the constructor, it has to be static, and cannot have a reference // to the owning TestThread. However, we control the creation of // TestRunnableWrapper, and we know that it must always be executed by a // TestThread, so the cast is safe. ((TestThread) Thread.currentThread()).threadException = e; } } } /** * Creates a new TestThread that executes the given Runnable. Any * RuntimeExceptions thrown when the runnable runs will be caught. */ public TestThread(Runnable target, String name) { super(target, name); setHandler(); } /** * Creates a new TestThread that executes the given TestRunnable. 
Any * Exceptions thrown when the runnable's {@link ThrowingRunnable#run} method * is invoked will be caught. */ public TestThread(ThrowingRunnable target, String name) { super(new TestRunnableWrapper(target), name); setHandler(); } /** * Creates a new TestThread with the given name. */ public TestThread(String name) { super(name); setHandler(); } private void setHandler() { setUncaughtExceptionHandler(new ExceptionHandler()); } /** * Waits for this thread to finish. Will not wait longer than {@link * Options#timeout}. Throws an IllegalStateException if this thread has not * terminated by the specified time. */ public void finish() throws InterruptedException, TestTimeoutException { join(Options.timeout()); if (getState() != State.TERMINATED) { throw new TestTimeoutException("State = " + getState(), this); } } /** * Gets the Throwable thrown by the {@link #run} method, or null if there * is no such exception. */ public Throwable getException() { return threadException; } /** * Rethrows the Throwable thrown by the {@link #run} method, wrapped in a * RuntimeException if necessary. If there was no exception thrown, does * nothing. */ public void throwExceptionsIfAny() { if (threadException != null) { if (threadException instanceof RuntimeException) { throw (RuntimeException) threadException; } else if (threadException instanceof Error) { throw (Error) threadException; } else { throw new RuntimeException(threadException); } } } }
apache-2.0
aesteve/nubes
src/main/java/com/github/aesteve/vertx/nubes/reflections/factories/impl/FileProcessorFactory.java
551
package com.github.aesteve.vertx.nubes.reflections.factories.impl; import com.github.aesteve.vertx.nubes.annotations.File; import com.github.aesteve.vertx.nubes.handlers.AnnotationProcessor; import com.github.aesteve.vertx.nubes.handlers.impl.FileProcessor; import com.github.aesteve.vertx.nubes.reflections.factories.AnnotationProcessorFactory; public class FileProcessorFactory implements AnnotationProcessorFactory<File> { @Override public AnnotationProcessor<File> create(File annotation) { return new FileProcessor(annotation); } }
apache-2.0
dke-knu/i2am
rdma-based-storm/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/ManualPartitionNamedSubscription.java
3207
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.kafka.spout; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.TopicPartition; import org.apache.storm.task.TopologyContext; public class ManualPartitionNamedSubscription extends NamedSubscription { private static final long serialVersionUID = 5633018073527583826L; private final ManualPartitioner partitioner; private Set<TopicPartition> currentAssignment = null; private KafkaConsumer<?, ?> consumer = null; private ConsumerRebalanceListener listener = null; private TopologyContext context = null; public ManualPartitionNamedSubscription(ManualPartitioner parter, Collection<String> topics) { super(topics); this.partitioner = parter; } public ManualPartitionNamedSubscription(ManualPartitioner parter, String ... 
topics) { this(parter, Arrays.asList(topics)); } @Override public <K, V> void subscribe(KafkaConsumer<K, V> consumer, ConsumerRebalanceListener listener, TopologyContext context) { this.consumer = consumer; this.listener = listener; this.context = context; refreshAssignment(); } @Override public void refreshAssignment() { List<TopicPartition> allPartitions = new ArrayList<>(); for (String topic : topics) { for (PartitionInfo partitionInfo: consumer.partitionsFor(topic)) { allPartitions.add(new TopicPartition(partitionInfo.topic(), partitionInfo.partition())); } } Collections.sort(allPartitions, TopicPartitionComparator.INSTANCE); Set<TopicPartition> newAssignment = new HashSet<>(partitioner.partition(allPartitions, context)); if (!newAssignment.equals(currentAssignment)) { if (currentAssignment != null) { listener.onPartitionsRevoked(currentAssignment); listener.onPartitionsAssigned(newAssignment); } currentAssignment = newAssignment; consumer.assign(currentAssignment); } } }
apache-2.0
opennetworkinglab/onos
drivers/server/src/main/java/org/onosproject/drivers/server/ServerBasicSystemOperations.java
4063
/* * Copyright 2020-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.drivers.server; import org.onosproject.net.DeviceId; import org.onosproject.net.behaviour.BasicSystemOperations; import org.onosproject.net.driver.DriverHandler; import org.onosproject.protocol.rest.RestSBDevice; import org.slf4j.Logger; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.InputStream; import java.io.IOException; import java.util.Map; import java.util.concurrent.CompletableFuture; import javax.ws.rs.ProcessingException; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static java.util.concurrent.CompletableFuture.completedFuture; import static org.onosproject.drivers.server.Constants.JSON; import static org.onosproject.drivers.server.Constants.MSG_HANDLER_NULL; import static org.onosproject.drivers.server.Constants.MSG_DEVICE_ID_NULL; import static org.onosproject.drivers.server.Constants.MSG_DEVICE_NULL; import static org.onosproject.drivers.server.Constants.PARAM_TIME; import static org.onosproject.drivers.server.Constants.URL_SRV_TIME_DISCOVERY; import static org.slf4j.LoggerFactory.getLogger; /** * Implementation of basic system operations' behaviour for server devices. 
*/ public class ServerBasicSystemOperations extends BasicServerDriver implements BasicSystemOperations { private final Logger log = getLogger(getClass()); public ServerBasicSystemOperations() { super(); log.debug("Started"); } @Override public DriverHandler handler() { return super.getHandler(); } @Override public void setHandler(DriverHandler handler) { checkNotNull(handler, MSG_HANDLER_NULL); this.handler = handler; } @Override public CompletableFuture<Boolean> reboot() { throw new UnsupportedOperationException("Reboot operation not supported"); } @Override public CompletableFuture<Long> time() { // Retrieve the device ID from the handler DeviceId deviceId = super.getDeviceId(); checkNotNull(deviceId, MSG_DEVICE_ID_NULL); // Get the device RestSBDevice device = super.getDevice(deviceId); checkNotNull(device, MSG_DEVICE_NULL); // Hit the path that provides the server's time InputStream response = null; try { response = getController().get(deviceId, URL_SRV_TIME_DISCOVERY, JSON); } catch (ProcessingException pEx) { log.error("Failed to get the time of device: {}", deviceId); return null; } // Load the JSON into object ObjectMapper mapper = new ObjectMapper(); Map<String, Object> jsonMap = null; JsonNode jsonNode = null; try { jsonMap = mapper.readValue(response, Map.class); jsonNode = mapper.convertValue(jsonMap, JsonNode.class); } catch (IOException ioEx) { log.error("Failed to discover the device details of: {}", deviceId); return null; } if (jsonNode == null) { log.error("Failed to discover the device details of: {}", deviceId); return null; } long time = jsonNode.path(PARAM_TIME).asLong(); checkArgument(time > 0, "Invalid time format: {}", time); return completedFuture(new Long(time)); } }
apache-2.0
weiwenqiang/GitHub
expert/realm-java/realm/tools/backlink-ut-source/missingField/target/io/realm/entities/BacklinksMissingFieldSource.java
947
/*
 * Copyright 2017 Realm Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.realm.entities;

import io.realm.RealmObject;

/**
 * Source side of a backlink relationship used by unit tests.
 *
 * NOTE(review): the field is named {@code xxxchild} while the accessors use
 * "child" — given the path ("backlink-ut-source/missingField") this mismatch is
 * presumably deliberate, so the field name must not be "fixed"; Realm resolves
 * persisted fields by reflection on the field name. Confirm against the
 * backlink test suite before renaming anything here.
 */
public class BacklinksMissingFieldSource extends RealmObject {

    // Persisted reference to the target object; name intentionally does not
    // match the accessor names (see class note).
    private BacklinksMissingFieldTarget xxxchild;

    public void setChild(BacklinksMissingFieldTarget child) {
        xxxchild = child;
    }

    public BacklinksMissingFieldTarget getChild() {
        return xxxchild;
    }
}
apache-2.0
apache/sis
core/sis-referencing/src/main/java/org/apache/sis/geometry/Shapes2D.java
32922
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sis.geometry;

import java.util.Set;
import java.awt.geom.Point2D;
import java.awt.geom.Line2D;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Rectangle2D;
import java.awt.geom.AffineTransform;
import org.opengis.referencing.cs.CoordinateSystem;
import org.opengis.referencing.cs.CoordinateSystemAxis;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.CoordinateOperation;
import org.opengis.referencing.operation.Matrix;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.MathTransform2D;
import org.opengis.referencing.operation.NoninvertibleTransformException;
import org.opengis.referencing.operation.TransformException;
import org.apache.sis.internal.referencing.j2d.ShapeUtilities;
import org.apache.sis.internal.referencing.j2d.IntervalRectangle;
import org.apache.sis.internal.referencing.CoordinateOperations;
import org.apache.sis.referencing.operation.AbstractCoordinateOperation;
import org.apache.sis.referencing.operation.matrix.AffineTransforms2D;
import org.apache.sis.referencing.operation.transform.MathTransforms;
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.Static;

/**
 * Utility methods working on Java2D shapes.
 * The {@code transform(…)} methods in this class work in the same way than the methods of the same signature
 * in {@link Envelopes}, except that they work on {@link Rectangle2D} objects instead of {@code Envelope}.
 * In particular, the same treatment for curvatures and poles is applied.
 *
 * @author  Martin Desruisseaux (IRD, Geomatys)
 * @author  Johann Sorel (Geomatys)
 * @version 1.1
 * @since   0.8
 * @module
 */
public final class Shapes2D extends Static {
    /**
     * Do not allow instantiation of this class.
     */
    private Shapes2D() {
    }

    /**
     * Returns the intersection point between two line segments. The lines do not continue to infinity;
     * if the intersection does not occur between the ending points {@linkplain Line2D#getP1() P1} and
     * {@linkplain Line2D#getP2() P2} of the two line segments, then this method returns {@code null}.
     *
     * @param  a  the first line segment.
     * @param  b  the second line segment.
     * @return the intersection point, or {@code null} if none.
     */
    public static Point2D intersectionPoint(final Line2D a, final Line2D b) {
        // Pure delegation: all geometry is implemented in ShapeUtilities.
        return ShapeUtilities.intersectionPoint(a.getX1(), a.getY1(), a.getX2(), a.getY2(),
                                                b.getX1(), b.getY1(), b.getX2(), b.getY2());
    }

    /**
     * Returns the point on the given {@code line} segment which is closest to the given {@code point}.
     * Let {@code result} be the returned point. This method guarantees (except for rounding errors) that:
     *
     * <ul>
     *   <li>{@code result} is a point on the {@code line} segment. It is located between
     *       the {@linkplain Line2D#getP1() P1} and {@linkplain Line2D#getP2() P2} ending
     *       points of that line segment.</li>
     *   <li>The distance between the {@code result} point and the given {@code point} is
     *       the shortest distance among the set of points meeting the previous condition.
     *       This distance can be obtained with {@code point.distance(result)}.</li>
     * </ul>
     *
     * @param  segment  the line on which to search for a point.
     * @param  point    a point close to the given line.
     * @return the nearest point on the given line.
     *
     * @see #colinearPoint(Line2D, Point2D, double)
     */
    public static Point2D nearestColinearPoint(final Line2D segment, final Point2D point) {
        return ShapeUtilities.nearestColinearPoint(segment.getX1(), segment.getY1(),
                                                   segment.getX2(), segment.getY2(),
                                                   point.getX(), point.getY());
    }

    /**
     * Returns a point on the given {@code line} segment located at the given {@code distance} from that line.
     * Let {@code result} be the returned point. If {@code result} is not null, then this method guarantees
     * (except for rounding error) that:
     *
     * <ul>
     *   <li>{@code result} is a point on the {@code line} segment. It is located between
     *       the {@linkplain Line2D#getP1() P1} and {@linkplain Line2D#getP2() P2} ending
     *       points of that line segment.</li>
     *   <li>The distance between the {@code result} and the given {@code point} is exactly
     *       equal to {@code distance}.</li>
     * </ul>
     *
     * If no result point meets those conditions, then this method returns {@code null}.
     * If two result points met those conditions, then this method returns the point
     * which is the closest to {@code line.getP1()}.
     *
     * @param  line      the line on which to search for a point.
     * @param  point     a point close to the given line.
     * @param  distance  the distance between the given point and the point to be returned.
     * @return a point on the given line located at the given distance from the given point.
     *
     * @see #nearestColinearPoint(Line2D, Point2D)
     */
    public static Point2D colinearPoint(Line2D line, Point2D point, double distance) {
        return ShapeUtilities.colinearPoint(line.getX1(), line.getY1(), line.getX2(), line.getY2(),
                                            point.getX(), point.getY(), distance);
    }

    /**
     * Returns a circle passing by the 3 given points.
     *
     * @param  P1  the first point.
     * @param  P2  the second point.
     * @param  P3  the third point.
     * @return a circle passing by the given points.
     */
    public static Ellipse2D circle(final Point2D P1, final Point2D P2, final Point2D P3) {
        final Point2D.Double center = ShapeUtilities.circleCentre(P1.getX(), P1.getY(),
                                                                  P2.getX(), P2.getY(),
                                                                  P3.getX(), P3.getY());
        // Any of the three points would do for the radius since all lie on the circle.
        final double radius = center.distance(P2);
        return new Ellipse2D.Double(center.x - radius, center.y - radius, 2*radius, 2*radius);
    }

    /**
     * Transforms a rectangular envelope using the given math transform.
     * The transformation is only approximated: the returned envelope may be bigger than
     * necessary, or smaller than required if the bounding box contains a pole.
     *
     * <p>Note that this method can not handle the case where the rectangle contains the North or South pole,
     * or when it cross the ±180° longitude, because {@code MathTransform} does not carry sufficient information.
     * For a more robust rectangle transformation, use {@link #transform(CoordinateOperation, Rectangle2D, Rectangle2D)}
     * instead.</p>
     *
     * @param  transform    the transform to use. Source and target dimension must be 2.
     * @param  envelope     the rectangle to transform (may be {@code null}).
     * @param  destination  the destination rectangle (may be {@code envelope}).
     *         If {@code null}, a new rectangle will be created and returned.
     * @return {@code destination}, or a new rectangle if {@code destination} was null and {@code envelope} was non-null.
     * @throws TransformException if a transform failed.
     *
     * @see #transform(CoordinateOperation, Rectangle2D, Rectangle2D)
     * @see Envelopes#transform(MathTransform, Envelope)
     */
    public static Rectangle2D transform(final MathTransform2D transform,
                                        final Rectangle2D envelope, Rectangle2D destination)
            throws TransformException
    {
        ArgumentChecks.ensureNonNull("transform", transform);
        if (transform instanceof AffineTransform) {
            // Common case implemented in a more efficient way (less points to transform).
            return AffineTransforms2D.transform((AffineTransform) transform, envelope, destination);
        }
        return transform(transform, envelope, destination, new double[2]);
    }

    /**
     * Implementation of {@link #transform(MathTransform2D, Rectangle2D, Rectangle2D)} with the
     * opportunity to save the projected center coordinate. This method sets {@code point} to
     * the center of the source envelope projected to the target CRS.
     */
    @SuppressWarnings("fallthrough")
    private static Rectangle2D transform(final MathTransform2D transform,
                                         final Rectangle2D envelope, Rectangle2D destination,
                                         final double[] point)
            throws TransformException
    {
        if (envelope == null) {
            return null;
        }
        double xmin = Double.POSITIVE_INFINITY;
        double ymin = Double.POSITIVE_INFINITY;
        double xmax = Double.NEGATIVE_INFINITY;
        double ymax = Double.NEGATIVE_INFINITY;
        // The controller re-runs the whole sampling with translated transforms when
        // wraparound (e.g. longitude crossing) may apply; see the loop condition below.
        final WraparoundInEnvelope.Controller wc = new WraparoundInEnvelope.Controller(transform);
        do {
            /*
             * Notation (as if we were applying a map projection, but this is not necessarily the case):
             *   - (λ,φ) are coordinate values before projection.
             *   - (x,y) are coordinate values after projection.
             *   - D[00|01|10|11] are the ∂x/∂λ, ∂x/∂φ, ∂y/∂λ and ∂y/∂φ derivatives respectively.
             *   - Variables with indice 0 are for the very first point in iteration order.
             *   - Variables with indice 1 are for the values of the previous iteration.
             *   - Variables with indice 2 are for the current values in the iteration.
             *   - P1-P2 form a line segment to be checked for curvature.
             */
            double x0=0, y0=0, λ0=0, φ0=0;
            double x1=0, y1=0, λ1=0, φ1=0;
            Matrix D0=null, D1=null, D2=null;
            // x2 and y2 defined inside the loop.
            boolean isDerivativeSupported = true;
            final CurveExtremum extremum = new CurveExtremum();
            for (int i=0; i<=8; i++) {
                /*
                 * Iteration order (center must be last):
                 *
                 *   (6)────(5)────(4)
                 *    |             |
                 *   (7)    (8)    (3)
                 *    |             |
                 *   (0)────(1)────(2)
                 */
                double λ2, φ2;
                switch (i) {
                    case 0: case 6: case 7: λ2 = envelope.getMinX();    break;
                    case 1: case 5: case 8: λ2 = envelope.getCenterX(); break;
                    case 2: case 3: case 4: λ2 = envelope.getMaxX();    break;
                    default: throw new AssertionError(i);
                }
                switch (i) {
                    case 0: case 1: case 2: φ2 = envelope.getMinY();    break;
                    case 3: case 7: case 8: φ2 = envelope.getCenterY(); break;
                    case 4: case 5: case 6: φ2 = envelope.getMaxY();    break;
                    default: throw new AssertionError(i);
                }
                point[0] = λ2;
                point[1] = φ2;
                try {
                    D1 = D2;
                    // Transforms the point in place; derivative is skipped for the center (i == 8).
                    D2 = Envelopes.derivativeAndTransform(wc.transform, point, point, 0,
                            isDerivativeSupported && i != 8);
                } catch (TransformException e) {
                    if (!isDerivativeSupported) {
                        throw e;                    // Derivative were already disabled, so something went wrong.
                    }
                    // Fall back on plain transforms without derivatives for the rest of the iteration.
                    isDerivativeSupported = false;
                    D2 = null;
                    point[0] = λ2;
                    point[1] = φ2;
                    wc.transform.transform(point, 0, point, 0, 1);
                    Envelopes.recoverableException(Shapes2D.class, e);      // Log only if the above call was successful.
                }
                double x2 = point[0];
                double y2 = point[1];
                if (x2 < xmin) xmin = x2;
                if (x2 > xmax) xmax = x2;
                if (y2 < ymin) ymin = y2;
                if (y2 > ymax) ymax = y2;
                switch (i) {
                    case 0: {                       // Remember the first point.
                        λ0=λ2; x0=x2;
                        φ0=φ2; y0=y2;
                        D0=D2;
                        break;
                    }
                    case 8: {                       // Close the iteration with the first point.
                        λ2=λ0; x2=x0;               // Discard P2 because it is the rectangle center.
                        φ2=φ0; y2=y0;
                        D2=D0;
                        break;
                    }
                }
                /*
                 * At this point, we expanded the rectangle using the projected points. Now try
                 * to use the information provided by derivatives at those points, if available.
                 * For the following block, notation is:
                 *
                 *   - s  are coordinate values in the source space (λ or φ)
                 *   - t  are coordinate values in the target space (x or y)
                 *
                 * They are not necessarily in the same dimension. For example would could have
                 * s=λ while t=y. This is typically the case when inspecting the top or bottom
                 * line segment of the rectangle.
                 *
                 * The same technic is also applied in the transform(MathTransform, Envelope) method.
                 * The general method is more "elegant", at the cost of more storage requirement.
                 */
                if (D1 != null && D2 != null) {
                    final int srcDim;
                    final double s1, s2;            // Coordinate values in source space (before projection)
                    switch (i) {
                        case 1: case 2: case 5: case 6: {assert φ2==φ1; srcDim=0; s1=λ1; s2=λ2; break;}   // Horizontal segment
                        case 3: case 4: case 7: case 8: {assert λ2==λ1; srcDim=1; s1=φ1; s2=φ2; break;}   // Vertical segment
                        default: throw new AssertionError(i);
                    }
                    final double min, max;
                    if (s1 < s2) {min=s1; max=s2;}
                    else         {min=s2; max=s1;}
                    int tgtDim = 0;
                    do {                            // Executed exactly twice, for dimensions 0 and 1 in the projected space.
                        extremum.resolve(s1, (tgtDim == 0) ? x1 : y1, D1.getElement(tgtDim, srcDim),
                                         s2, (tgtDim == 0) ? x2 : y2, D2.getElement(tgtDim, srcDim));
                        /*
                         * At this point we found the extremum of the projected line segment
                         * using a cubic curve t = A + Bs + Cs² + Ds³ approximation. Before
                         * to add those extremum into the projected bounding box, we need to
                         * ensure that the source coordinate is inside the the original
                         * (unprojected) bounding box.
                         */
                        boolean isP2 = false;
                        do {                        // Executed exactly twice, one for each point.
                            final double se = isP2 ? extremum.ex2 : extremum.ex1;
                            if (se > min && se < max) {
                                final double te = isP2 ? extremum.ey2 : extremum.ey1;
                                if ((tgtDim == 0) ? (te < xmin || te > xmax)
                                                  : (te < ymin || te > ymax))
                                {
                                    /*
                                     * At this point, we have determined that adding the extremum point
                                     * to the rectangle would have expanded it. However we will not add
                                     * that point directly, because maybe its position is not quite right
                                     * (since we used a cubic curve approximation). Instead, we project
                                     * the point on the rectangle border which is located vis-à-vis the
                                     * extremum. Our tests show that the correction can be as much as 50
                                     * metres.
                                     */
                                    final double oldX = point[0];
                                    final double oldY = point[1];
                                    if (srcDim == 0) {
                                        point[0] = se;
                                        point[1] = φ1;          // == φ2 since we have an horizontal segment.
                                    } else {
                                        point[0] = λ1;          // == λ2 since we have a vertical segment.
                                        point[1] = se;
                                    }
                                    wc.transform.transform(point, 0, point, 0, 1);
                                    final double x = point[0];
                                    final double y = point[1];
                                    if (x < xmin) xmin = x;
                                    if (x > xmax) xmax = x;
                                    if (y < ymin) ymin = y;
                                    if (y > ymax) ymax = y;
                                    // Restore the point, which still holds the projected center when i == 8.
                                    point[0] = oldX;
                                    point[1] = oldY;
                                }
                            }
                        } while ((isP2 = !isP2) == true);
                    } while (++tgtDim == 1);        // Loops while tgtDim becomes 1, i.e. runs for dimensions 0 and 1 only.
                }
                λ1=λ2; x1=x2;
                φ1=φ2; y1=y2;
                D1=D2;
            }
        } while (wc.translate());
        if (destination != null) {
            destination.setRect(xmin, ymin, xmax - xmin, ymax - ymin);
        } else {
            destination = new IntervalRectangle(xmin, ymin, xmax, ymax);
        }
        /*
         * Note: a previous version had an "assert" statement here comparing our calculation
         * with the calculation performed by the more general method working on Envelope. We
         * verified that the same values (coordinate points and derivatives) were ultimately
         * passed to the CurveExtremum.resolve(…) method, so we would expect the same result.
         * However the iteration order is different. The result seems insensitive to iteration
         * order most of the time, but not always. However, it seems that the cases were the
         * results are different are the cases where the methods working with CoordinateOperation
         * object wipe out that difference anyway.
         */
        return destination;
    }

    /**
     * Transforms a rectangular envelope using the given coordinate operation.
     * The transformation is only approximated: the returned envelope may be bigger
     * than the smallest possible bounding box, but should not be smaller in most cases.
     *
     * <p>This method can handle the case where the rectangle contains the North or South pole,
     * or when it cross the ±180° longitude.</p>
     *
     * @param  operation    the operation to use. Source and target dimension must be 2.
     * @param  envelope     the rectangle to transform (may be {@code null}).
     * @param  destination  the destination rectangle (may be {@code envelope}).
     *         If {@code null}, a new rectangle will be created and returned.
     * @return {@code destination}, or a new rectangle if {@code destination} was null and {@code envelope} was non-null.
     * @throws TransformException if a transform failed.
     *
     * @see #transform(MathTransform2D, Rectangle2D, Rectangle2D)
     * @see Envelopes#transform(CoordinateOperation, Envelope)
     */
    @SuppressWarnings("null")
    public static Rectangle2D transform(final CoordinateOperation operation,
                                        final Rectangle2D envelope, Rectangle2D destination)
            throws TransformException
    {
        ArgumentChecks.ensureNonNull("operation", operation);
        if (envelope == null) {
            return null;
        }
        final MathTransform2D mt = MathTransforms.bidimensional(operation.getMathTransform());
        final double[] center = new double[2];
        destination = transform(mt, envelope, destination, center);
        /*
         * If the source envelope crosses the expected range of valid coordinates, also projects
         * the range bounds as a safety. See the comments in transform(Envelope, ...).
         */
        final CoordinateReferenceSystem sourceCRS = operation.getSourceCRS();
        if (sourceCRS != null) {
            final CoordinateSystem cs = sourceCRS.getCoordinateSystem();
            if (cs != null && cs.getDimension() == 2) {                 // Paranoiac check.
                CoordinateSystemAxis axis = cs.getAxis(0);
                double min = envelope.getMinX();
                double max = envelope.getMaxX();
                Point2D.Double pt = null;
                // i iterates over (axis0 min, axis0 max, axis1 min, axis1 max).
                for (int i=0; i<4; i++) {
                    if (i == 2) {
                        axis = cs.getAxis(1);
                        min = envelope.getMinY();
                        max = envelope.getMaxY();
                    }
                    final double v = (i & 1) == 0 ? axis.getMinimumValue() : axis.getMaximumValue();
                    if (!(v > min && v < max)) {
                        continue;
                    }
                    if (pt == null) {
                        pt = new Point2D.Double();
                    }
                    if ((i & 2) == 0) {
                        pt.x = v;
                        pt.y = envelope.getCenterY();
                    } else {
                        pt.x = envelope.getCenterX();
                        pt.y = v;
                    }
                    destination.add(mt.transform(pt, pt));
                }
            }
        }
        /*
         * Now take the target CRS in account.
         */
        final CoordinateReferenceSystem targetCRS = operation.getTargetCRS();
        if (targetCRS == null) {
            return destination;
        }
        final CoordinateSystem targetCS = targetCRS.getCoordinateSystem();
        if (targetCS == null || targetCS.getDimension() != 2) {
            // It should be an error, but we keep this method tolerant.
            return destination;
        }
        /*
         * Checks for singularity points. See the Envelopes.transform(CoordinateOperation, Envelope)
         * method for comments about the algorithm. The code below is the same algorithm adapted for
         * the 2D case and the related objects (Point2D, Rectangle2D, etc.).
         *
         * The `border` variable in the loop below is used in order to compress 2 dimensions
         * and 2 extremums in a single loop, in this order: (xmin, xmax, ymin, ymax).
         */
        MathTransform2D inverse = null;
        TransformException warning = null;
        Point2D sourcePt = null;
        Point2D targetPt = null;
        Point2D revertPt = null;
        int includedBoundsValue = 0;            // A bitmask for each (dimension, extremum) pairs.
        for (int border=0; border<4; border++) {    // 2 dimensions and 2 extremums compacted in a flag.
            final int dimension = border >>> 1;     // The dimension index being examined.
            final CoordinateSystemAxis axis = targetCS.getAxis(dimension);
            if (axis == null) {                     // Should never be null, but check as a paranoiac safety.
                continue;
            }
            final double extremum = (border & 1) == 0 ? axis.getMinimumValue() : axis.getMaximumValue();
            if (!Double.isFinite(extremum)) {
                continue;
            }
            // Lazily computes the inverse transform: it is needed only when at least
            // one axis extremum is finite, and may legitimately not exist at all.
            if (inverse == null) {
                try {
                    inverse = mt.inverse();
                } catch (NoninvertibleTransformException exception) {
                    Envelopes.recoverableException(Shapes2D.class, exception);
                    return destination;
                }
                targetPt = new Point2D.Double();
            }
            switch (dimension) {
                case 0: targetPt.setLocation(extremum,  center[1]); break;
                case 1: targetPt.setLocation(center[0], extremum ); break;
                default: throw new AssertionError(border);
            }
            try {
                sourcePt = inverse.transform(targetPt, sourcePt);
                if (CoordinateOperations.isWrapAround(axis)) {
                    // Round-trip check: accept the extremum only if projecting the source point
                    // back lands close enough to it (guards against wraparound artifacts).
                    revertPt = mt.transform(sourcePt, revertPt);
                    final double delta = Math.abs((dimension == 0 ? revertPt.getX() : revertPt.getY()) - extremum);
                    if (!(delta < Envelopes.SPAN_FRACTION_AS_BOUND * (axis.getMaximumValue() - axis.getMinimumValue()))) {
                        continue;
                    }
                }
                if (envelope.contains(sourcePt)) {
                    destination.add(targetPt);
                    includedBoundsValue |= (1 << border);
                }
            } catch (TransformException exception) {
                if (warning == null) {
                    warning = exception;
                } else {
                    warning.addSuppressed(exception);
                }
            }
        }
        /*
         * Iterate over all dimensions of type "WRAPAROUND" for which minimal or maximal axis
         * values have not yet been included in the envelope. We could inline this check inside
         * the above loop, but we don't in order to have a chance to exclude the dimensions for
         * which the point have already been added.
         *
         * See transform(CoordinateOperation, Envelope) for more comments about the algorithm.
         */
        if (includedBoundsValue != 0) {
            /*
             * Bits mask transformation:
             *   1) Swaps the two dimensions (YyXx → XxYy)
             *   2) Insert a space between each bits (XxYy → X.x.Y.y.)
             *   3) Fill the space with duplicated values (X.x.Y.y. → XXxxYYyy)
             *
             * In terms of bit positions 1,2,4,8 (not bit values), we have:
             *
             *   8421 → 22881144
             *   i.e. (ymax, ymin, xmax, xmin) → (xmax², ymax², xmin², ymin²)
             *
             * Now look at the last part: (xmin², ymin²). The next step is to perform a bitwise
             * AND operation in order to have only both of the following conditions:
             *
             *   Borders not yet added to the envelope: ~(ymax, ymin, xmax, xmin)
             *   Borders in which a singularity exists:  (xmin, xmin, ymin, ymin)
             *
             * The same operation is repeated on the next 4 bits for (xmax, xmax, ymax, ymax).
             */
            int toTest = ((includedBoundsValue & 1) << 3) | ((includedBoundsValue & 4) >>> 1) |
                         ((includedBoundsValue & 2) << 6) | ((includedBoundsValue & 8) << 2);
            toTest |= (toTest >>> 1);       // Duplicate the bit values.
            toTest &= ~(includedBoundsValue | (includedBoundsValue << 4));
            /*
             * Forget any axes that are not of kind "WRAPAROUND". Then get the final
             * bit pattern indicating which points to test. Iterate over that bits.
             */
            if ((toTest & 0x33333333) != 0 && !CoordinateOperations.isWrapAround(targetCS.getAxis(0))) toTest &= 0xCCCCCCCC;
            if ((toTest & 0xCCCCCCCC) != 0 && !CoordinateOperations.isWrapAround(targetCS.getAxis(1))) toTest &= 0x33333333;
            while (toTest != 0) {
                final int border = Integer.numberOfTrailingZeros(toTest);
                final int bitMask = 1 << border;
                toTest &= ~bitMask;         // Clear now the bit, for the next iteration.
                final int dimensionToAdd = (border >>> 1) & 1;
                final CoordinateSystemAxis toAdd = targetCS.getAxis(dimensionToAdd);
                final CoordinateSystemAxis added = targetCS.getAxis(dimensionToAdd ^ 1);
                final double x = (border & 1) == 0 ? toAdd.getMinimumValue() : toAdd.getMaximumValue();
                final double y = (border & 4) == 0 ? added.getMinimumValue() : added.getMaximumValue();
                if (dimensionToAdd == 0) {
                    targetPt.setLocation(x, y);
                } else {
                    targetPt.setLocation(y, x);
                }
                try {
                    sourcePt = inverse.transform(targetPt, sourcePt);
                    if (envelope.contains(sourcePt)) {
                        destination.add(targetPt);
                    }
                } catch (TransformException exception) {
                    if (warning == null) {
                        warning = exception;
                    } else {
                        warning.addSuppressed(exception);
                    }
                }
            }
        }
        /*
         * At this point we finished envelope transformation. Verify if some coordinates need to be "wrapped around"
         * as a result of the coordinate operation. This is usually the longitude axis where the source CRS uses
         * the [-180 … +180]° range and the target CRS uses the [0 … 360]° range, or the converse. In such case we
         * set the rectangle to the full range (we do not use the mechanism documented in Envelope2D) because most
         * Rectangle2D implementations do not support crossing the anti-meridian. This results in larger rectangle
         * than what would be possible with GeneralEnvelope or Envelope2D, but we try to limit the situation where
         * this expansion is applied.
         */
        final Set<Integer> wrapAroundChanges;
        if (operation instanceof AbstractCoordinateOperation) {
            wrapAroundChanges = ((AbstractCoordinateOperation) operation).getWrapAroundChanges();
        } else {
            wrapAroundChanges = CoordinateOperations.wrapAroundChanges(sourceCRS, targetCS);
        }
        for (int dim : wrapAroundChanges) {             // Empty in the vast majority of cases.
            final CoordinateSystemAxis axis = targetCS.getAxis(dim);
            final double minimum = axis.getMinimumValue();
            final double maximum = axis.getMaximumValue();
            final double o1, o2;
            if (dim == 0) {
                o1 = destination.getMinX();
                o2 = destination.getMaxX();
            } else {
                o1 = destination.getMinY();
                o2 = destination.getMaxY();
            }
            if (o1 < minimum || o2 > maximum) {
                // Expand to the axis' full span along that dimension (see block comment above).
                final double span = maximum - minimum;
                if (dim == 0) {
                    destination.setRect(minimum, destination.getY(), span, destination.getHeight());
                } else {
                    destination.setRect(destination.getX(), minimum, destination.getWidth(), span);
                }
            }
        }
        if (warning != null) {
            Envelopes.recoverableException(Shapes2D.class, warning);
        }
        return destination;
    }
}
apache-2.0
jibaro/sql-for-lucene
src/main/java/bbejeck/sql/antlr/generated/LuceneSqlBaseVisitor.java
12340
// Generated from /Users/bbejeck/dev/github_clones/sql-for-lucene/src/main/java/LuceneSql.g4 by ANTLR 4.5.1 package bbejeck.sql.antlr.generated; import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; /** * This class provides an empty implementation of {@link LuceneSqlVisitor}, * which can be extended to create a visitor which only needs to handle a subset * of the available methods. * * @param <T> The return type of the visit operation. Use {@link Void} for * operations with no return type. */ public class LuceneSqlBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements LuceneSqlVisitor<T> { /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitQuery(LuceneSqlParser.QueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitSelect_stmt(LuceneSqlParser.Select_stmtContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitFrom_stmt(LuceneSqlParser.From_stmtContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitWhere_stmt(LuceneSqlParser.Where_stmtContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLimit_stmt(LuceneSqlParser.Limit_stmtContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitSearch_condition(LuceneSqlParser.Search_conditionContext 
ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitPredicate(LuceneSqlParser.PredicateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitComparison_predicate(LuceneSqlParser.Comparison_predicateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitFunction_predicate(LuceneSqlParser.Function_predicateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitField(LuceneSqlParser.FieldContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitEquals(LuceneSqlParser.EqualsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitNotEqual(LuceneSqlParser.NotEqualContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitRange_op(LuceneSqlParser.Range_opContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitGreaterThanNumber(LuceneSqlParser.GreaterThanNumberContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default 
implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitGreaterThanTerm(LuceneSqlParser.GreaterThanTermContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitGreaterThanDate(LuceneSqlParser.GreaterThanDateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitGreaterThanEqNumber(LuceneSqlParser.GreaterThanEqNumberContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitGreaterThanEqTerm(LuceneSqlParser.GreaterThanEqTermContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitGreaterThanEqDate(LuceneSqlParser.GreaterThanEqDateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLessThanNumber(LuceneSqlParser.LessThanNumberContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLessThanTerm(LuceneSqlParser.LessThanTermContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLessThanDate(LuceneSqlParser.LessThanDateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation 
returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLessThanEqNumber(LuceneSqlParser.LessThanEqNumberContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLessThanEqTerm(LuceneSqlParser.LessThanEqTermContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLessThanEqDate(LuceneSqlParser.LessThanEqDateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitAnd(LuceneSqlParser.AndContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitOr(LuceneSqlParser.OrContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitNot(LuceneSqlParser.NotContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitAndNot(LuceneSqlParser.AndNotContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitOrNot(LuceneSqlParser.OrNotContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T 
visitNested_predicate(LuceneSqlParser.Nested_predicateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitNumber(LuceneSqlParser.NumberContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitTerm(LuceneSqlParser.TermContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitPhrase(LuceneSqlParser.PhraseContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitDate(LuceneSqlParser.DateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitMULTI_PHRASE(LuceneSqlParser.MULTI_PHRASEContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitRegexp(LuceneSqlParser.RegexpContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitBetween(LuceneSqlParser.BetweenContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitBetween_term(LuceneSqlParser.Between_termContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default 
implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitBetween_number(LuceneSqlParser.Between_numberContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitLike(LuceneSqlParser.LikeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitIn(LuceneSqlParser.InContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitValue_list(LuceneSqlParser.Value_listContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitNumber_list(LuceneSqlParser.Number_listContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitDate_list(LuceneSqlParser.Date_listContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitTerm_list(LuceneSqlParser.Term_listContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitPhrase_list(LuceneSqlParser.Phrase_listContext ctx) { return visitChildren(ctx); } }
apache-2.0
sdgdsffdsfff/zeus
slb/src/main/java/com/ctrip/zeus/dao/entity/RuleRule.java
4663
package com.ctrip.zeus.dao.entity;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;

/**
 * Entity mapped to the {@code rule} table: a named rule with a type code and a
 * last-modified timestamp. Instances can be populated either through the
 * setters or through the fluent {@link Builder}.
 */
public class RuleRule {
    private Long id;
    private String name;
    private Integer ruleType;
    private Date datachangeLasttime;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    /** Stores the name with surrounding whitespace stripped; {@code null} stays {@code null}. */
    public void setName(String name) {
        this.name = name == null ? null : name.trim();
    }

    public Integer getRuleType() {
        return ruleType;
    }

    public void setRuleType(Integer ruleType) {
        this.ruleType = ruleType;
    }

    public Date getDatachangeLasttime() {
        return datachangeLasttime;
    }

    public void setDatachangeLasttime(Date datachangeLasttime) {
        this.datachangeLasttime = datachangeLasttime;
    }

    @Override
    public String toString() {
        // Debug-friendly dump of every field plus the identity hash.
        return new StringBuilder()
                .append(getClass().getSimpleName())
                .append(" [")
                .append("Hash = ").append(hashCode())
                .append(", id=").append(id)
                .append(", name=").append(name)
                .append(", ruleType=").append(ruleType)
                .append(", datachangeLasttime=").append(datachangeLasttime)
                .append("]")
                .toString();
    }

    public static RuleRule.Builder builder() {
        return new RuleRule.Builder();
    }

    /** Fluent builder that fills a single {@link RuleRule} instance. */
    public static class Builder {
        private RuleRule instance;

        public Builder() {
            this.instance = new RuleRule();
        }

        public Builder id(Long id) {
            instance.setId(id);
            return this;
        }

        public Builder name(String name) {
            instance.setName(name);
            return this;
        }

        public Builder ruleType(Integer ruleType) {
            instance.setRuleType(ruleType);
            return this;
        }

        public Builder datachangeLasttime(Date datachangeLasttime) {
            instance.setDatachangeLasttime(datachangeLasttime);
            return this;
        }

        public RuleRule build() {
            return this.instance;
        }
    }

    /**
     * Column metadata for the {@code rule} table (database column name, Java
     * property, JDBC type, and whether the column name must be back-quoted).
     */
    public enum Column {
        id("id", "id", "BIGINT", false),
        name("name", "name", "VARCHAR", false),
        ruleType("rule_type", "ruleType", "INTEGER", false),
        datachangeLasttime("DataChange_LastTime", "datachangeLasttime", "TIMESTAMP", false),
        attributes("attributes", "attributes", "LONGVARCHAR", false),
        content("content", "content", "LONGVARCHAR", false);

        private static final String BEGINNING_DELIMITER = "`";
        private static final String ENDING_DELIMITER = "`";

        private final String column;
        private final boolean isColumnNameDelimited;
        private final String javaProperty;
        private final String jdbcType;

        Column(String column, String javaProperty, String jdbcType, boolean isColumnNameDelimited) {
            this.column = column;
            this.javaProperty = javaProperty;
            this.jdbcType = jdbcType;
            this.isColumnNameDelimited = isColumnNameDelimited;
        }

        public String value() {
            return this.column;
        }

        public String getValue() {
            return this.column;
        }

        public String getJavaProperty() {
            return this.javaProperty;
        }

        public String getJdbcType() {
            return this.jdbcType;
        }

        /** Column name followed by {@code DESC}, for ORDER BY clauses. */
        public String desc() {
            return this.getEscapedColumnName() + " DESC";
        }

        /** Column name followed by {@code ASC}, for ORDER BY clauses. */
        public String asc() {
            return this.getEscapedColumnName() + " ASC";
        }

        /** All columns except the given ones; passing nothing returns every column. */
        public static Column[] excludes(Column... excludes) {
            ArrayList<Column> remaining = new ArrayList<>(Arrays.asList(Column.values()));
            if (excludes != null && excludes.length > 0) {
                remaining.removeAll(Arrays.asList(excludes));
            }
            return remaining.toArray(new Column[]{});
        }

        /** Column name, back-quoted when the raw name needs escaping. */
        public String getEscapedColumnName() {
            if (this.isColumnNameDelimited) {
                return BEGINNING_DELIMITER + this.column + ENDING_DELIMITER;
            }
            return this.column;
        }

        public String getAliasedEscapedColumnName() {
            return this.getEscapedColumnName();
        }
    }
}
apache-2.0
wattale/carbon-identity
components/identity/org.wso2.carbon.identity.sso.saml/src/main/java/org/wso2/carbon/identity/sso/saml/builders/SignKeyDataHolder.java
8431
/* * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.sso.saml.builders; import org.apache.xml.security.signature.XMLSignature; import org.opensaml.xml.security.credential.Credential; import org.opensaml.xml.security.credential.CredentialContextSet; import org.opensaml.xml.security.credential.UsageType; import org.opensaml.xml.security.x509.X509Credential; import org.osgi.framework.BundleContext; import org.osgi.util.tracker.ServiceTracker; import org.wso2.carbon.base.ServerConfiguration; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.core.util.KeyStoreManager; import org.wso2.carbon.identity.base.IdentityException; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.sso.saml.SAMLSSOConstants; import org.wso2.carbon.identity.sso.saml.util.SAMLSSOUtil; import org.wso2.carbon.security.keystore.KeyStoreAdmin; import org.wso2.carbon.utils.AuthenticationObserver; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import javax.crypto.SecretKey; import java.security.KeyStore; import java.security.PrivateKey; import java.security.PublicKey; import java.security.cert.Certificate; import java.security.cert.X509CRL; import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.Collection; public class SignKeyDataHolder implements X509Credential { 
private static SignKeyDataHolder instance = null; private String signatureAlgorithm = null; private X509Certificate[] issuerCerts = null; private PrivateKey issuerPK = null; public SignKeyDataHolder(String username) throws IdentityException { String keyAlias = null; KeyStoreAdmin keyAdmin; KeyStoreManager keyMan; Certificate[] certificates; int tenantID; String tenantDomain; String userTenantDomain; String spTenantDomain; try { userTenantDomain = SAMLSSOUtil.getUserTenantDomain(); spTenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(); if (userTenantDomain == null) { // all local authenticator must set the value of userTenantDomain. // if userTenantDomain is null that means, there is no local authenticator or // the assert with local ID is set. In that case, this should be coming from // federated authentication. In that case, we treat SP domain is equal to user domain. userTenantDomain = spTenantDomain; } if (!SAMLSSOUtil.isSaaSApplication() && !spTenantDomain.equalsIgnoreCase(userTenantDomain)) { throw new IdentityException("Service Provider tenant domian must be equal to user tenant domain" + " for non-SaaS applications"); } String signWithValue = IdentityUtil.getProperty( SAMLSSOConstants.FileBasedSPConfig.USE_AUTHENTICATED_USER_DOMAIN_CRYPTO); if (signWithValue != null && "true".equalsIgnoreCase(signWithValue.trim())) { tenantDomain = userTenantDomain; tenantID = SAMLSSOUtil.getRealmService().getTenantManager(). 
getTenantId(tenantDomain); } else { tenantDomain = spTenantDomain; tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); } initializeRegistry(tenantID); if (tenantID != MultitenantConstants.SUPER_TENANT_ID) { String keyStoreName = SAMLSSOUtil.generateKSNameFromDomainName(tenantDomain); keyAlias = tenantDomain; keyMan = KeyStoreManager.getInstance(tenantID); KeyStore keyStore = keyMan.getKeyStore(keyStoreName); issuerPK = (PrivateKey) keyMan.getPrivateKey(keyStoreName, tenantDomain); certificates = keyStore.getCertificateChain(keyAlias); issuerCerts = new X509Certificate[certificates.length]; int i = 0; for (Certificate certificate : certificates) { issuerCerts[i++] = (X509Certificate) certificate; } signatureAlgorithm = XMLSignature.ALGO_ID_SIGNATURE_RSA; String pubKeyAlgo = issuerCerts[0].getPublicKey().getAlgorithm(); if (pubKeyAlgo.equalsIgnoreCase("DSA")) { signatureAlgorithm = XMLSignature.ALGO_ID_SIGNATURE_DSA; } } else { keyAlias = ServerConfiguration.getInstance().getFirstProperty( "Security.KeyStore.KeyAlias"); keyAdmin = new KeyStoreAdmin(tenantID, SAMLSSOUtil.getRegistryService().getGovernanceSystemRegistry()); keyMan = KeyStoreManager.getInstance(tenantID); issuerPK = (PrivateKey) keyAdmin.getPrivateKey(keyAlias, true); certificates = keyMan.getPrimaryKeyStore().getCertificateChain(keyAlias); issuerCerts = new X509Certificate[certificates.length]; int i = 0; for (Certificate certificate : certificates) { issuerCerts[i++] = (X509Certificate) certificate; } signatureAlgorithm = XMLSignature.ALGO_ID_SIGNATURE_RSA; String pubKeyAlgo = issuerCerts[0].getPublicKey().getAlgorithm(); if (pubKeyAlgo.equalsIgnoreCase("DSA")) { signatureAlgorithm = XMLSignature.ALGO_ID_SIGNATURE_DSA; } } } catch (Exception e) { throw new IdentityException(e.getMessage(), e); } } private void initializeRegistry(int tenantId) { BundleContext bundleContext = SAMLSSOUtil.getBundleContext(); if (bundleContext != null) { ServiceTracker tracker = new 
ServiceTracker(bundleContext, AuthenticationObserver.class.getName(), null); tracker.open(); Object[] services = tracker.getServices(); if (services != null) { for (Object service : services) { ((AuthenticationObserver) service).startedAuthentication(tenantId); } } tracker.close(); } } public String getSignatureAlgorithm() { return signatureAlgorithm; } public void setSignatureAlgorithm(String signatureAlgorithm) { this.signatureAlgorithm = signatureAlgorithm; } public Collection<X509CRL> getCRLs() { return null; } public X509Certificate getEntityCertificate() { return issuerCerts[0]; } public Collection<X509Certificate> getEntityCertificateChain() { return Arrays.asList(issuerCerts); } public CredentialContextSet getCredentalContextSet() { // TODO Auto-generated method stub return null; } public Class<? extends Credential> getCredentialType() { // TODO Auto-generated method stub return null; } public String getEntityId() { // TODO Auto-generated method stub return null; } public Collection<String> getKeyNames() { // TODO Auto-generated method stub return null; } public PrivateKey getPrivateKey() { return issuerPK; } public PublicKey getPublicKey() { return issuerCerts[0].getPublicKey(); } public SecretKey getSecretKey() { // TODO Auto-generated method stub return null; } public UsageType getUsageType() { // TODO Auto-generated method stub return null; } }
apache-2.0
cy19890513/Leetcode-1
src/test/java/com/fishercoder/_690Test.java
1310
package com.fishercoder; import com.fishercoder.common.classes.Employee; import com.fishercoder.solutions._690; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static junit.framework.TestCase.assertEquals; /** * Created by fishercoder on 5/18/17. */ public class _690Test { private static _690.Solution1 solution1; private static List<Employee> employees; private static int id; @Before public void setupForEachTest() { solution1 = new _690.Solution1(); } @Test public void test1() { employees = new ArrayList(Arrays.asList( new Employee(1, 5, Arrays.asList(2,3)), new Employee(2, 3, Arrays.asList()), new Employee(3, 3, Arrays.asList()))); id = 1; assertEquals(11, solution1.getImportance(employees, id)); } @Test public void test2() { employees = new ArrayList(Arrays.asList( new Employee(1, 5, Arrays.asList(2,3)), new Employee(2, 3, Arrays.asList(4)), new Employee(3, 4, Arrays.asList()), new Employee(4, 1, Arrays.asList()))); id = 1; assertEquals(13, solution1.getImportance(employees, id)); } }
apache-2.0
skolome/son-sp-infrabstract
wim-adaptor/adaptor/src/main/java/sonata/kernel/WimAdaptor/messaging/RabbitMqHelperSingleton.java
5279
/* * Copyright (c) 2015 SONATA-NFV, UCL, NOKIA, THALES, NCSR Demokritos ALL RIGHTS RESERVED. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. * * Neither the name of the SONATA-NFV, UCL, NOKIA, NCSR Demokritos nor the names of its contributors * may be used to endorse or promote products derived from this software without specific prior * written permission. * * This work has been performed in the framework of the SONATA project, funded by the European * Commission under Grant number 671517 through the Horizon 2020 and 5G-PPP programmes. The authors * would like to acknowledge the contributions of their colleagues of the SONATA partner consortium * (www.sonata-nfv.eu). 
* * @author Dario Valocchi (Ph.D.), UCL * */ package sonata.kernel.WimAdaptor.messaging; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Connection; import com.rabbitmq.client.ConnectionFactory; import org.json.JSONObject; import org.json.JSONTokener; import org.slf4j.LoggerFactory; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.net.URISyntaxException; import java.nio.charset.Charset; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.util.Properties; import java.util.concurrent.TimeoutException; public class RabbitMqHelperSingleton { private Channel channel; private Connection connection; private String queueName; private String exchangeName; private static final String configFilePath = "/etc/son-mano/broker.config"; private static final org.slf4j.Logger Logger = LoggerFactory.getLogger(RabbitMqHelperSingleton.class); private static RabbitMqHelperSingleton myInstance = null; private RabbitMqHelperSingleton(){ Properties brokerConfig = parseConfigFile(); Logger.info("Connecting to broker..."); ConnectionFactory cf = new ConnectionFactory(); if (!brokerConfig.containsKey("broker_url") || !brokerConfig.containsKey("exchange")) { Logger.error("Missing broker url configuration."); System.exit(1); } try { Logger.info("Connecting to: " + brokerConfig.getProperty("broker_url")); cf.setUri(brokerConfig.getProperty("broker_url")); connection = cf.newConnection(); channel = connection.createChannel(); exchangeName = brokerConfig.getProperty("exchange"); channel.exchangeDeclare(exchangeName, "topic"); queueName = exchangeName + "." 
+ "WimAdaptor"; channel.queueDeclare(queueName, true, false, false, null); Logger.info("Binding queue to topics..."); channel.queueBind(queueName, exchangeName, "platform.management.plugin.register"); Logger.info("Bound to topic \"platform.platform.management.plugin.register\""); channel.queueBind(queueName, exchangeName, "platform.management.plugin.deregister"); Logger.info("Bound to topic \"platform.platform.management.plugin.deregister\""); channel.queueBind(queueName, exchangeName, "infrastructure.#.wan.#"); Logger.info("[northbound] RabbitMqConsumer - bound to topic \"infrastructure.#.wan.#\""); } catch (TimeoutException e) { Logger.error(e.getMessage(), e); } catch (KeyManagementException e) { Logger.error(e.getMessage(), e); } catch (NoSuchAlgorithmException e) { Logger.error(e.getMessage(), e); } catch (URISyntaxException e) { Logger.error(e.getMessage(), e); } catch (IOException e) { Logger.error(e.getMessage(), e); } } public static RabbitMqHelperSingleton getInstance(){ if (myInstance==null){ myInstance= new RabbitMqHelperSingleton(); } return myInstance; } public Channel getChannel(){ return this.channel; } /** * Utility function to parse the broker configuration file. * * @return a Java Properties object representing the json config as a Key-Value map */ private Properties parseConfigFile() { Properties prop = new Properties(); try { InputStreamReader in = new InputStreamReader(new FileInputStream(configFilePath), Charset.forName("UTF-8")); JSONTokener tokener = new JSONTokener(in); JSONObject jsonObject = (JSONObject) tokener.nextValue(); String brokerUrl = jsonObject.getString("broker_url"); String exchange = jsonObject.getString("exchange"); prop.put("broker_url", brokerUrl); prop.put("exchange", exchange); } catch (FileNotFoundException e) { Logger.error("Unable to load Broker Config file", e); System.exit(1); } return prop; } public String getExchangeName() { return exchangeName; } public String getQueueName() { return queueName; } }
apache-2.0
LucidDB/luciddb
farrago/src/org/eigenbase/sql/ExplicitOperatorBinding.java
3062
/* // Licensed to DynamoBI Corporation (DynamoBI) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. DynamoBI licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. */ package org.eigenbase.sql; import org.eigenbase.reltype.*; import org.eigenbase.sql.parser.*; import org.eigenbase.sql.validate.*; import org.eigenbase.util.*; /** * <code>ExplicitOperatorBinding</code> implements {@link SqlOperatorBinding} * via an underlying array of known operand types. 
* * @author Wael Chatila * @version $Id$ */ public class ExplicitOperatorBinding extends SqlOperatorBinding { //~ Instance fields -------------------------------------------------------- private final RelDataType [] types; private final SqlOperatorBinding delegate; //~ Constructors ----------------------------------------------------------- public ExplicitOperatorBinding( SqlOperatorBinding delegate, RelDataType [] types) { this( delegate, delegate.getTypeFactory(), delegate.getOperator(), types); } public ExplicitOperatorBinding( RelDataTypeFactory typeFactory, SqlOperator operator, RelDataType [] types) { this(null, typeFactory, operator, types); } private ExplicitOperatorBinding( SqlOperatorBinding delegate, RelDataTypeFactory typeFactory, SqlOperator operator, RelDataType [] types) { super(typeFactory, operator); this.types = types; this.delegate = delegate; } //~ Methods ---------------------------------------------------------------- // implement SqlOperatorBinding public int getOperandCount() { return types.length; } // implement SqlOperatorBinding public RelDataType getOperandType(int ordinal) { return types[ordinal]; } public EigenbaseException newError( SqlValidatorException e) { if (delegate != null) { return delegate.newError(e); } else { return SqlUtil.newContextException(SqlParserPos.ZERO, e); } } public boolean isOperandNull(int ordinal, boolean allowCast) { // NOTE jvs 1-May-2006: This call is only relevant // for SQL validation, so anywhere else, just say // everything's OK. return false; } } // End ExplicitOperatorBinding.java
apache-2.0
birkoff88/HackBG
Core-Java v2/Week2/Generics/src/ImplementOnOff.java
431
import java.util.Collection;

/**
 * Wraps a collection of integers with "on/off" (toggle) semantics: adding an
 * element that is already present removes it instead of inserting a duplicate.
 *
 * <p>The wrapped collection is shared, not copied, so mutations made through
 * this class are visible to the caller's collection and vice versa.
 */
public class ImplementOnOff {

    // Backing collection; shared with the caller by design.
    private final Collection<Integer> coll;

    /**
     * Creates a toggle view over the given collection.
     *
     * @param collection backing collection to toggle elements in
     */
    public ImplementOnOff(Collection<Integer> collection) {
        this.coll = collection;
    }

    /**
     * Toggles {@code element}: removes it if present, otherwise adds it.
     *
     * @param element value to toggle
     */
    public void add(int element) {
        if (coll.contains(element)) {
            // int autoboxes to Integer, so this resolves to
            // Collection.remove(Object) — removal by value, not by index.
            coll.remove(element);
        } else {
            coll.add(element);
        }
    }

    /** Returns the backing collection's string form. */
    @Override
    public String toString() {
        return this.coll.toString();
    }
}
apache-2.0
danielsoro/crest
tomitribe-crest/src/main/java/org/tomitribe/crest/cmds/OverloadedCmdMethod.java
4015
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.tomitribe.crest.cmds;

import org.tomitribe.crest.cmds.processors.Help;
import org.tomitribe.crest.cmds.processors.OptionParam;
import org.tomitribe.crest.interceptor.internal.InternalInterceptor;
import org.tomitribe.util.Join;

import java.io.PrintStream;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

/**
 * A {@link Cmd} backed by several overloaded {@link CmdMethod}s that share
 * the same command name.  Execution tries each overload in order of required
 * argument count and runs the first one whose parameters parse successfully.
 */
public class OverloadedCmdMethod implements Cmd {

    /** Command name shared by all overloads. */
    private final String name;

    // Overloads ordered by argument-parameter count.
    // NOTE(review): two overloads with the SAME argument count compare equal
    // under this comparator, so the TreeSet silently keeps only one of them —
    // presumably intentional (overloads must differ in arity); confirm before
    // relying on it.
    private final Set<CmdMethod> methods;

    public OverloadedCmdMethod(final String name) {
        this.name = name;
        this.methods = new TreeSet<>(new Comparator<CmdMethod>() {
            @Override
            public int compare(final CmdMethod a, final CmdMethod b) {
                // Integer.compare avoids the subtraction idiom, which can
                // overflow in the general case.
                return Integer.compare(
                        a.getArgumentParameters().size(),
                        b.getArgumentParameters().size());
            }
        });
    }

    /** Returns the usage strings of all overloads, one per line. */
    @Override
    public String getUsage() {
        final StringBuilder sb = new StringBuilder();
        for (final CmdMethod method : methods) {
            sb.append(method.getUsage()).append('\n');
        }
        return sb.toString().trim();
    }

    @Override
    public String getName() {
        return name;
    }

    /**
     * Executes the first overload whose parameters can be parsed from
     * {@code rawArgs}.
     *
     * <p>A parse failure moves on to the next overload; the failure of the
     * LAST overload is rethrown so the user sees a parse error rather than a
     * generic "no match".
     *
     * @throws IllegalStateException if there are no overloads at all
     */
    @Override
    public Object exec(final Map<Class<?>, InternalInterceptor> globalInterceptors,
                       final String... rawArgs) {
        final Iterator<CmdMethod> iterator = methods.iterator();
        while (iterator.hasNext()) {
            final CmdMethod method = iterator.next();

            final List<Object> args;
            try {
                args = method.parse(rawArgs);
            } catch (final Exception e) {
                if (iterator.hasNext()) {
                    continue; // try the next overload
                }
                throw CmdMethod.toRuntimeException(e);
            }

            return method.exec(globalInterceptors, args);
        }

        throw new IllegalStateException(String.format(
                "Unable to find matching method for command: %s",
                Join.join(" ", rawArgs)));
    }

    /**
     * Prints a combined help screen: one usage line per overload followed by
     * the merged option documentation of all overloads.
     *
     * @throws IllegalStateException if there are no overloads
     */
    @Override
    public void help(final PrintStream out) {
        if (methods.isEmpty()) {
            throw new IllegalStateException("No method in group: " + name);
        }

        out.println();

        { // usage
            final Iterator<CmdMethod> it = methods.iterator();
            out.printf("Usage: %s%n", it.next().getUsage());
            while (it.hasNext()) {
                out.printf("       %s%n", it.next().getUsage());
            }
        }

        out.println();

        // Merge option docs across overloads; later overloads win on key
        // collisions (TreeMap keeps a sorted, de-duplicated view).
        final Map<String, OptionParam> options = new TreeMap<>();
        for (final CmdMethod method : methods) {
            options.putAll(method.getOptionParameters());
        }

        final CmdMethod first = methods.iterator().next();
        Help.optionHelp(first.getMethod().getDeclaringClass(), getName(), options.values(), out);
    }

    /** Registers another overload for this command name. */
    public void add(final CmdMethod cmd) {
        methods.add(cmd);
    }

    @Override
    public Collection<String> complete(String buffer, int cursorPosition) {
        throw new UnsupportedOperationException();
    }
}
apache-2.0