repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
blindio/Prospero
java/core/io/github/blindio/prospero/core/elements/ClickableElement.java
1232
/******************************************************************************* * Copyright 2014 S. Thorson Little * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package io.github.blindio.prospero.core.elements; import io.github.blindio.prospero.core.utils.Context; /** * Base element for all clickable elements (i.e. radio, link, checkbox and * button) * * @author tlittle */ public abstract class ClickableElement extends BaseElement { public ClickableElement(String locator) { super(locator); } public void click() { Context.getBrowserDriver().delay(); Context.getBrowserDriver().click(getLocator()); } }
apache-2.0
HubSpot/jinjava
src/test/java/com/hubspot/jinjava/interpret/VariableFunctionTest.java
2108
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hubspot.jinjava.interpret; import static org.assertj.core.api.Assertions.assertThat; import com.hubspot.jinjava.Jinjava; import java.util.HashMap; import java.util.Map; import org.junit.Test; public class VariableFunctionTest { private static final DynamicVariableResolver VARIABLE_FUNCTION = s -> { switch (s) { case "name": return "Jared"; case "title": return "Mr."; case "surname": return "Stehler"; default: return null; } }; @Test public void willUseTheFunctionToPopulateVariables() { final Jinjava jinjava = new Jinjava(); jinjava.getGlobalContext().setDynamicVariableResolver(VARIABLE_FUNCTION); final Map<String, Object> context = new HashMap<>(); final String template = "<div>Hello, {{ title }} {{ name }} {{ surname }}!</div>"; final String renderedTemplate = jinjava.render(template, context); assertThat(renderedTemplate).isEqualTo("<div>Hello, Mr. Jared Stehler!</div>"); } @Test public void willPreferTheContextOverTheFunctionToPopulateVariables() { final Jinjava jinjava = new Jinjava(); jinjava.getGlobalContext().setDynamicVariableResolver(VARIABLE_FUNCTION); final Map<String, Object> context = new HashMap<>(); context.put("name", "Greg"); final String template = "<div>Hello, {{ title }} {{ name }} {{ surname }}!</div>"; final String renderedTemplate = jinjava.render(template, context); assertThat(renderedTemplate).isEqualTo("<div>Hello, Mr. Greg Stehler!</div>"); } }
apache-2.0
yntelectual/nlighten
nlighten-backend/src/main/java/me/nlighten/backend/websocket/MessageDecoder.java
717
package me.nlighten.backend.websocket; import javax.websocket.DecodeException; import javax.websocket.Decoder; import javax.websocket.EndpointConfig; import com.google.gson.Gson; /** * The Class MessageDecoder. * * @author Lubo */ public class MessageDecoder implements Decoder.Text<Message> { private static Gson gson = new Gson(); @Override public void init(final EndpointConfig config) { System.out.println("init DECODER"); } @Override public void destroy() {} @Override public boolean willDecode(final String s) { return true; } @Override public Message decode(String stringMessage) throws DecodeException { return gson.fromJson(stringMessage, Message.class); } }
apache-2.0
multi-os-engine/moe-core
moe.apple/moe.platform.ios/src/main/java/apple/uikit/protocol/UISearchResultsUpdating.java
1321
/* Copyright 2014-2016 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package apple.uikit.protocol; import apple.uikit.UISearchController; import org.moe.natj.general.ann.Generated; import org.moe.natj.general.ann.Library; import org.moe.natj.general.ann.Runtime; import org.moe.natj.objc.ObjCRuntime; import org.moe.natj.objc.ann.ObjCProtocolName; import org.moe.natj.objc.ann.Selector; @Generated @Library("UIKit") @Runtime(ObjCRuntime.class) @ObjCProtocolName("UISearchResultsUpdating") public interface UISearchResultsUpdating { /** * Called when the search bar's text or scope has changed or when the search bar becomes first responder. */ @Generated @Selector("updateSearchResultsForSearchController:") void updateSearchResultsForSearchController(UISearchController searchController); }
apache-2.0
aminmf/crawljax
examples/src/test/java/com/crawljax/plugins/testilizer/generated/claroline_INIT/GeneratedTestCase39.java
17725
package com.crawljax.plugins.testilizer.generated.claroline_INIT; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; import org.junit.*; import static org.junit.Assert.*; import org.openqa.selenium.*; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.firefox.FirefoxProfile; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.NodeList; import com.crawljax.forms.RandomInputValueGenerator; import com.crawljax.util.DomUtils; /* * Generated @ Tue Apr 08 22:59:20 PDT 2014 */ public class GeneratedTestCase39 { private WebDriver driver; private String url; private boolean acceptNextAlert = true; private StringBuffer verificationErrors = new StringBuffer(); private DOMElement element; private DOMElement parentElement; private ArrayList<DOMElement> childrenElements = new ArrayList<DOMElement>(); private String DOM = null; boolean getCoverageReport = false; @Before public void setUp() throws Exception { // Setting the JavaScript code coverage switch getCoverageReport = com.crawljax.plugins.testilizer.Testilizer.getCoverageReport(); if (getCoverageReport) driver = new FirefoxDriver(getProfile()); else driver = new FirefoxDriver(); url = "http://localhost:8888/claroline-1.11.7/index.php?logout=true"; driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS); } public static FirefoxProfile getProfile() { FirefoxProfile profile = new FirefoxProfile(); profile.setPreference("network.proxy.http", "localhost"); profile.setPreference("network.proxy.http_port", 3128); profile.setPreference("network.proxy.type", 1); /* use proxy for everything, including localhost */ profile.setPreference("network.proxy.no_proxies_on", ""); return profile; } @After public void tearDown() throws Exception { if (getCoverageReport) ((JavascriptExecutor) driver).executeScript(" if (window.jscoverage_report) {return 
jscoverage_report('CodeCoverageReport');}"); driver.quit(); String verificationErrorString = verificationErrors.toString(); if (!"".equals(verificationErrorString)) { fail(verificationErrorString); } } /* * Test Cases */ @Test public void method39(){ driver.get(url); //From state 0 to state 147 //Eventable{eventType=click, identification=cssSelector button[type="submit"], element=Element{node=[BUTTON: null], tag=BUTTON, text=Enter, attributes={tabindex=3, type=submit}}, source=StateVertexImpl{id=0, name=index}, target=StateVertexImpl{id=147, name=state147}} mutateDOMTree(0); checkState0_OriginalAssertions(); checkState0_ReusedAssertions(); checkState0_GeneratedAssertions(); checkState0_LearnedAssertions(); checkState0_AllAssertions(); checkState0_RandAssertions1(); checkState0_RandAssertions2(); checkState0_RandAssertions3(); checkState0_RandAssertions4(); checkState0_RandAssertions5(); driver.findElement(By.id("login")).clear(); driver.findElement(By.id("login")).sendKeys("nainy"); driver.findElement(By.id("password")).clear(); driver.findElement(By.id("password")).sendKeys("nainy"); driver.findElement(By.cssSelector("button[type=\"submit\"]")).click(); //From state 147 to state 2 //Eventable{eventType=click, identification=text Platform administration, element=Element{node=[A: null], tag=A, text=Platform administration, attributes={href=/claroline-1.11.7/claroline/admin/, target=_top}}, source=StateVertexImpl{id=147, name=state147}, target=StateVertexImpl{id=2, name=state2}} mutateDOMTree(147); checkState147_OriginalAssertions(); checkState147_ReusedAssertions(); checkState147_GeneratedAssertions(); checkState147_LearnedAssertions(); checkState147_AllAssertions(); checkState147_RandAssertions1(); checkState147_RandAssertions2(); checkState147_RandAssertions3(); checkState147_RandAssertions4(); checkState147_RandAssertions5(); driver.findElement(By.linkText("Platform administration")).click(); //From state 2 to state 23 //Eventable{eventType=click, 
identification=text Manage classes, element=Element{node=[A: null], tag=A, text=Manage classes, attributes={href=admin_class.php}}, source=StateVertexImpl{id=2, name=state2}, target=StateVertexImpl{id=23, name=state23}} mutateDOMTree(2); checkState2_OriginalAssertions(); checkState2_ReusedAssertions(); checkState2_GeneratedAssertions(); checkState2_LearnedAssertions(); checkState2_AllAssertions(); checkState2_RandAssertions1(); checkState2_RandAssertions2(); checkState2_RandAssertions3(); checkState2_RandAssertions4(); checkState2_RandAssertions5(); driver.findElement(By.linkText("Manage classes")).click(); //From state 23 to state 24 //Eventable{eventType=click, identification=text Create a new class, element=Element{node=[A: null], tag=A, text=Create a new class, attributes={href=/claroline-1.11.7/claroline/admin/admin_class.php?cmd=rqAdd, style=background-image: url(/claroline-1.11.7/web/img/class.png?1232379976); background-repeat: no-repeat; background-position: left center; padding-left: 20px;}}, source=StateVertexImpl{id=23, name=state23}, target=StateVertexImpl{id=24, name=state24}} mutateDOMTree(23); checkState23_OriginalAssertions(); checkState23_ReusedAssertions(); checkState23_GeneratedAssertions(); checkState23_LearnedAssertions(); checkState23_AllAssertions(); checkState23_RandAssertions1(); checkState23_RandAssertions2(); checkState23_RandAssertions3(); checkState23_RandAssertions4(); checkState23_RandAssertions5(); driver.findElement(By.linkText("Create a new class")).click(); //From state 24 to state 25 //Eventable{eventType=click, identification=cssSelector input[type="submit"], element=Element{node=[INPUT: null], tag=INPUT, text=, attributes={type=submit, value= Ok }}, source=StateVertexImpl{id=24, name=state24}, target=StateVertexImpl{id=25, name=state25}} mutateDOMTree(24); checkState24_OriginalAssertions(); checkState24_ReusedAssertions(); checkState24_GeneratedAssertions(); checkState24_LearnedAssertions(); checkState24_AllAssertions(); 
checkState24_RandAssertions1(); checkState24_RandAssertions2(); checkState24_RandAssertions3(); checkState24_RandAssertions4(); checkState24_RandAssertions5(); driver.findElement(By.name("class_name")).clear(); driver.findElement(By.name("class_name")).sendKeys("EG"); driver.findElement(By.cssSelector("input[type=\"submit\"]")).click(); //From state 25 to state 14 //Eventable{eventType=click, identification=text Logout, element=Element{node=[A: null], tag=A, text=Logout, attributes={href=/claroline-1.11.7/index.php?logout=true, target=_top}}, source=StateVertexImpl{id=25, name=state25}, target=StateVertexImpl{id=14, name=state14}} mutateDOMTree(25); checkState25_OriginalAssertions(); checkState25_ReusedAssertions(); checkState25_GeneratedAssertions(); checkState25_LearnedAssertions(); checkState25_AllAssertions(); checkState25_RandAssertions1(); checkState25_RandAssertions2(); checkState25_RandAssertions3(); checkState25_RandAssertions4(); checkState25_RandAssertions5(); driver.findElement(By.linkText("Logout")).click(); //Sink node at state 14 mutateDOMTree(14); checkState14_OriginalAssertions(); checkState14_ReusedAssertions(); checkState14_GeneratedAssertions(); checkState14_LearnedAssertions(); checkState14_AllAssertions(); checkState14_RandAssertions1(); checkState14_RandAssertions2(); checkState14_RandAssertions3(); checkState14_RandAssertions4(); checkState14_RandAssertions5(); } public void checkState0_OriginalAssertions(){ } public void checkState0_ReusedAssertions(){ } public void checkState0_GeneratedAssertions(){ } public void checkState0_LearnedAssertions(){ } public void checkState0_AllAssertions(){ } public void checkState0_RandAssertions1(){ } public void checkState0_RandAssertions2(){ } public void checkState0_RandAssertions3(){ } public void checkState0_RandAssertions4(){ } public void checkState0_RandAssertions5(){ } public void checkState147_OriginalAssertions(){ } public void checkState147_ReusedAssertions(){ } public void 
checkState147_GeneratedAssertions(){ } public void checkState147_LearnedAssertions(){ } public void checkState147_AllAssertions(){ } public void checkState147_RandAssertions1(){ } public void checkState147_RandAssertions2(){ } public void checkState147_RandAssertions3(){ } public void checkState147_RandAssertions4(){ } public void checkState147_RandAssertions5(){ } public void checkState2_OriginalAssertions(){ } public void checkState2_ReusedAssertions(){ } public void checkState2_GeneratedAssertions(){ } public void checkState2_LearnedAssertions(){ } public void checkState2_AllAssertions(){ } public void checkState2_RandAssertions1(){ } public void checkState2_RandAssertions2(){ } public void checkState2_RandAssertions3(){ } public void checkState2_RandAssertions4(){ } public void checkState2_RandAssertions5(){ } public void checkState23_OriginalAssertions(){ } public void checkState23_ReusedAssertions(){ } public void checkState23_GeneratedAssertions(){ } public void checkState23_LearnedAssertions(){ } public void checkState23_AllAssertions(){ } public void checkState23_RandAssertions1(){ } public void checkState23_RandAssertions2(){ } public void checkState23_RandAssertions3(){ } public void checkState23_RandAssertions4(){ } public void checkState23_RandAssertions5(){ } public void checkState24_OriginalAssertions(){ } public void checkState24_ReusedAssertions(){ } public void checkState24_GeneratedAssertions(){ } public void checkState24_LearnedAssertions(){ } public void checkState24_AllAssertions(){ } public void checkState24_RandAssertions1(){ } public void checkState24_RandAssertions2(){ } public void checkState24_RandAssertions3(){ } public void checkState24_RandAssertions4(){ } public void checkState24_RandAssertions5(){ } public void checkState25_OriginalAssertions(){ if(!(driver.findElement(By.cssSelector("div.claroDialogBox.boxSuccess")).getText().matches("^[\\s\\S]*The new class has been 
created[\\s\\S]*$"))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // original assertion } public void checkState25_ReusedAssertions(){ } public void checkState25_GeneratedAssertions(){ } public void checkState25_LearnedAssertions(){ } public void checkState25_AllAssertions(){ if(!(driver.findElement(By.cssSelector("div.claroDialogBox.boxSuccess")).getText().matches("^[\\s\\S]*The new class has been created[\\s\\S]*$"))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // original assertion } public void checkState25_RandAssertions1(){ } public void checkState25_RandAssertions2(){ } public void checkState25_RandAssertions3(){ } public void checkState25_RandAssertions4(){ } public void checkState25_RandAssertions5(){ } public void checkState14_OriginalAssertions(){ } public void checkState14_ReusedAssertions(){ } public void checkState14_GeneratedAssertions(){ } public void checkState14_LearnedAssertions(){ } public void checkState14_AllAssertions(){ } public void checkState14_RandAssertions1(){ } public void checkState14_RandAssertions2(){ } public void checkState14_RandAssertions3(){ } public void checkState14_RandAssertions4(){ } public void checkState14_RandAssertions5(){ } /* * Auxiliary methods */ private boolean isElementPresent(By by) { try { driver.findElement(by); return true; } catch (NoSuchElementException e) { return false; } } private boolean isElementPatternTagPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) { try { String source = driver.getPageSource(); Document dom = DomUtils.asDocument(source); NodeList nodeList = dom.getElementsByTagName(element.getTagName()); org.w3c.dom.Element sourceElement = null; for (int i = 0; i < nodeList.getLength(); i++){ sourceElement = (org.w3c.dom.Element) nodeList.item(i); // check parent node's tag and attributes String parentTagName = sourceElement.getParentNode().getNodeName(); if 
(!parentTagName.equals(parent.getTagName())) continue; // check children nodes' tags HashSet<String> childrenTagNameFromDOM = new HashSet<String>(); for (int j=0; j<sourceElement.getChildNodes().getLength();j++) childrenTagNameFromDOM.add(sourceElement.getChildNodes().item(j).getNodeName()); HashSet<String> childrenTagNameToTest = new HashSet<String>(); for (int k=0; k<children.size();k++) childrenTagNameToTest.add(children.get(k).getTagName()); if (!childrenTagNameToTest.equals(childrenTagNameFromDOM)) continue; return true; } } catch (IOException e) { e.printStackTrace(); } return false; } private boolean isElementPatternFullPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) { try { String source = driver.getPageSource(); Document dom = DomUtils.asDocument(source); NodeList nodeList = dom.getElementsByTagName(element.getTagName()); org.w3c.dom.Element sourceElement = null; for (int i = 0; i < nodeList.getLength(); i++){ // check node's attributes sourceElement = (org.w3c.dom.Element) nodeList.item(i); NamedNodeMap elementAttList = sourceElement.getAttributes(); HashSet<String> elemetAtts = new HashSet<String>(); for (int j = 0; j < elementAttList.getLength(); j++) elemetAtts.add(elementAttList.item(j).getNodeName() + "=\"" + elementAttList.item(j).getNodeValue() + "\""); if (!element.getAttributes().equals(elemetAtts)) continue; // check parent node's tag and attributes String parentTagName = sourceElement.getParentNode().getNodeName(); if (!parentTagName.equals(parent.getTagName())) continue; NamedNodeMap parentAttList = sourceElement.getParentNode().getAttributes(); HashSet<String> parentAtts = new HashSet<String>(); for (int j = 0; j < parentAttList.getLength(); j++) parentAtts.add(parentAttList.item(j).getNodeName() + "=\"" + parentAttList.item(j).getNodeValue() + "\""); if (!parent.getAttributes().equals(parentAtts)) continue; // check children nodes' tags HashSet<String> childrenTagNameFromDOM = new HashSet<String>(); for (int 
j=0; j<sourceElement.getChildNodes().getLength();j++) childrenTagNameFromDOM.add(sourceElement.getChildNodes().item(j).getNodeName()); HashSet<String> childrenTagNameToTest = new HashSet<String>(); for (int k=0; k<children.size();k++) childrenTagNameToTest.add(children.get(k).getTagName()); if (!childrenTagNameToTest.equals(childrenTagNameFromDOM)) continue; // check children nodes' attributes HashSet<HashSet<String>> childrenAttsFromDOM = new HashSet<HashSet<String>>(); for (int j=0; j<sourceElement.getChildNodes().getLength();j++){ NamedNodeMap childAttListFromDOM = sourceElement.getChildNodes().item(j).getAttributes(); HashSet<String> childAtts = new HashSet<String>(); if (childAttListFromDOM!=null) for (int k = 0; k < childAttListFromDOM.getLength(); k++) childAtts.add(childAttListFromDOM.item(k).getNodeName() + "=\"" + childAttListFromDOM.item(k).getNodeValue() + "\""); childrenAttsFromDOM.add(childAtts); } HashSet<HashSet<String>> childrenAttsToTest = new HashSet<HashSet<String>>(); for (int k=0; k<children.size();k++) childrenAttsToTest.add(children.get(k).getAttributes()); if (!childrenAttsToTest.equals(childrenAttsFromDOM)) continue; return true; } } catch (IOException e) { e.printStackTrace(); } return false; } private boolean isAlertPresent() { try { driver.switchTo().alert(); return true; } catch (NoAlertPresentException e) { return false; } } private String closeAlertAndGetItsText() { try { Alert alert = driver.switchTo().alert(); String alertText = alert.getText(); if (acceptNextAlert) { alert.accept(); } else { alert.dismiss(); } return alertText; } finally { acceptNextAlert = true; } } public class DOMElement { private String tagName; private String textContent; private HashSet<String> attributes = new HashSet<String>(); public DOMElement(String tagName, String textContent, ArrayList<String> attributes){ this.tagName = tagName; this.textContent = textContent; if (attributes.get(0)!="") for (int i=0; i<attributes.size();i++) 
this.attributes.add(attributes.get(i)); } public String getTagName() { return tagName; } public String getTextContent() { return textContent; } public HashSet<String> getAttributes() { return attributes; } } private void mutateDOMTree(int stateID){ // execute JavaScript code to mutate DOM String code = com.crawljax.plugins.testilizer.Testilizer.mutateDOMTreeCode(stateID); if (code!= null){ long RandomlySelectedDOMElementID = (long) ((JavascriptExecutor)driver).executeScript(code); int MutationOperatorCode = com.crawljax.plugins.testilizer.Testilizer.MutationOperatorCode; int StateToBeMutated = com.crawljax.plugins.testilizer.Testilizer.StateToBeMutated; com.crawljax.plugins.testilizer.Testilizer.SelectedRandomElementInDOM[MutationOperatorCode][StateToBeMutated] = (int) RandomlySelectedDOMElementID; } } }
apache-2.0
Clydeside/ALipatov
chapter_002/src/test/java/ru/job4j/tracker/StubInputTest.java
2588
package ru.job4j.tracker; import org.junit.Test; import java.util.ArrayList; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.nullValue; import static org.junit.Assert.assertThat; public class StubInputTest { @Test public void addTest() { Tracker tracker = new Tracker(); Item item = new Item("1"); String id = item.getId(); Input input = new StubInput(new String[]{"0", id, "test name", "desc", "6"}); new StartUI(input, tracker).startWorking(); assertThat(id, is(tracker.findAll().get(0).getId())); tracker.delete(item); } @Test public void updateTest() { Tracker tracker = new Tracker(); Item item = new Item("1"); tracker.add(item); String id = item.getId(); Input input = new StubInput(new String[]{"2", id, "test name", "desc", "6"}); new StartUI(input, tracker).startWorking(); assertThat(tracker.findById(id).getName(), is("test name")); tracker.delete(item); } @Test public void deleteTest() { Tracker tracker = new Tracker(); Item item1 = tracker.add(new Item("1")); Item item2 = tracker.add(new Item("2")); Item item3 = tracker.add(new Item("3")); String id = item2.getId(); Input input = new StubInput(new String[]{"3", id, "6"}); new StartUI(input, tracker).startWorking(); assertThat(tracker.findAll().size(), is(2)); tracker.delete(item1); tracker.delete(item3); } @Test public void findByIdTest() { Tracker tracker = new Tracker(); Item item1 = tracker.add(new Item("name1")); Item item2 = tracker.add(new Item("name2")); Item item3 = tracker.add(new Item("name3")); String result = item2.getId(); Input input = new StubInput(new String[]{"4", item2.getId(), "6"}); new StartUI(input, tracker).startWorking(); assertThat(result, is(tracker.findAll().get(1).getId())); assertThat(tracker.findById(item2.getId()).getId(), is(result)); tracker.delete(item1); tracker.delete(item2); tracker.delete(item3); } @Test public void findByNameTest() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("1", "name")); Input input = new StubInput(new 
String[]{"5", item.getName(), "6"}); new StartUI(input, tracker).startWorking(); assertThat(tracker.findByName("name").get(0).getName(), is("name") ); tracker.delete(item); } }
apache-2.0
Sellegit/j2objc
runtime/src/main/java/apple/coreanimation/CAEmitterCell.java
7873
package apple.coreanimation;

import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;
import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.foundation.*;
import apple.coreimage.*;
import apple.coretext.*;
import apple.opengles.*;

/**
 * J2ObjC binding for Core Animation's {@code CAEmitterCell}: the definition of a single
 * particle source used by a particle-emitter layer. Every accessor below is a thin
 * {@code native} bridge to the Objective-C property named in its {@code @Mapping}
 * annotation; there is no Java-side logic in this class.
 * <p>
 * NOTE(review): per-property semantics (units, default values, valid ranges) are not
 * visible here — consult Apple's CAEmitterCell reference before relying on them.
 */
@Library("QuartzCore/QuartzCore.h")
@Mapping("CAEmitterCell")
public class CAEmitterCell extends NSObject implements NSCoding, CAMediaTiming {

    /** Maps to the Objective-C {@code -init} initializer. */
    @Mapping("init")
    public CAEmitterCell() {
    }

    // --- Identity and emission rate/lifetime ---

    @Mapping("name")
    public native String getName();
    @Mapping("setName:")
    public native void setName(String v);

    @Mapping("isEnabled")
    public native boolean isEnabled();
    @Mapping("setEnabled:")
    public native void setEnabled(boolean v);

    @Mapping("birthRate")
    public native float getBirthRate();
    @Mapping("setBirthRate:")
    public native void setBirthRate(float v);

    @Mapping("lifetime")
    public native float getLifetime();
    @Mapping("setLifetime:")
    public native void setLifetime(float v);

    @Mapping("lifetimeRange")
    public native float getLifetimeRange();
    @Mapping("setLifetimeRange:")
    public native void setLifetimeRange(float v);

    // --- Emission direction and initial velocity ---
    // @MachineSizedFloat marks values bridged as CGFloat (32-bit on armv7, 64-bit on arm64).

    @Mapping("emissionLatitude")
    public native @MachineSizedFloat double getEmissionLatitude();
    @Mapping("setEmissionLatitude:")
    public native void setEmissionLatitude(@MachineSizedFloat double v);

    @Mapping("emissionLongitude")
    public native @MachineSizedFloat double getEmissionLongitude();
    @Mapping("setEmissionLongitude:")
    public native void setEmissionLongitude(@MachineSizedFloat double v);

    @Mapping("emissionRange")
    public native @MachineSizedFloat double getEmissionRange();
    @Mapping("setEmissionRange:")
    public native void setEmissionRange(@MachineSizedFloat double v);

    @Mapping("velocity")
    public native @MachineSizedFloat double getVelocity();
    @Mapping("setVelocity:")
    public native void setVelocity(@MachineSizedFloat double v);

    @Mapping("velocityRange")
    public native @MachineSizedFloat double getVelocityRange();
    @Mapping("setVelocityRange:")
    public native void setVelocityRange(@MachineSizedFloat double v);

    // --- Acceleration applied to emitted particles ---

    @Mapping("xAcceleration")
    public native @MachineSizedFloat double getXAcceleration();
    @Mapping("setXAcceleration:")
    public native void setXAcceleration(@MachineSizedFloat double v);

    @Mapping("yAcceleration")
    public native @MachineSizedFloat double getYAcceleration();
    @Mapping("setYAcceleration:")
    public native void setYAcceleration(@MachineSizedFloat double v);

    @Mapping("zAcceleration")
    public native @MachineSizedFloat double getZAcceleration();
    @Mapping("setZAcceleration:")
    public native void setZAcceleration(@MachineSizedFloat double v);

    // --- Scale and spin over the particle's lifetime ---

    @Mapping("scale")
    public native @MachineSizedFloat double getScale();
    @Mapping("setScale:")
    public native void setScale(@MachineSizedFloat double v);

    @Mapping("scaleRange")
    public native @MachineSizedFloat double getScaleRange();
    @Mapping("setScaleRange:")
    public native void setScaleRange(@MachineSizedFloat double v);

    @Mapping("scaleSpeed")
    public native @MachineSizedFloat double getScaleSpeed();
    @Mapping("setScaleSpeed:")
    public native void setScaleSpeed(@MachineSizedFloat double v);

    @Mapping("spin")
    public native @MachineSizedFloat double getSpin();
    @Mapping("setSpin:")
    public native void setSpin(@MachineSizedFloat double v);

    @Mapping("spinRange")
    public native @MachineSizedFloat double getSpinRange();
    @Mapping("setSpinRange:")
    public native void setSpinRange(@MachineSizedFloat double v);

    // --- Color and per-channel variation/speed ---

    @Mapping("color")
    public native CGColor getColor();
    @Mapping("setColor:")
    public native void setColor(CGColor v);

    @Mapping("redRange")
    public native float getRedRange();
    @Mapping("setRedRange:")
    public native void setRedRange(float v);

    @Mapping("greenRange")
    public native float getGreenRange();
    @Mapping("setGreenRange:")
    public native void setGreenRange(float v);

    @Mapping("blueRange")
    public native float getBlueRange();
    @Mapping("setBlueRange:")
    public native void setBlueRange(float v);

    @Mapping("alphaRange")
    public native float getAlphaRange();
    @Mapping("setAlphaRange:")
    public native void setAlphaRange(float v);

    @Mapping("redSpeed")
    public native float getRedSpeed();
    @Mapping("setRedSpeed:")
    public native void setRedSpeed(float v);

    @Mapping("greenSpeed")
    public native float getGreenSpeed();
    @Mapping("setGreenSpeed:")
    public native void setGreenSpeed(float v);

    @Mapping("blueSpeed")
    public native float getBlueSpeed();
    @Mapping("setBlueSpeed:")
    public native void setBlueSpeed(float v);

    @Mapping("alphaSpeed")
    public native float getAlphaSpeed();
    @Mapping("setAlphaSpeed:")
    public native void setAlphaSpeed(float v);

    // --- Particle contents (image) and texture filtering ---

    @Mapping("contents")
    public native Object getContents();
    @Mapping("setContents:")
    public native void setContents(Object v);

    @Mapping("contentsRect")
    public native CGRect getContentsRect();
    @Mapping("setContentsRect:")
    public native void setContentsRect(CGRect v);

    @Mapping("minificationFilter")
    public native CAFilter getMinificationFilter();
    @Mapping("setMinificationFilter:")
    public native void setMinificationFilter(CAFilter v);

    @Mapping("magnificationFilter")
    public native CAFilter getMagnificationFilter();
    @Mapping("setMagnificationFilter:")
    public native void setMagnificationFilter(CAFilter v);

    @Mapping("minificationFilterBias")
    public native float getMinificationFilterBias();
    @Mapping("setMinificationFilterBias:")
    public native void setMinificationFilterBias(float v);

    // --- Sub-cells and style dictionary ---

    @Mapping("emitterCells")
    public native NSArray<CAEmitterCell> getEmitterCells();
    @Mapping("setEmitterCells:")
    public native void setEmitterCells(NSArray<CAEmitterCell> v);

    @Mapping("style")
    public native NSDictionary<NSString, NSObject> getStyle();
    @Mapping("setStyle:")
    public native void setStyle(NSDictionary<NSString, NSObject> v);

    // --- CAMediaTiming protocol properties ---

    @Mapping("beginTime")
    public native double getBeginTime();
    @Mapping("setBeginTime:")
    public native void setBeginTime(double v);

    @Mapping("duration")
    public native double getDuration();
    @Mapping("setDuration:")
    public native void setDuration(double v);

    @Mapping("speed")
    public native float getSpeed();
    @Mapping("setSpeed:")
    public native void setSpeed(float v);

    @Mapping("timeOffset")
    public native double getTimeOffset();
    @Mapping("setTimeOffset:")
    public native void setTimeOffset(double v);

    @Mapping("repeatCount")
    public native float getRepeatCount();
    @Mapping("setRepeatCount:")
    public native void setRepeatCount(float v);

    @Mapping("repeatDuration")
    public native double getRepeatDuration();
    @Mapping("setRepeatDuration:")
    public native void setRepeatDuration(double v);

    @Mapping("autoreverses")
    public native boolean autoreverses();
    @Mapping("setAutoreverses:")
    public native void setAutoreverses(boolean v);

    @Mapping("fillMode")
    public native CAFillMode getFillMode();
    @Mapping("setFillMode:")
    public native void setFillMode(CAFillMode v);

    // --- Archiving, factory and NSCoding support ---

    /** Maps to {@code -shouldArchiveValueForKey:}. */
    @Mapping("shouldArchiveValueForKey:")
    public native boolean shouldArchiveValue(String key);

    /** Factory method; maps to {@code +emitterCell}. */
    @Mapping("emitterCell")
    public static native CAEmitterCell create();

    /** Maps to {@code +defaultValueForKey:}. */
    @Mapping("defaultValueForKey:")
    public static native Object getDefaultValue(String key);

    /** NSCoding support; maps to {@code -encodeWithCoder:}. */
    @Mapping("encodeWithCoder:")
    public native void encode(NSCoder aCoder);
}
apache-2.0
charles-cooper/idylfin
src/com/opengamma/analytics/financial/credit/RestructuringClause.java
645
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.credit;

/**
 * The restructuring clause to apply in the event of a credit event deemed to be a restructuring.
 * <p>
 * The first four members use the generic naming convention; the last two use the MarkIt
 * two-letter doc-clause codes.
 * NOTE(review): MarkIt's code set also includes MR/MM for modified and modified-modified
 * restructuring — confirm whether MODRE/MODMODRE above are intended to stand in for them.
 */
public enum RestructuringClause {
  /**
   * Old-Restructuring
   */
  OLDRE,
  /**
   * Modified Restructuring
   */
  MODRE,
  /**
   * Modified-Modified Restructuring
   */
  MODMODRE,
  /**
   * No-Restructuring
   */
  NORE,
  /**
   * Full-Restructuring (MarkIt notation)
   */
  CR,
  /**
   * No-Restructuring (MarkIt notation)
   */
  XR;
}
apache-2.0
maximus0/RocketMQ
rocketmq-tools/src/main/java/com/alibaba/rocketmq/tools/command/consumer/ConsumerStatusSubCommand.java
6137
/**
 * Copyright (C) 2010-2013 Alibaba Group Holding Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.rocketmq.tools.command.consumer;

import java.util.Iterator;
import java.util.Map.Entry;
import java.util.TreeMap;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

import com.alibaba.rocketmq.common.MQVersion;
import com.alibaba.rocketmq.common.MixAll;
import com.alibaba.rocketmq.common.protocol.body.Connection;
import com.alibaba.rocketmq.common.protocol.body.ConsumerConnection;
import com.alibaba.rocketmq.common.protocol.body.ConsumerRunningInfo;
import com.alibaba.rocketmq.remoting.RPCHook;
import com.alibaba.rocketmq.tools.admin.DefaultMQAdminExt;
import com.alibaba.rocketmq.tools.command.MQAdminStartup;
import com.alibaba.rocketmq.tools.command.SubCommand;

/**
 * Queries a consumer's internal data structures.
 * (Original comment, translated: 查询Consumer内部数据结构.)
 *
 * @author shijia.wxr<vintage.wang@gmail.com>
 * @since 2014-07-20
 */
public class ConsumerStatusSubCommand implements SubCommand {

    @Override
    public String commandName() {
        return "consumerStatus";
    }

    @Override
    public String commandDesc() {
        return "Query consumer's internal data structure";
    }

    /**
     * Registers the command's options: -g (required consumer group) and -i (optional client id).
     */
    @Override
    public Options buildCommandlineOptions(Options options) {
        Option opt = new Option("g", "consumerGroup", true, "consumer group name");
        opt.setRequired(true);
        options.addOption(opt);

        opt = new Option("i", "clientId", true, "The consumer's client id");
        opt.setRequired(false);
        options.addOption(opt);

        return options;
    }

    /**
     * Without -i: fetches running info for every connected client of the group, dumps each
     * to a file named {@code <now>/<clientId>}, prints a table, then analyzes subscription
     * consistency and rebalance status across clients.
     * With -i: prints the running info for the single specified client.
     */
    @Override
    public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) {
        DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);

        // Unique instance name so multiple tool invocations do not collide.
        defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));

        try {
            defaultMQAdminExt.start();

            String group = commandLine.getOptionValue('g').trim();
            ConsumerConnection cc = defaultMQAdminExt.examineConsumerConnectionInfo(group);

            if (!commandLine.hasOption('i')) {
                // Print connections (original comment: 打印连接).
                int i = 1;
                long now = System.currentTimeMillis();
                final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable =
                        new TreeMap<String, ConsumerRunningInfo>();
                for (Connection conn : cc.getConnectionSet()) {
                    try {
                        ConsumerRunningInfo consumerRunningInfo =
                                defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId());
                        if (consumerRunningInfo != null) {
                            criTable.put(conn.getClientId(), consumerRunningInfo);
                            String filePath = now + "/" + conn.getClientId();
                            MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
                            System.out.printf("%03d %-40s %-20s %s\n",//
                                i++,//
                                conn.getClientId(),//
                                MQVersion.getVersionDesc(conn.getVersion()),//
                                filePath);
                        }
                    } catch (Exception e) {
                        // Best-effort per client: one unreachable client must not abort the scan.
                        e.printStackTrace();
                    }
                }

                if (!criTable.isEmpty()) {
                    boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
                    // Rebalance can only be judged when all clients share the same subscription.
                    boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);

                    if (subSame) {
                        System.out.println("\n\nSame subscription in the same group of consumer");
                        System.out.printf("\n\nRebalance %s\n", rebalanceOK ? "OK" : "Failed");

                        Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
                        while (it.hasNext()) {
                            Entry<String, ConsumerRunningInfo> next = it.next();
                            String result =
                                    ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
                            if (result.length() > 0) {
                                System.out.println(result);
                            }
                        }
                    } else {
                        System.out
                            .println("\n\nWARN: Different subscription in the same group of consumer!!!");
                    }
                }
            } else {
                String clientId = commandLine.getOptionValue('i').trim();
                ConsumerRunningInfo consumerRunningInfo =
                        defaultMQAdminExt.getConsumerRunningInfo(group, clientId);
                if (consumerRunningInfo != null) {
                    System.out.println(consumerRunningInfo.formatString());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            defaultMQAdminExt.shutdown();
        }
    }

    /**
     * Ad-hoc manual entry point for local testing.
     * NOTE(review): hard-codes a name-server address and consumer group — presumably
     * developer scaffolding, not meant for production use; confirm before relying on it.
     */
    public static void main(String[] args) {
        System.setProperty(MixAll.NAMESRV_ADDR_PROPERTY, "10.235.169.73:9876");
        MQAdminStartup.main(new String[] { new ConsumerStatusSubCommand().commandName(), //
            "-g", "benchmark_consumer" //
        });
    }
}
apache-2.0
carewebframework/carewebframework-core
org.carewebframework.ui-parent/org.carewebframework.ui.core/src/main/java/org/carewebframework/ui/session/SessionControl.java
1239
package org.carewebframework.ui.session; /** * Events used to control session state via administrator functions. */ public enum SessionControl { SHUTDOWN_START, SHUTDOWN_ABORT, SHUTDOWN_PROGRESS, LOCK; public static final String EVENT_ROOT = "SESSION_CONTROL"; private static final String EVENT_PREFIX = EVENT_ROOT + "."; /** * Returns the enum member corresponding to the event name. * * @param eventName The event name. * @return The corresponding member, or null if none. */ public static SessionControl fromEvent(String eventName) { if (eventName.startsWith(EVENT_PREFIX)) { String name = eventName.substring(EVENT_PREFIX.length()).replace(".", "_"); try { return SessionControl.valueOf(name); } catch (Exception e) { return null; } } return null; } private final String eventName; SessionControl() { eventName = EVENT_PREFIX + name().replace("_", "."); } /** * Returns the event name for this member. * * @return The event name. */ public String getEventName() { return eventName; } }
apache-2.0
herickson/terremark-api
src/main/java/com/terremark/api/ComputePoolPerformanceStatistics.java
2447
package com.terremark.api;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for ComputePoolPerformanceStatistics complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="ComputePoolPerformanceStatistics">
 *   &lt;complexContent>
 *     &lt;extension base="{}Resource">
 *       &lt;sequence>
 *         &lt;element name="Hourly" type="{}ComputePoolPerformanceStatistic" minOccurs="0"/>
 *         &lt;element name="Daily" type="{}ComputePoolPerformanceStatistic" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * JAXB-generated binding: a pair of optional (nillable) statistic elements keyed by
 * aggregation period.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ComputePoolPerformanceStatistics", propOrder = {
    "hourly",
    "daily"
})
@javax.xml.bind.annotation.XmlRootElement(name = "ComputePoolPerformanceStatistics")
public class ComputePoolPerformanceStatistics extends Resource {

    // Maps the optional, nillable <Hourly> element.
    @XmlElement(name = "Hourly", nillable = true)
    protected ComputePoolPerformanceStatistic hourly;
    // Maps the optional, nillable <Daily> element.
    @XmlElement(name = "Daily", nillable = true)
    protected ComputePoolPerformanceStatistic daily;

    /**
     * Gets the value of the hourly property.
     *
     * @return
     *     possible object is
     *     {@link ComputePoolPerformanceStatistic }
     *
     */
    public ComputePoolPerformanceStatistic getHourly() {
        return hourly;
    }

    /**
     * Sets the value of the hourly property.
     *
     * @param value
     *     allowed object is
     *     {@link ComputePoolPerformanceStatistic }
     *
     */
    public void setHourly(ComputePoolPerformanceStatistic value) {
        this.hourly = value;
    }

    /**
     * Gets the value of the daily property.
     *
     * @return
     *     possible object is
     *     {@link ComputePoolPerformanceStatistic }
     *
     */
    public ComputePoolPerformanceStatistic getDaily() {
        return daily;
    }

    /**
     * Sets the value of the daily property.
     *
     * @param value
     *     allowed object is
     *     {@link ComputePoolPerformanceStatistic }
     *
     */
    public void setDaily(ComputePoolPerformanceStatistic value) {
        this.daily = value;
    }

}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-imagebuilder/src/main/java/com/amazonaws/services/imagebuilder/model/transform/InstanceConfigurationJsonUnmarshaller.java
3198
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.imagebuilder.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.imagebuilder.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * InstanceConfiguration JSON Unmarshaller.
 * <p>
 * Code-generated streaming unmarshaller: walks the JSON token stream, populating the
 * {@code image} and {@code blockDeviceMappings} fields of an {@link InstanceConfiguration},
 * and stops once the token depth returns to the level at which the object started.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InstanceConfigurationJsonUnmarshaller implements Unmarshaller<InstanceConfiguration, JsonUnmarshallerContext> {

    /**
     * Unmarshalls one InstanceConfiguration object from the context's current position.
     *
     * @param context streaming JSON context positioned at (or just before) the object.
     * @return the populated model object, or null if the current value is JSON null.
     * @throws Exception on any parse failure (propagated from the underlying parser).
     */
    public InstanceConfiguration unmarshall(JsonUnmarshallerContext context) throws Exception {
        InstanceConfiguration instanceConfiguration = new InstanceConfiguration();

        // Depth bookkeeping: fields belonging to THIS object live at originalDepth + 1.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // JSON null for the whole object means "no value", not an empty object.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("image", targetDepth)) {
                    context.nextToken();
                    instanceConfiguration.setImage(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("blockDeviceMappings", targetDepth)) {
                    context.nextToken();
                    // Delegate each array element to the nested mapping unmarshaller.
                    instanceConfiguration.setBlockDeviceMappings(new ListUnmarshaller<InstanceBlockDeviceMapping>(InstanceBlockDeviceMappingJsonUnmarshaller
                            .getInstance())

                    .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Leaving a container: stop only when we are back at (or above) the depth
                // where this object began, within the same parent element.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return instanceConfiguration;
    }

    // Lazily-created singleton; the unmarshaller itself is stateless.
    private static InstanceConfigurationJsonUnmarshaller instance;

    public static InstanceConfigurationJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new InstanceConfigurationJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
mvp4g/mvp4g
src/test/java/com/google/gwt/dev/javac/typemodel/StubClassType.java
6796
/*
 * Copyright (c) 2009 - 2017 - Pierre-Laurent Coirer, Frank Hossfeld
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.gwt.dev.javac.typemodel;

import java.lang.annotation.Annotation;
import java.util.HashMap;
import java.util.Map;

import com.google.gwt.core.ext.typeinfo.JPrimitiveType;
import com.google.gwt.core.ext.typeinfo.JType;
import com.google.gwt.core.ext.typeinfo.NotFoundException;
import com.mvp4g.rebind.test_tools.Mvp4gRunAsyncCallbackStub;

/**
 * Test stub for {@link JClassType}.
 * <p>
 * Every override is a no-op returning null/false/0, EXCEPT
 * {@link #getOverridableMethods()}, which fabricates a single "load" method taking a
 * {@code Mvp4gRunAsyncCallbackStub} parameter — the only behavior tests rely on.
 */
public class StubClassType extends JClassType {

  // Oracle used by getOverridableMethods() to resolve the callback parameter type.
  TypeOracle oracle;

  public StubClassType(TypeOracle oracle) {
    this.oracle = oracle;
  }

  // --- All lookups below are stubbed to "not found" (null) ---

  @Override
  public JConstructor findConstructor(JType[] paramTypes) {
    return null;
  }

  @Override
  public JField findField(String name) {
    return null;
  }

  @Override
  public JMethod findMethod(String name, JType[] paramTypes) {
    return null;
  }

  @Override
  public JClassType findNestedType(String typeName) {
    return null;
  }

  @Override
  public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
    return null;
  }

  @Override
  public Annotation[] getAnnotations() {
    return null;
  }

  @Override
  public JConstructor getConstructor(JType[] paramTypes) throws NotFoundException {
    return null;
  }

  @Override
  public JConstructor[] getConstructors() {
    return null;
  }

  @Override
  public Annotation[] getDeclaredAnnotations() {
    return null;
  }

  @Override
  public JClassType getEnclosingType() {
    return null;
  }

  @Override
  public JClassType getErasedType() {
    return null;
  }

  @Override
  public JField getField(String name) {
    return null;
  }

  @Override
  public JField[] getFields() {
    return null;
  }

  @Override
  public JClassType[] getImplementedInterfaces() {
    return null;
  }

  @Override
  public JMethod[] getInheritableMethods() {
    return null;
  }

  @Override
  public String getJNISignature() {
    return null;
  }

  @Override
  public JMethod getMethod(String name, JType[] paramTypes) throws NotFoundException {
    return null;
  }

  @Override
  public JMethod[] getMethods() {
    return null;
  }

  @Override
  public String getName() {
    return null;
  }

  @Override
  public JClassType getNestedType(String typeName) throws NotFoundException {
    return null;
  }

  @Override
  public JClassType[] getNestedTypes() {
    return null;
  }

  @Override
  public TypeOracle getOracle() {
    return null;
  }

  @Override
  public JMethod[] getOverloads(String name) {
    return null;
  }

  // The single piece of real behavior in this stub: expose exactly one overridable
  // method, "load(Mvp4gRunAsyncCallbackStub callback)", for the generator under test.
  @Override
  public JMethod[] getOverridableMethods() {
    Map<Class<? extends Annotation>, Annotation> declaredAnnotations = new HashMap<Class<? extends Annotation>, Annotation>();
    JMethod method = new JMethod(this, "load", declaredAnnotations, null);
    // JParameter registers itself with the method in its constructor, so the
    // seemingly-unused "new" is intentional.
    new JParameter(method, oracle.findType(Mvp4gRunAsyncCallbackStub.class.getCanonicalName()), "callback", declaredAnnotations, true);
    return new JMethod[] { method };
  }

  @Override
  public JPackage getPackage() {
    return null;
  }

  @Override
  public String getQualifiedBinaryName() {
    return null;
  }

  @Override
  public String getQualifiedSourceName() {
    return null;
  }

  @Override
  public String getSimpleSourceName() {
    return null;
  }

  @Override
  public JClassType[] getSubtypes() {
    return null;
  }

  @Override
  public JClassType getSuperclass() {
    return null;
  }

  @Override
  void setSuperclass(JClassType type) {
  }

  // --- Modifier / kind queries are stubbed to false / null / 0 ---

  @Override
  public boolean isAbstract() {
    return false;
  }

  @Override
  public boolean isAnnotationPresent(Class<? extends Annotation> annotationClass) {
    return false;
  }

  @Override
  public JArrayType isArray() {
    return null;
  }

  @Override
  public JClassType isClass() {
    return null;
  }

  @Override
  public boolean isDefaultInstantiable() {
    return false;
  }

  @Override
  public JEnumType isEnum() {
    return null;
  }

  @Override
  public boolean isFinal() {
    return false;
  }

  @Override
  public JGenericType isGenericType() {
    return null;
  }

  @Override
  public JClassType isInterface() {
    return null;
  }

  @Override
  public boolean isMemberType() {
    return false;
  }

  @Override
  public JParameterizedType isParameterized() {
    return null;
  }

  @Override
  public JPrimitiveType isPrimitive() {
    return null;
  }

  @Override
  public boolean isPrivate() {
    return false;
  }

  @Override
  public boolean isProtected() {
    return false;
  }

  @Override
  public boolean isPublic() {
    return false;
  }

  @Override
  public JRawType isRawType() {
    return null;
  }

  @Override
  public boolean isStatic() {
    return false;
  }

  @Override
  public JWildcardType isWildcard() {
    return null;
  }

  // --- Internal mutation hooks are stubbed to no-ops ---

  @Override
  protected void acceptSubtype(JClassType me) {
  }

  @Override
  protected void getInheritableMethodsOnSuperclassesAndThisClass(Map<String, JMethod> methodsBySignature) {
  }

  @Override
  protected void getInheritableMethodsOnSuperinterfacesAndMaybeThisInterface(Map<String, JMethod> methodsBySignature) {
  }

  @Override
  protected int getModifierBits() {
    return 0;
  }

  @Override
  protected void notifySuperTypesOf(JClassType me) {
  }

  @Override
  protected void removeSubtype(JClassType me) {
  }

  @Override
  void addConstructor(JConstructor ctor) {
  }

  @Override
  void addField(JField field) {
  }

  @Override
  void addImplementedInterface(JClassType intf) {
  }

  @Override
  void addMethod(JMethod method) {
  }

  @Override
  void addModifierBits(int bits) {
  }

  @Override
  void addNestedType(JClassType type) {
  }

  @Override
  JClassType findNestedTypeImpl(String[] typeName, int index) {
    return null;
  }

  @Override
  JClassType getSubstitutedType(JParameterizedType parameterizedType) {
    return null;
  }

  @Override
  void notifySuperTypes() {
  }

  @Override
  void removeFromSupertypes() {
  }
}
apache-2.0
seasarorg/s2dao
s2-dao/src/test/java/org/seasar/dao/pager/EmployeeDao.java
929
/*
 * Copyright 2004-2011 the Seasar Foundation and the Others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package org.seasar.dao.pager;

import java.util.List;

/**
 * S2Dao DAO interface for {@link Employee}, used in pager tests.
 * <p>
 * The BEAN and *_QUERY constants are S2Dao annotation-by-convention markers:
 * BEAN names the mapped bean class, and getEmployees_QUERY supplies the SQL
 * fragment appended for {@link #getEmployees(PagerCondition)}.
 *
 * @author manhole
 */
public interface EmployeeDao {

    // S2Dao convention: the bean class returned by this DAO's queries.
    public Class BEAN = Employee.class;

    // S2Dao convention: SQL fragment for getEmployees().
    public String getEmployees_QUERY = "ORDER BY EMPNO";

    /**
     * Returns employees ordered by EMPNO, paged per the given condition.
     *
     * @param condition paging condition (offset/limit), interpreted by the pager framework.
     * @return the page of employees.
     */
    public List getEmployees(PagerCondition condition);

}
apache-2.0
mkoslacz/Moviper
moviper-databinding-viewstate/src/main/java/com/mateuszkoslacz/moviper/base/view/fragment/autoinject/databinding/ViperDataBindingLceViewStateFragment.java
1488
package com.mateuszkoslacz.moviper.base.view.fragment.autoinject.databinding;

import android.databinding.DataBindingUtil;
import android.databinding.ViewDataBinding;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import com.hannesdorfmann.mosby.mvp.MvpPresenter;
import com.hannesdorfmann.mosby.mvp.lce.MvpLceView;
import com.hannesdorfmann.mosby.mvp.viewstate.ViewState;
import com.mateuszkoslacz.moviper.base.view.fragment.autoinject.ViperLceViewStateAiFragment;
import com.mateuszkoslacz.moviper.iface.view.ViperLceView;

/**
 * LCE (loading/content/error) view-state fragment base class that wires Android Data Binding
 * into the Viper fragment hierarchy: instead of plain layout inflation, {@link #onCreateView}
 * inflates the layout through {@link DataBindingUtil} and keeps the generated binding in
 * {@link #mBinding} for subclasses.
 * <p>
 * Created by mateuszkoslacz on 14.12.2016.
 */
public abstract class ViperDataBindingLceViewStateFragment
        <ContentView extends View,
                Model,
                ViewType extends MvpLceView<Model>,
                Presenter extends MvpPresenter<ViewType>,
                ViewStateType extends ViewState<ViewType>,
                Binding extends ViewDataBinding>
        extends ViperLceViewStateAiFragment<ContentView, Model, ViewType, Presenter, ViewStateType>
        implements ViperLceView<Model> {

    // Generated data binding for the fragment's layout; populated in onCreateView,
    // so it is null before that callback runs.
    protected Binding mBinding;

    /**
     * Inflates the layout returned by {@code getLayoutId()} via data binding and returns
     * the binding's root view. Subclasses access the binding through {@link #mBinding}.
     */
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        mBinding = DataBindingUtil.inflate(inflater, getLayoutId(), container, false);
        return mBinding.getRoot();
    }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/ResetEbsDefaultKmsKeyIdRequest.java
2694
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.ResetEbsDefaultKmsKeyIdRequestMarshaller;

/**
 * Code-generated request object for the EC2 ResetEbsDefaultKmsKeyId operation. The
 * operation itself takes no parameters, which is why equals/hashCode/toString below are
 * effectively trivial.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ResetEbsDefaultKmsKeyIdRequest extends AmazonWebServiceRequest implements Serializable, Cloneable,
        DryRunSupportedRequest<ResetEbsDefaultKmsKeyIdRequest> {

    /**
     * This method is intended for internal use only. Returns the marshaled request configured with additional
     * parameters to enable operation dry-run.
     */
    @Override
    public Request<ResetEbsDefaultKmsKeyIdRequest> getDryRunRequest() {
        Request<ResetEbsDefaultKmsKeyIdRequest> request = new ResetEbsDefaultKmsKeyIdRequestMarshaller().marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        sb.append("}");
        return sb.toString();
    }

    // With no request fields, any two instances of this type compare equal.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ResetEbsDefaultKmsKeyIdRequest == false)
            return false;
        ResetEbsDefaultKmsKeyIdRequest other = (ResetEbsDefaultKmsKeyIdRequest) obj;
        return true;
    }

    // Constant hash, consistent with the field-less equals() above.
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        return hashCode;
    }

    @Override
    public ResetEbsDefaultKmsKeyIdRequest clone() {
        return (ResetEbsDefaultKmsKeyIdRequest) super.clone();
    }
}
apache-2.0
laurar07/onMy6
android/app/src/main/java/com/nativestarterkit/MainActivity.java
478
package com.nativestarterkit;

import com.facebook.react.ReactActivity;
// NOTE(review): these two imports are not referenced in this file — presumably the
// corresponding packages are registered elsewhere (e.g. MainApplication); confirm
// before removing.
import com.oblador.vectoricons.VectorIconsPackage;
import com.airbnb.android.react.maps.MapsPackage;

/**
 * Single-activity entry point for the React Native app; all it does is name the JS
 * component that React Native should render.
 */
public class MainActivity extends ReactActivity {

    /**
     * Returns the name of the main component registered from JavaScript.
     * This is used to schedule rendering of the component.
     */
    @Override
    protected String getMainComponentName() {
        return "NativeStarterKit";
    }
}
apache-2.0
consulo/consulo
modules/desktop-awt/desktop-ui-laf-impl/src/main/java/consulo/desktop/ui/laf/idea/darcula/DarculaJBPopupComboPopup.java
9105
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package consulo.desktop.ui.laf.idea.darcula;

import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupListener;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.TitledSeparator;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.JBList;
import com.intellij.ui.popup.list.ListPopupImpl;
import com.intellij.util.ui.JBUI;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.AncestorEvent;
import javax.swing.event.AncestorListener;
import javax.swing.plaf.basic.ComboPopup;
import java.awt.*;
import java.awt.event.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;

/**
 * A {@link ComboPopup} implementation that shows a combo box drop-down as an IDE
 * {@link ListPopupImpl} (with speed search) instead of the default Swing popup.
 * The popup is (re)built from the combo box model on every {@link #show()}; a proxy
 * {@link JList} mirrors the popup's model/renderer so the combo box UI can query it
 * between popup lifetimes.
 *
 * @author gregsh
 */
//@ApiStatus.Experimental
public class DarculaJBPopupComboPopup<T> implements ComboPopup, ItemListener, MouseListener, MouseMotionListener, MouseWheelListener, PropertyChangeListener, AncestorListener {
  // Client property key under which combo boxes opt into this popup implementation.
  public static final String CLIENT_PROP = "ComboBox.jbPopup";

  private final JComboBox<T> myComboBox;
  // Stand-in list returned from getList(); kept in sync with the real popup list's
  // model/renderer while a popup is showing, and reset when it closes.
  private final JList<T> myProxyList = new JBList<>();
  // Currently displayed popup, or null when hidden.
  private ListPopupImpl myPopup;
  // True right after the popup was dismissed by a click on the combo box itself, so the
  // follow-up mousePressed() does not immediately reopen it.
  private boolean myJustClosedViaClick;

  public DarculaJBPopupComboPopup(@Nonnull JComboBox<T> comboBox) {
    myComboBox = comboBox;
    myProxyList.setModel(comboBox.getModel());
    myComboBox.addPropertyChangeListener(this);
    myComboBox.addItemListener(this);
    myComboBox.addAncestorListener(this);
  }

  /**
   * Builds a fresh list popup from the combo box model and shows it underneath the combo box.
   * Selecting an item pushes it back into the combo box via {@code setSelectedItem}.
   */
  @Override
  public void show() {
    myJustClosedViaClick = false;
    if (myPopup != null) {
      if (myPopup.isVisible()) return;
      // onClosed() was not called for some reason
      myPopup.cancel();
    }
    // Snapshot the model items; the popup step works on a plain list.
    ArrayList<T> items = new ArrayList<>(myComboBox.getModel().getSize());
    for (int i = 0, size = myComboBox.getModel().getSize(); i < size; i++) {
      items.add(myComboBox.getModel().getElementAt(i));
    }
    BaseListPopupStep<T> step = new BaseListPopupStep<T>("", items) {
      @Nullable
      @Override
      public PopupStep onChosen(T selectedValue, boolean finalChoice) {
        myComboBox.setSelectedItem(selectedValue);
        return FINAL_CHOICE;
      }

      @Override
      public void canceled() {
        myComboBox.firePopupMenuCanceled();
      }

      @Override
      public boolean isSpeedSearchEnabled() {
        return true;
      }

      /**
       * Text used for speed search: derived from whatever the combo renderer produces
       * for the value (empty for separators so they are not matched).
       */
      @Nonnull
      @Override
      public String getTextFor(T value) {
        Component component = myComboBox.getRenderer().getListCellRendererComponent(myProxyList, value, -1, false, false);
        return component instanceof TitledSeparator || component instanceof JSeparator ? "" :
               component instanceof JLabel ? ((JLabel)component).getText() :
               component instanceof SimpleColoredComponent ? ((SimpleColoredComponent)component).getCharSequence(false).toString() :
               String.valueOf(value);
      }

      // Separator rows rendered by the combo renderer are not selectable.
      @Override
      public boolean isSelectable(T value) {
        Component component = myComboBox.getRenderer().getListCellRendererComponent(myProxyList, value, -1, false, false);
        return !(component instanceof TitledSeparator || component instanceof JSeparator);
      }
    };
    step.setDefaultOptionIndex(myComboBox.getSelectedIndex());

    Project project = DataManager.getInstance().getDataContext(myComboBox).getData(CommonDataKeys.PROJECT);
    myPopup = new ListPopupImpl(project, step) {
      @Override
      public void cancel(InputEvent e) {
        if (e instanceof MouseEvent) {
          // we want the second click on combo-box just to close
          // and not to instantly show the popup again in the following
          // DarculaJBPopupComboPopup#mousePressed()
          Point point = new RelativePoint((MouseEvent)e).getPoint(myComboBox);
          myJustClosedViaClick = new Rectangle(myComboBox.getSize()).contains(point);
        }
        super.cancel(e);
      }
    };
    myPopup.setMaxRowCount(10);
    myPopup.setRequestFocus(false);
    myPopup.addListener(new JBPopupListener() {
      @Override
      public void beforeShown(@Nonnull LightweightWindowEvent event) {
        myComboBox.firePopupMenuWillBecomeVisible();
      }

      @Override
      public void onClosed(@Nonnull LightweightWindowEvent event) {
        myComboBox.firePopupMenuWillBecomeInvisible();
        myPopup = null;
        // Detach the proxy from the (now dead) popup list.
        myProxyList.setCellRenderer(new DefaultListCellRenderer());
        myProxyList.setModel(myComboBox.getModel());
      }
    });
    //noinspection unchecked
    JList<T> list = myPopup.getList();
    configureList(list);
    Border border = UIManager.getBorder("ComboPopup.border");
    if (border != null) {
      myPopup.getContent().setBorder(border);
    }
    // Mirror the live popup list through the proxy while the popup is showing.
    myProxyList.setCellRenderer(list.getCellRenderer());
    myProxyList.setModel(list.getModel());
    myPopup.setMinimumSize(myComboBox.getSize());
    myPopup.showUnderneathOf(myComboBox);
  }

  /** Applies combo-box fonts/colors and the delegating renderer to the popup's list. */
  protected void configureList(@Nonnull JList<T> list) {
    list.setFont(myComboBox.getFont());
    list.setForeground(myComboBox.getForeground());
    list.setBackground(myComboBox.getBackground());
    list.setSelectionForeground(UIManager.getColor("ComboBox.selectionForeground"));
    list.setSelectionBackground(UIManager.getColor("ComboBox.selectionBackground"));
    list.setBorder(null);
    //noinspection unchecked
    list.setCellRenderer(new MyDelegateRenderer());
    list.setFocusable(false);
    list.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
  }

  /** Hook for item padding applied to every non-separator renderer component. */
  protected void customizeListRendererComponent(JComponent component) {
    component.setBorder(JBUI.Borders.empty(2, 8));
  }

  @Override
  public void hide() {
    myJustClosedViaClick = false;
    if (myPopup == null) return;
    myPopup.cancel();
  }

  @Override
  public boolean isVisible() {
    return myPopup != null && myPopup.isVisible();
  }

  /** Returns the proxy list, never the popup's own list (which may not exist). */
  @Override
  public JList getList() {
    return myProxyList;
  }

  @Override
  public MouseListener getMouseListener() {
    return this;
  }

  @Override
  public MouseMotionListener getMouseMotionListener() {
    return this;
  }

  @Override
  public KeyListener getKeyListener() {
    return null;
  }

  /** Detaches every listener installed by the constructor. */
  @Override
  public void uninstallingUI() {
    myComboBox.removePropertyChangeListener(this);
    myComboBox.removeItemListener(this);
    myComboBox.removeAncestorListener(this);
  }

  /** Close the popup when the combo box swaps its model, renderer or editability. */
  @Override
  public void propertyChange(PropertyChangeEvent e) {
    String propertyName = e.getPropertyName();
    if ("model".equals(propertyName) || "renderer".equals(propertyName) || "editable".equals(propertyName)) {
      if (isVisible()) {
        hide();
      }
    }
  }

  @Override
  public void itemStateChanged(ItemEvent e) {
  }

  @Override
  public void mouseClicked(MouseEvent e) {
  }

  /**
   * Toggles the popup on left-click on the combo box (focus handling mimics the
   * default Swing combo UI); a click that just closed the popup is swallowed.
   */
  @Override
  public void mousePressed(MouseEvent e) {
    if (e.getSource() == getList()) return;
    if (!SwingUtilities.isLeftMouseButton(e) || !myComboBox.isEnabled()) return;

    if (myComboBox.isEditable()) {
      Component comp = myComboBox.getEditor().getEditorComponent();
      if ((!(comp instanceof JComponent)) || ((JComponent)comp).isRequestFocusEnabled()) {
        comp.requestFocus();
      }
    }
    else if (myComboBox.isRequestFocusEnabled()) {
      myComboBox.requestFocus();
    }
    if (myJustClosedViaClick) {
      myJustClosedViaClick = false;
      return;
    }
    if (isVisible()) {
      hide();
    }
    else {
      show();
    }
  }

  @Override
  public void mouseReleased(MouseEvent e) {
  }

  @Override
  public void mouseEntered(MouseEvent e) {
  }

  @Override
  public void mouseExited(MouseEvent e) {
  }

  @Override
  public void mouseDragged(MouseEvent e) {
  }

  @Override
  public void mouseMoved(MouseEvent e) {
  }

  @Override
  public void mouseWheelMoved(MouseWheelEvent e) {
  }

  @Override
  public void ancestorAdded(AncestorEvent event) {
  }

  @Override
  public void ancestorRemoved(AncestorEvent event) {
  }

  /** The combo box moved in its hierarchy — a floating popup would be misplaced, so close it. */
  @Override
  public void ancestorMoved(AncestorEvent event) {
    hide();
  }

  /**
   * Renderer that delegates to the combo box's own renderer and then applies the
   * popup item padding (separators are left untouched).
   */
  private class MyDelegateRenderer implements ListCellRenderer {
    @Override
    public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
      //noinspection unchecked
      Component component = myComboBox.getRenderer().getListCellRendererComponent(list, (T)value, index, isSelected, cellHasFocus);
      if (component instanceof JComponent &&
          !(component instanceof JSeparator || component instanceof TitledSeparator)) {
        customizeListRendererComponent((JComponent)component);
      }
      return component;
    }
  }
}
apache-2.0
java110/MicroCommunity
service-api/src/main/java/com/java110/api/listener/machineTranslate/UpdateMachineTranslateListener.java
2110
package com.java110.api.listener.machineTranslate;

import com.alibaba.fastjson.JSONObject;
import com.java110.api.bmo.machineTranslate.IMachineTranslateBMO;
import com.java110.api.listener.AbstractServiceApiPlusListener;
import com.java110.core.annotation.Java110Listener;
import com.java110.core.context.DataFlowContext;
import com.java110.core.event.service.api.ServiceDataFlowEvent;
import com.java110.utils.constant.ServiceCodeMachineTranslateConstant;
import com.java110.utils.util.Assert;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpMethod;

/**
 * API listener that updates a machine-sync ("machineTranslate") record.
 * add by wuxw 2019-06-30
 * (Translated from the original Chinese comment, which read "save machine-sync
 * listener" — note this listener actually performs an update.)
 */
@Java110Listener("updateMachineTranslateListener")
public class UpdateMachineTranslateListener extends AbstractServiceApiPlusListener {

    @Autowired
    private IMachineTranslateBMO machineTranslateBMOImpl;

    /**
     * Validates the incoming JSON payload. Every key checked below is mandatory;
     * Assert aborts the request with the given (user-facing, Chinese) message when
     * a key is missing or blank.
     */
    @Override
    protected void validate(ServiceDataFlowEvent event, JSONObject reqJson) {
        Assert.hasKeyAndValue(reqJson, "machineTranslateId", "同步ID不能为空");
        Assert.hasKeyAndValue(reqJson, "machineCode", "必填,请填写设备编码");
        Assert.hasKeyAndValue(reqJson, "machineId", "必填,请填写设备版本号");
        Assert.hasKeyAndValue(reqJson, "typeCd", "必填,请选择对象类型");
        Assert.hasKeyAndValue(reqJson, "objName", "必填,请填写设备名称");
        Assert.hasKeyAndValue(reqJson, "objId", "必填,请填写对象Id");
        Assert.hasKeyAndValue(reqJson, "state", "必填,请选择状态");
    }

    /**
     * Delegates the update to the BMO layer.
     * (The original comment here read "add unit info" and appears to have been
     * copied from another listener; this call updates a machineTranslate record.)
     */
    @Override
    protected void doSoService(ServiceDataFlowEvent event, DataFlowContext context, JSONObject reqJson) {
        machineTranslateBMOImpl.updateMachineTranslate(reqJson, context);
    }

    /** Service code that routes requests to this listener. */
    @Override
    public String getServiceCode() {
        return ServiceCodeMachineTranslateConstant.UPDATE_MACHINETRANSLATE;
    }

    @Override
    public HttpMethod getHttpMethod() {
        return HttpMethod.POST;
    }

    @Override
    public int getOrder() {
        return DEFAULT_ORDER;
    }
}
apache-2.0
mtenrero/vetManager
src/main/java/es/urjc/etsii/mtenrero/Entities/User.java
1416
package es.urjc.etsii.mtenrero.Entities;

import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;

import javax.persistence.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Created by was12 on 14/03/2017.
 *
 * Persisted base class shared by the concrete account types. Passwords are never
 * kept in clear text: both the constructor and the setter store a BCrypt hash.
 */
@MappedSuperclass
public abstract class User implements Serializable {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long id;

    /** Login name; enforced unique at the database level. */
    @Column(unique = true)
    private String logon;

    /** BCrypt hash of the password. */
    private String password;

    /** Role names, eagerly loaded with the entity. */
    @ElementCollection(fetch = FetchType.EAGER)
    private List<String> roles;

    public User() {
        roles = new ArrayList<>();
    }

    public User(String name, String password, String... roles) {
        this.logon = name;
        this.password = hashOf(password);
        this.roles = new ArrayList<>(Arrays.asList(roles));
    }

    /** Single place where the BCrypt hashing of a clear-text password happens. */
    private static String hashOf(String clearText) {
        return new BCryptPasswordEncoder().encode(clearText);
    }

    public String getLogon() {
        return logon;
    }

    public void setLogon(String logon) {
        this.logon = logon;
    }

    /** Returns the stored hash — never the clear-text password. */
    public String getPasswordHash() {
        return password;
    }

    /** Hashes and stores the given clear-text password. */
    public void setPasswordHash(String password) {
        this.password = hashOf(password);
    }

    /** Replaces the current roles with a fresh, mutable copy of the given names. */
    public void setRoles(String... roles) {
        this.roles = new ArrayList<>(Arrays.asList(roles));
    }

    public List<String> getRoles() {
        return roles;
    }
}
apache-2.0
josueeduardo/snappy
plugin/snappy-loader/src/main/java/io/joshworks/snappy/loader/WarLauncher.java
2004
/* * Copyright 2012-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.joshworks.snappy.loader; import io.joshworks.snappy.loader.archive.Archive; import io.joshworks.snappy.loader.util.AsciiBytes; /** * {@link Launcher} for WAR based archives. This launcher for standard WAR archives. * Supports dependencies in {@code WEB-INF/lib} as well as {@code WEB-INF/lib-provided}, * classes are loaded from {@code WEB-INF/classes}. * * @author Phillip Webb * @author Andy Wilkinson */ public class WarLauncher extends ExecutableArchiveLauncher { private static final AsciiBytes WEB_INF = new AsciiBytes("WEB-INF/"); private static final AsciiBytes WEB_INF_CLASSES = WEB_INF.append("classes/"); private static final AsciiBytes WEB_INF_LIB = WEB_INF.append("lib/"); private static final AsciiBytes WEB_INF_LIB_PROVIDED = WEB_INF .append("lib-provided/"); public WarLauncher() { super(); } protected WarLauncher(Archive archive) { super(archive); } public static void main(String[] args) { new WarLauncher().launch(args); } @Override public boolean isNestedArchive(Archive.Entry entry) { if (entry.isDirectory()) { return entry.getName().equals(WEB_INF_CLASSES); } else { return entry.getName().startsWith(WEB_INF_LIB) || entry.getName().startsWith(WEB_INF_LIB_PROVIDED); } } }
apache-2.0
jamiemccrindle/bpmscript
bpmscript-core/src/main/java/org/bpmscript/exec/INextMessage.java
1230
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.bpmscript.exec;

/**
 * A Next Message. This will be sent to a process after it has started and contains
 * the information required to kick off the process again including which branch,
 * version and queue this message is aimed at
 */
public interface INextMessage extends IScriptMessage {

    /** The payload to deliver to the already-started process. */
    Object getMessage();

    /** Identifier of the queue this message is aimed at. */
    String getQueueId();

    /** Process instance id (pid) the message is addressed to. */
    String getPid();

    /** Branch of the process this message targets. */
    String getBranch();

    /** Version of the process this message targets. */
    String getVersion();
}
apache-2.0
wxb2939/rwxlicai
mzb-phone-app-android/mzbemployeeapp/src/main/java/com/xem/mzbemployeeapp/activity/E3_Add137ManagerAty.java
9495
package com.xem.mzbemployeeapp.activity;

import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.app.Dialog;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.View;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.ImageView;
import android.widget.TextView;

import com.loopj.android.http.RequestParams;
import com.xem.mzbemployeeapp.R;
import com.xem.mzbemployeeapp.net.NetCallBack;
import com.xem.mzbemployeeapp.utils.Config;
import com.xem.mzbemployeeapp.utils.MzbUrlFactory;
import com.xem.mzbemployeeapp.utils.RequestUtils;
import com.xem.mzbemployeeapp.utils.TitleBuilder;
import com.xem.mzbemployeeapp.views.MzbDialogListview;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.Calendar;

import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;

/**
 * Created by xuebing on 15/9/4.
 *
 * Screen for creating a new "137 plan" entry for a customer: the user picks a date
 * and a plan type, optionally adds a remark, then the form is POSTed to the
 * BRAND_CREATE endpoint.
 */
public class E3_Add137ManagerAty extends MzbActivity {
    @InjectView(R.id.titlebar_iv_left)
    ImageView back;
    @InjectView(R.id.manager_time)
    TextView managerTime;           // chosen date, rendered as yyyy-MM-dd
    @InjectView(R.id.manager_plan)
    TextView managerPlan;           // chosen plan-type label
    @InjectView(R.id.manager_extra)
    TextView managerExtra;          // free-text remark returned from CommWriteAty
    @InjectView(R.id.manager_state)
    TextView managerState;
    @InjectView(R.id.llstate)
    View llstate;                   // hidden in onCreate — state is not editable here
    @InjectView(R.id.llmanager)
    View llManager;
    @InjectView(R.id.sure)
    Button sure;

    // Currently picked date components (initialized to "today" in setDateTime()).
    private int mYear;
    private int mMonth;
    private int mDay;
    private static final int SHOW_DATAPICK = 0;   // handler message: open the date picker
    private static final int DATE_DIALOG_ID = 1;  // managed-dialog id for the date picker
    private String branid;   // brand id passed in via the launching Intent
    private String custid;   // customer id passed in via the launching Intent
    private MzbDialogListview dialogListview;  // plan-type chooser dialog
    private int num;         // numeric plan type (1..5) sent as "type" to the server
    private String strExtra = null;  // remark text from CommWriteAty

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.e3_add137_manager_aty);
        ButterKnife.inject(this);
        new TitleBuilder(this).setTitleText("添加新的137计划").setLeftImage(R.mipmap.top_view_back);
        llstate.setVisibility(View.GONE);
        branid = getIntent().getStringExtra("branid");
        custid = getIntent().getStringExtra("custid");
        setDateTime();
    }

    /**
     * Single click dispatcher for all interactive views on this screen.
     */
    @OnClick({R.id.titlebar_iv_left,R.id.manager_time,R.id.manager_plan,R.id.sure,R.id.llmanager})
    public void onDo(View v) {
        switch (v.getId()) {
            case R.id.titlebar_iv_left:
                finish();
                break;
            case R.id.llmanager:
                // Open the free-text editor; result comes back through onActivityResult.
                Intent orderExtra = new Intent(E3_Add137ManagerAty.this,CommWriteAty.class);
                startActivityForResult(orderExtra, 0);
                break;
            case R.id.manager_time:
                // Open the date picker indirectly through the handler.
                Message msgt = new Message();
                msgt.what = E3_Add137ManagerAty.SHOW_DATAPICK;
                E3_Add137ManagerAty.this.dateandtimeHandler.sendMessage(msgt);
                break;
            case R.id.sure:
                // Validate mandatory fields before submitting.
                if (managerTime.getText().toString().trim().equals("")) {
                    showToast("请选择时间");
                    return;
                } else if (managerPlan.getText().toString().trim().equals("")) {
                    showToast("输入选择项目");
                    return;
                }
                // NOTE(review): doCommit() runs asynchronously but finish() is called
                // immediately — the success/failure toast is shown by the callback after
                // the activity is gone; confirm this is intended.
                doCommit();
                finish();
                break;
            case R.id.manager_plan:
                // Show the plan-type chooser; each row sets `num` and the label text.
                dialogListview = new MzbDialogListview(E3_Add137ManagerAty.this,num);
                dialogListview.show();
                // NOTE(review): dlPhone toggles imgStore (not a phone-specific image) —
                // looks like a copy/paste slip; confirm against the dialog layout.
                dialogListview.dlPhone.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        dialogListview.imgStore.setVisibility(View.VISIBLE);
                        num = 1;
                        managerPlan.setText("预约来店");
                        dialogListview.dismiss();
                    }
                });
                dialogListview.dlCare.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        dialogListview.imgCare.setVisibility(View.VISIBLE);
                        num = 2;
                        managerPlan.setText("专业关怀");
                        dialogListview.dismiss();
                    }
                });
                dialogListview.dlExtracare.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        dialogListview.imgExtracare.setVisibility(View.VISIBLE);
                        num = 3;
                        managerPlan.setText("特殊关怀");
                        dialogListview.dismiss();
                    }
                });
                dialogListview.dlStore.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        dialogListview.imgStore.setVisibility(View.VISIBLE);
                        num = 4;
                        managerPlan.setText("到店护理");
                        dialogListview.dismiss();
                    }
                });
                dialogListview.dlOther.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        dialogListview.imgOther.setVisibility(View.VISIBLE);
                        num = 5;
                        managerPlan.setText("特殊标记");
                        dialogListview.dismiss();
                    }
                });
                // NOTE(review): no `break` here — execution falls through into `default`.
                // Harmless today because `default` is empty, but fragile if code is added.
            default:
                break;
        }
    }

    /** Receives the remark text typed in CommWriteAty (result code -102). */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        switch (resultCode) {
            case -102:
                strExtra = data.getStringExtra("extra");
                managerExtra.setText(strExtra);
                break;
            default:
                break;
        }
    }

    /**
     * Posts the new 137-plan entry (customer, brand, type, date, remark, creator)
     * to the BRAND_CREATE endpoint and toasts the outcome.
     */
    public void doCommit() {
        RequestParams params1 = new RequestParams();
        params1.put("custid", custid);
        params1.put("branid", branid);
        params1.put("type", num+"");
        params1.put("date", managerTime.getText().toString());
        params1.put("memo", managerExtra.getText().toString());
        params1.put("creator", Config.getCachedBrandEmpid(E3_Add137ManagerAty.this).toString());
        RequestUtils.ClientTokenPost(E3_Add137ManagerAty.this, MzbUrlFactory.BASE_URL + MzbUrlFactory.BRAND_CREATE, params1, new NetCallBack(this) {
            @Override
            public void onMzbSuccess(String result) {
                try {
                    JSONObject obj = new JSONObject(result);
                    // code == 0 is the server's success flag.
                    if (obj.getInt("code") == 0) {
                        showToast("新建成功");
                    } else {
                        showToast(obj.getString("message"));
                    }
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onMzbFailues(Throwable arg0) {
                showToast("请求失败,请确认网络连接!");
            }
        });
    }

    /**
     * 更新日期显示 — renders the picked date as yyyy-MM-dd, rejecting past dates.
     *
     * NOTE(review): the past-date check below is wrong — it compares year, month and
     * day independently (`mYear < nYear || mMonth < nMonth || mDay < nDay`), so e.g.
     * a date next year with a smaller month/day is rejected even though it is in the
     * future. A proper fix would compare full Calendar values; left untouched here.
     */
    private void updateDateDisplay() {
        final Calendar c = Calendar.getInstance();
        int nYear = c.get(Calendar.YEAR);
        int nMonth = c.get(Calendar.MONTH);
        int nDay = c.get(Calendar.DAY_OF_MONTH);
        if (mYear < nYear || mMonth < nMonth || mDay < nDay){
            showToast("当前时间不可选择");
            return;
        }else {
            // Zero-pad month (stored 0-based, hence +1) and day to two digits.
            managerTime.setText(new StringBuilder().append(mYear).append("-")
                    .append((mMonth + 1) < 10 ? "0" + (mMonth + 1) : (mMonth + 1))
                    .append("-").append((mDay < 10) ? "0" + mDay : mDay));
        }
    }

    /**
     * 设置日期 — seeds the picker fields with today's date (display deliberately
     * left blank; the commented-out call below would pre-fill it).
     */
    private void setDateTime() {
        final Calendar c = Calendar.getInstance();
        mYear = c.get(Calendar.YEAR);
        mMonth = c.get(Calendar.MONTH);
        mDay = c.get(Calendar.DAY_OF_MONTH);
        // updateDateDisplay();
    }

    /**
     * 日期控件的事件 — callback invoked by the DatePickerDialog with the chosen date.
     */
    private DatePickerDialog.OnDateSetListener mDateSetListener = new DatePickerDialog.OnDateSetListener() {
        public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
            mYear = year;
            mMonth = monthOfYear;
            mDay = dayOfMonth;
            updateDateDisplay();
        }
    };

    /** Supplies the managed date-picker dialog for {@link #DATE_DIALOG_ID}. */
    @Override
    protected Dialog onCreateDialog(int id) {
        switch (id) {
            case DATE_DIALOG_ID:
                return new DatePickerDialog(E3_Add137ManagerAty.this, AlertDialog.THEME_HOLO_LIGHT, mDateSetListener, mYear, mMonth, mDay);
        }
        return null;
    }

    // Opens the date picker when a SHOW_DATAPICK message arrives.
    Handler dateandtimeHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case E3_Add137ManagerAty.SHOW_DATAPICK:
                    showDialog(DATE_DIALOG_ID);
                    break;
            }
        }
    };
}
apache-2.0
gustavoanatoly/hbase
hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java
1793
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ReflectionUtils; /** * A factory class that constructs an {@link org.apache.hadoop.hbase.client.RequestController}. */ @InterfaceAudience.Public public final class RequestControllerFactory { public static final String REQUEST_CONTROLLER_IMPL_CONF_KEY = "hbase.client.request.controller.impl"; /** * Constructs a {@link org.apache.hadoop.hbase.client.RequestController}. * @param conf The {@link Configuration} to use. * @return A RequestController which is built according to the configuration. */ public static RequestController create(Configuration conf) { Class<? extends RequestController> clazz= conf.getClass(REQUEST_CONTROLLER_IMPL_CONF_KEY, SimpleRequestController.class, RequestController.class); return ReflectionUtils.newInstance(clazz, conf); } }
apache-2.0
tranquang9a1/ECRM
App/ECRM/src/main/java/com/ecrm/Entity/TblReportDetailEntity.java
5502
package com.ecrm.Entity;

import javax.persistence.*;
import java.sql.Timestamp;

/**
 * Created by Htang on 6/5/2015.
 *
 * JPA entity for one line of a damage report. The primary key is the composite
 * (equipmentId, reportId), mapped through {@link TblReportDetailEntityPK}.
 */
@Entity
@Table(name = "tblReportDetail")
@IdClass(TblReportDetailEntityPK.class)
public class TblReportDetailEntity {
    private int equipmentId;
    private int reportId;
    private boolean status;          // resolved flag; false = still open
    private String damagedLevel;
    private String description;
    private String position;
    private String solution;
    private Timestamp resolveTime;
    private TblEquipmentEntity tblEquipmentByEquipmentId;
    private TblReportEntity tblReportByReportId;

    public TblReportDetailEntity(){}

    /** New, unresolved detail (status defaults to false). */
    public TblReportDetailEntity(int equipmentId, int reportId, String damagedLevel,
                                 String description, String position){
        this.equipmentId = equipmentId;
        this.reportId = reportId;
        this.damagedLevel = damagedLevel;
        this.description = description;
        this.position = position;
        this.status = false;
    }

    /** Detail with an explicit status. */
    public TblReportDetailEntity(int equipmentId, int reportId, String damagedLevel,
                                 String description, String position, boolean status){
        this.equipmentId = equipmentId;
        this.reportId = reportId;
        this.damagedLevel = damagedLevel;
        this.description = description;
        this.position = position;
        this.status = status;
    }

    /** Minimal detail carrying only key, status and position. */
    public TblReportDetailEntity(int equipmentId, int reportId, boolean status, String position) {
        this.equipmentId = equipmentId;
        this.reportId = reportId;
        this.status = status;
        this.position = position;
    }

    @Id
    @Column(name = "EquipmentId")
    public int getEquipmentId() {
        return equipmentId;
    }

    public void setEquipmentId(int equipmentId) {
        this.equipmentId = equipmentId;
    }

    @Id
    @Column(name = "ReportId")
    public int getReportId() {
        return reportId;
    }

    public void setReportId(int reportId) {
        this.reportId = reportId;
    }

    @Basic
    @Column(name = "Status")
    public boolean isStatus() {
        return status;
    }

    public void setStatus(boolean status) {
        this.status = status;
    }

    @Basic
    @Column(name = "DamagedLevel")
    public String getDamagedLevel() {
        return damagedLevel;
    }

    public void setDamagedLevel(String damagedLevel) {
        this.damagedLevel = damagedLevel;
    }

    @Basic
    @Column(name = "Description")
    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    @Basic
    @Column(name = "Position")
    public String getPosition() {
        return position;
    }

    public void setPosition(String position) {
        this.position = position;
    }

    @Basic
    @Column(name = "Solution")
    public String getSolution() {
        return solution;
    }

    public void setSolution(String solution) {
        this.solution = solution;
    }

    @Basic
    @Column(name = "ResolveTime")
    public Timestamp getResolveTime() {
        return resolveTime;
    }

    public void setResolveTime(Timestamp resolveTime) {
        this.resolveTime = resolveTime;
    }

    /**
     * Equality over key fields plus status/damagedLevel/position/solution/resolveTime.
     * NOTE(review): {@code description} is excluded from both equals and hashCode —
     * confirm that omission is intentional.
     *
     * Bug fix over the original: {@code damagedLevel} was compared with {@code !=}
     * (reference identity), so two entities with equal-but-distinct String instances
     * compared unequal while hashing identically. It now uses the same null-safe
     * equals pattern as the other String fields.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        TblReportDetailEntity that = (TblReportDetailEntity) o;

        if (equipmentId != that.equipmentId) return false;
        if (reportId != that.reportId) return false;
        if (status != that.status) return false;
        if (damagedLevel != null ? !damagedLevel.equals(that.damagedLevel) : that.damagedLevel != null) return false;
        if (position != null ? !position.equals(that.position) : that.position != null) return false;
        if (solution != null ? !solution.equals(that.solution) : that.solution != null) return false;
        if (resolveTime != null ? !resolveTime.equals(that.resolveTime) : that.resolveTime != null) return false;

        return true;
    }

    /** Hash over the same fields equals() inspects (description excluded). */
    @Override
    public int hashCode() {
        int result = equipmentId;
        result = 31 * result + reportId;
        result = 31 * result + (status ? 1 : 0);
        result = 31 * result + (damagedLevel != null ? damagedLevel.hashCode() : 0);
        result = 31 * result + (position != null ? position.hashCode() : 0);
        result = 31 * result + (solution != null ? solution.hashCode() : 0);
        result = 31 * result + (resolveTime != null ? resolveTime.hashCode() : 0);
        return result;
    }

    @ManyToOne
    @JoinColumn(name = "EquipmentId", referencedColumnName = "Id", nullable = false, insertable = false, updatable = false)
    public TblEquipmentEntity getTblEquipmentByEquipmentId() {
        return tblEquipmentByEquipmentId;
    }

    public void setTblEquipmentByEquipmentId(TblEquipmentEntity tblEquipmentByEquipmentId) {
        this.tblEquipmentByEquipmentId = tblEquipmentByEquipmentId;
    }

    @ManyToOne
    @JoinColumn(name = "ReportId", referencedColumnName = "Id", nullable = false, insertable = false, updatable = false)
    public TblReportEntity getTblReportByReportId() {
        return tblReportByReportId;
    }

    public void setTblReportByReportId(TblReportEntity tblReportByReportId) {
        this.tblReportByReportId = tblReportByReportId;
    }
}
apache-2.0
abdelrahmanbadawy/Cassandra-VMS
src/java/client/client/Experiment3_agg.java
8369
package client.client;

import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Random;

/**
 * Generates the CSV fixture for aggregation experiment 3 (courses data).
 *
 * The file produced by {@link #main} contains three sections:
 * 1) 5000 full rows keyed by a unique increasing counter,
 * 2) 3000 full rows keyed by a random existing id (1..4999),
 * 3) 2000 key-only rows (a random id, nothing else).
 */
public class Experiment3_agg {

    // NOTE(review): this list is never read inside this class — presumably shared
    // fixture data for sibling experiment generators; confirm before removing.
    // (Commented-out alternate 3-key / 10-key / 50-key variants were deleted.)
    static ArrayList<String> faculty = new ArrayList<String>(
            Arrays.asList("Informatics", "Bioinformatics","BWL","Biology","Dental Medicine","Design","Mathematics","Physics","Chemistry","Medicine",
                    "Informatics", "Bioinformatics1","BWL1","Biology1","Dental Medicine1","Design1","Mathematics1","Physics1","Chemistry1","Medicine1",
                    "Informatics2", "Bioinformatics2","BWL2","Biology2","Dental Medicine2","Design2","Mathematics2","Physics2","Chemistry2","Medicine2",
                    "Informat3ics", "Bioinf3ormatics","BW3L","Biol3ogy","Dental Medici3ne","Des33ign","Mathemat3ics","Phys3ics","Chemistry3","Medicine3",
                    "Informat34ics", "Bioinf43ormatics","B4W3L","Bio4l3ogy","Den4tal Medici3ne","Des334ign","Mathema4t3ics","Phy4s3ics","Chemist4ry3","Medic4ine3",
                    "Informat34i5cs", "Bioinf43orm5atics","B45W3L","Bio54l3ogy","Den54tal Medici3ne","Des5334ign","Mat5hema4t3ics","Phy54s3ics","Che5mist4ry3","Me5dic4ine3",
                    "Info6rmat34i5cs", "Bioinf643orm5atics","B465W3L","Bi6o54l3ogy","Den654tal Medici3ne","De6s5334ign","Ma6t5hema4t3ics","P6hy54s3ics","Ch6e5mist4ry3","Me65dic4ine3",
                    "Info6rmat374i5cs", "Bioinf7643orm5atics","B4675W3L","Bi6o754l3ogy","Den6754tal Medici3ne","De6s75334ign","Ma6t5he7ma4t3ics","P6hy574s3ics","Ch6e57mist4ry3","Me65dic74ine3",
                    "Info6rm8at374i5cs", "Bi8oinf7643orm5atics","B48675W3L","Bi68o754l3ogy","Den68754tal Medici3ne","De6s753834ign","Ma6t5h8e7ma4t3ics","P6hy5784s3ics","Ch68e57mist4ry3","Me65d8ic74ine3",
                    "Info6rm98at374i5cs", "Bi8oinf97643orm5atics","B498675W3L","Bi68o9754l3ogy","Den687594tal Medici3ne","De6s7953834ign","Ma6t5h8e7ma4t93ics","P6hy5784s39ics","Ch698e57mist4ry3","Me659d8ic74ine3",
                    "Inform11atics", "Bioi11nformatics","11BWL","Bi11ology","De11ntal Medicine","Des11ign","Mathe11matics","Physi11cs","Chemis11try","Medic11ine",
                    "Informati22cs", "Bioinf22ormatics1","B22WL1","22Biology1","Denta22l Medicine1","Desi22gn1","Ma22thematics1","Phys22ics1","Chemis22try1","Medic22ine1",
                    "Infor33matics2", "Bioinfo33rmatics2","BW33L2","Biol33ogy2","Den33tal Medicine2","De33sign2","Mathe33matics2","Phys33ics2","Chem33istry2","Medici33ne2",
                    "Infor44mat3ics", "Bi44oinf3ormatics","B44W3L","Biol443ogy","Dent44al Medici3ne","De44s33ign","M44athemat3ics","Phys443ics","Che44mistry3","Med44icine3",
                    "In554format34ics", "Bioi55nf43ormatics","B455W3L","Bio554l3ogy","Den4t55al Medici3ne","Des35534ign","Mathe55ma4t3ics","P55hy4s3ics","Chemi55st4ry3","Me55dic4ine3",
                    "Inform66at34i5cs", "Bioinf6643orm5atics","B4665W3L","Bio5664l3ogy","Den54tal Me66dici3ne","Des665334ign","Mat566hema4t3ics","Phy54s3i66cs","Che5mi66st4ry3","Me5dic664ine3",
                    "Info6rmat3477i5cs", "Bioinf77643orm5atics","B46775W3L","Bi6o5477l3ogy","Den654ta77l Medici3ne","De776s5334ign","Ma6t775hema4t3ics","P776hy54s3ics","Ch776e5mist4ry3","Me7765dic4ine3",
                    "Info6rma88t374i5cs", "Bi88oinf7643orm5atics","B488675W3L","Bi6o75488l3ogy","Den675488tal Medici3ne","De6s7885334ign","Ma6t5he7ma884t3ics","P6hy574s883ics","Ch6e57mist884ry3","Me65dic8874ine3",
                    "Info6rm899at374i5cs", "Bi8oi99nf7643orm5atics","B4899675W3L","Bi68o99754l3ogy","Den9968754tal Medici3ne","De699s753834ign","Ma6t5h899e7ma4t3ics","P6hy599784s3ics","Ch68e57mis99t4ry3","Me65d899ic74ine3",
                    "Info6r100m98at374i5cs", "Bi8100oinf97643orm5atics","B410098675W3L","Bi68o9710054l3ogy","Den687100594tal Medici3ne","De6s7100953834ign","Ma6t5h8e7m100a4t93ics","P6hy5784100s39ics","Ch698e51007mist4ry3","Me659d1008ic74ine3"
            ));

    // Course names drawn at random for each generated row.
    static ArrayList<String> courseName = new ArrayList<String>(
            Arrays.asList("Astronomy","Chemistry","Metallurgy","Physics","French","German","Arabic","Slavonic Studies","Archaeology","Social Anthropology","Biological Anthropology","Asian Studies","American Studies","Development Studies","Psychology","Plant Sciences","Pathology","Genetics","Biochemistry","Clinical pharmacology","Transfusion medicine","Civil engineering","turbomachinery","Computer Laboratory","Lab1","Lab2","Lab3","Lab4","Energy","Fluids","Information engineering","Business Research","Acadamic Research","Scientific Research","Brain mapping unit","Orthopaedic Surgery","Trauma and Orthopaedic Surgery","The Polar Museum","Earth Sciences","Modern Greek","Neo-Latin"));

    /**
     * Writes the experiment CSV to {@code sFileName}. Row shape, e.g.:
     * {@code 1,'Distributed Systems',4,'fac1'}; the faculty value cycles
     * fac1 -> fac2 -> fac3.
     *
     * Fixes over the original: the writer is now managed by try-with-resources so it
     * is always closed, a failed open no longer leads to an NPE, and the per-append
     * catch blocks (which silently kept writing to a broken stream) were removed.
     * Happy-path output is identical, including the random draw order.
     */
    private static void generateCsvFile(String sFileName) {
        Random rn = new Random();
        int counter = 1;   // unique key for section 1
        int value = 1;     // faculty cycler: reset to 1 when it reaches 4, before use
        try (FileWriter writer = new FileWriter(sFileName)) {
            // Section 1: 5000 rows keyed by the increasing counter.
            for (int i = 0; i < 5000; i++) {
                writer.append(String.valueOf(counter));
                writer.append(',');
                counter++;
                int r = rn.nextInt(courseName.size());
                writer.append("'" + courseName.get(r) + "'");
                writer.append(',');
                r = rn.nextInt(1000 - 5 + 1) + 5;
                writer.append(String.valueOf(r));
                writer.append(',');
                if (value == 4) value = 1;
                writer.append("'" + "fac" + value + "'");
                value++;
                writer.append('\n');
            }
            // Section 2: 3000 rows reusing a random existing key (1..4999).
            for (int i = 5000; i < 8000; i++) {
                int r = rn.nextInt(4999 - 1 + 1) + 1;
                writer.append(String.valueOf(r));
                writer.append(',');
                r = rn.nextInt(courseName.size());
                writer.append("'" + courseName.get(r) + "'");
                writer.append(',');
                r = rn.nextInt(1000 - 5 + 1) + 5;
                writer.append(String.valueOf(r));
                writer.append(',');
                if (value == 4) value = 1;
                writer.append("'" + "fac" + value + "'");
                value++;
                writer.append('\n');
            }
            // Section 3: 2000 key-only rows (random id in 1..4999).
            // (The original comment claiming "800001-802500 insertions" was stale.)
            for (int i = 8000; i < 10000; i++) {
                int r = rn.nextInt(4999 - 1 + 1) + 1;
                writer.append(String.valueOf(r));
                writer.append('\n');
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        generateCsvFile("src/java/client/data/ex3-courses-agg-3.csv");
    }
}
apache-2.0
rpudil/midpoint
testing/rest/src/test/java/com/evolveum/midpoint/testing/rest/TestRestService.java
32170
/* * Copyright (c) 2013-2015 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.testing.rest; import static org.testng.AssertJUnit.assertTrue; import static org.testng.AssertJUnit.assertNull; import static com.evolveum.midpoint.test.util.TestUtil.displayTestTile; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertNotNull; import static org.testng.AssertJUnit.fail; import java.io.File; import java.io.IOException; import javax.ws.rs.core.Response; import org.apache.commons.lang.StringUtils; import org.apache.cxf.endpoint.Server; import org.apache.cxf.jaxrs.JAXRSServerFactoryBean; import org.apache.cxf.jaxrs.client.ClientConfiguration; import org.apache.cxf.jaxrs.client.WebClient; import org.apache.cxf.transport.local.LocalConduit; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.annotation.DirtiesContext.ClassMode; import org.springframework.test.context.ContextConfiguration; import org.testng.AssertJUnit; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import com.evolveum.midpoint.common.InternalsConfig; import com.evolveum.midpoint.common.monitor.InternalMonitor; import com.evolveum.midpoint.model.api.ModelService; import 
com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.delta.ChangeType; import com.evolveum.midpoint.provisioning.api.ProvisioningService; import com.evolveum.midpoint.provisioning.impl.ProvisioningServiceImpl; import com.evolveum.midpoint.repo.api.RepositoryService; import com.evolveum.midpoint.repo.sql.SqlRepositoryServiceImpl; import com.evolveum.midpoint.schema.GetOperationOptions; import com.evolveum.midpoint.schema.SelectorOptions; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.result.OperationResultStatus; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.task.api.TaskManager; import com.evolveum.midpoint.test.DummyAuditService; import com.evolveum.midpoint.test.IntegrationTestTools; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.util.MiscUtil; import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationStatusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectTemplateType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import com.evolveum.midpoint.xml.ns._public.common.common_3.RoleType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemObjectsType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; @ContextConfiguration(locations = { "classpath:ctx-rest-test.xml" }) 
@DirtiesContext(classMode = ClassMode.AFTER_CLASS) public class TestRestService { private static final File BASE_DIR = new File("src/test/resources"); private static final File REPO_DIR = new File("src/test/resources/repo/"); private static final File REQ_DIR = new File("src/test/resources/req/"); public static final File USER_ADMINISTRATOR_FILE = new File(REPO_DIR, "user-administrator.xml"); public static final String USER_ADMINISTRATOR_USERNAME = "administrator"; public static final String USER_ADMINISTRATOR_PASSWORD = "5ecr3t"; // No authorization public static final File USER_NOBODY_FILE = new File(REPO_DIR, "user-nobody.xml"); public static final String USER_NOBODY_USERNAME = "nobody"; public static final String USER_NOBODY_PASSWORD = "nopassword"; // REST authorization only public static final File USER_CYCLOPS_FILE = new File(REPO_DIR, "user-cyclops.xml"); public static final String USER_CYCLOPS_USERNAME = "cyclops"; public static final String USER_CYCLOPS_PASSWORD = "cyclopassword"; // REST and reader authorization public static final File USER_SOMEBODY_FILE = new File(REPO_DIR, "user-somebody.xml"); public static final String USER_SOMEBODY_USERNAME = "somebody"; public static final String USER_SOMEBODY_PASSWORD = "somepassword"; // REST, reader and adder authorization public static final File USER_DARTHADDER_FILE = new File(REPO_DIR, "user-darthadder.xml"); public static final String USER_DARTHADDER_OID = "1696229e-d90a-11e4-9ce6-001e8c717e5b"; public static final String USER_DARTHADDER_USERNAME = "darthadder"; public static final String USER_DARTHADDER_PASSWORD = "iamyouruncle"; // Authorizations, but no password public static final File USER_NOPASSWORD_FILE = new File(REPO_DIR, "user-nopassword.xml"); public static final String USER_NOPASSWORD_USERNAME = "nopassword"; public static final File ROLE_SUPERUSER_FILE = new File(REPO_DIR, "role-superuser.xml"); public static final File ROLE_REST_FILE = new File(REPO_DIR, "role-rest.xml"); public static final 
File ROLE_READER_FILE = new File(REPO_DIR, "role-reader.xml"); public static final File ROLE_ADDER_FILE = new File(REPO_DIR, "role-adder.xml"); public static final File ROLE_MODIFIER_FILE = new File(REPO_DIR, "role-modifier.xml"); public static final String ROLE_MODIFIER_OID = "82005ae4-d90b-11e4-bdcc-001e8c717e5b"; public static final File RESOURCE_OPENDJ_FILE = new File(REPO_DIR, "reosurce-opendj.xml"); public static final String RESOURCE_OPENDJ_OID = "ef2bc95b-76e0-59e2-86d6-3d4f02d3ffff"; public static final File USER_TEMPLATE_FILE = new File(REPO_DIR, "user-template.xml"); public static final String USER_TEMPLATE_OID = "c0c010c0-d34d-b33f-f00d-777111111111"; public static final File ACCOUT_CHUCK_FILE = new File(REPO_DIR, "account-chuck.xml"); public static final String ACCOUT_CHUCK_OID = REPO_DIR + "a0c010c0-d34d-b33f-f00d-111111111666"; public static final File SYSTEM_CONFIGURATION_FILE = new File(REPO_DIR, "system-configuration.xml"); private static final Trace LOGGER = TraceManager.getTrace(TestRestService.class); private final static String ENDPOINT_ADDRESS = "http://localhost:18080/rest"; private static final File MODIFICATION_DISABLE = new File(REQ_DIR, "modification-disable.xml"); private static final File MODIFICATION_ENABLE = new File(REQ_DIR, "modification-enable.xml"); private static final File MODIFICATION_ASSIGN_ROLE_MODIFIER = new File(REQ_DIR, "modification-assign-role-modifier.xml"); private static PrismContext prismContext; private static TaskManager taskManager; private static ModelService modelService; private static Server server; private static RepositoryService repositoryService; private static ProvisioningService provisioning; private static DummyAuditService dummyAuditService; @BeforeClass public static void initialize() throws Exception { startServer(); } private static void startServer() throws Exception { ApplicationContext applicationContext = new ClassPathXmlApplicationContext("ctx-rest-test-main.xml"); LOGGER.info("Spring context 
initialized."); JAXRSServerFactoryBean sf = (JAXRSServerFactoryBean) applicationContext.getBean("restService"); sf.setAddress(ENDPOINT_ADDRESS); server = sf.create(); repositoryService = (SqlRepositoryServiceImpl) applicationContext.getBean("repositoryService"); provisioning = (ProvisioningServiceImpl) applicationContext.getBean("provisioningService"); taskManager = (TaskManager) applicationContext.getBean("taskManager"); modelService = (ModelService) applicationContext.getBean("modelController"); Task initTask = taskManager.createTaskInstance(TestRestService.class.getName() + ".startServer"); OperationResult result = initTask.getResult(); InternalsConfig.encryptionChecks = false; prismContext = (PrismContext) applicationContext.getBean("prismContext"); addObject(ROLE_SUPERUSER_FILE, result); addObject(ROLE_REST_FILE, result); addObject(ROLE_READER_FILE, result); addObject(USER_ADMINISTRATOR_FILE, result); addObject(USER_NOBODY_FILE, result); addObject(USER_CYCLOPS_FILE, result); addObject(USER_SOMEBODY_FILE, result); addObject(SYSTEM_CONFIGURATION_FILE, result); dummyAuditService = DummyAuditService.getInstance(); InternalMonitor.reset(); modelService.postInit(result); result.computeStatus(); TestUtil.assertSuccessOrWarning("startServer failed (result)", result, 1); } private static <O extends ObjectType> PrismObject<O> addObject(File file, OperationResult result) throws SchemaException, IOException, ObjectAlreadyExistsException { PrismObject<O> object = prismContext.parseObject(file); String oid = repositoryService.addObject(object, null, result); object.setOid(oid); return object; } @AfterClass public static void destroy() throws Exception { server.stop(); server.destroy(); } public TestRestService() { super(); } @Test public void test001GetUserAdministrator() { final String TEST_NAME = "test001GetUserAdministrator"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); 
dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 200); UserType userType = response.readEntity(UserType.class); assertNotNull("Returned entity in body must not be null.", userType); LOGGER.info("Returned entity: {}", userType.asPrismObject().debugDump()); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(2); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); } @Test public void test002GetNonExistingUser() { final String TEST_NAME = "test002GetNonExistingUser"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/users/12345"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 404); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(2); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); } @Test public void test003GetNoAuthHeaders() { final String TEST_NAME = "test003GetNoAuthHeaders"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(null, null); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 401); IntegrationTestTools.display("Audit", dummyAuditService); // No records. 
There are no auth headers so this is not considered to be a login attempt dummyAuditService.assertRecords(0); } @Test public void test004GetAuthBadUsernameNullPassword() { final String TEST_NAME = "test004GetAuthBadUsernameNullPassword"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient("NoSUCHuser", null); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 401); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test005GetAuthBadUsernameEmptyPassword() { final String TEST_NAME = "test005GetAuthBadUsernameEmptyPassword"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient("NoSUCHuser", ""); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 401); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test006GetAuthBadUsernameBadPassword() { final String TEST_NAME = "test006GetAuthBadUsernameBadPassword"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient("NoSUCHuser", "NoSuchPassword"); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 401); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test007GetAuthNoPassword() { final 
String TEST_NAME = "test007GetAuthNoPassword"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_ADMINISTRATOR_USERNAME, null); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 401); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test016GetAuthBadPassword() { final String TEST_NAME = "test016GetAuthBadPassword"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_ADMINISTRATOR_USERNAME, "forgot"); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 401); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test017GetUnauthorizedUser() { final String TEST_NAME = "test017GetUnauthorizedUser"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_NOBODY_USERNAME, USER_NOBODY_PASSWORD); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 403); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test018GetUserAdministratorByCyclops() { final String TEST_NAME = "test018GetUserAdministratorByCyclops"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_CYCLOPS_USERNAME, USER_CYCLOPS_PASSWORD); 
client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 403); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(2); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); } @Test public void test019GetUserAdministratorBySomebody() { final String TEST_NAME = "test019GetUserAdministratorBySomebody"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_SOMEBODY_USERNAME, USER_SOMEBODY_PASSWORD); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 200); UserType userType = response.readEntity(UserType.class); assertNotNull("Returned entity in body must not be null.", userType); LOGGER.info("Returned entity: {}", userType.asPrismObject().debugDump()); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(2); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); } @Test public void test102AddUserTemplate() throws Exception { final String TEST_NAME = "test102AddUserTemplate"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/objectTemplates"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(USER_TEMPLATE_FILE); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 201); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(4); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.ADD, ObjectTemplateType.class); } @Test public void test103AddUserBadTargetCollection() throws Exception { final String TEST_NAME = 
"test103AddUserBadTargetCollection"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/objectTemplates"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(USER_ADMINISTRATOR_FILE); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 400); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(2); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); } @Test public void test104AddAccountRaw() throws Exception { final String TEST_NAME = "test104AddAccountRaw"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/shadows"); client.query("options", "raw"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(ACCOUT_CHUCK_FILE); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 201); OperationResult parentResult = new OperationResult("get"); try { provisioning.getObject(ShadowType.class, ACCOUT_CHUCK_OID, SelectorOptions.createCollection(GetOperationOptions.createDoNotDiscovery()), null, parentResult); fail("expected object not found exception but haven't got one."); } catch (ObjectNotFoundException ex) { // this is OK..we expect objet not found, because accout was added // with the raw options which indicates, that it was created only in // the repository } IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(4); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.ADD, ShadowType.class); } @Test public void test120AddRoleAdder() throws Exception { final String TEST_NAME = "test120AddRoleAdder"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/roles"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(ROLE_ADDER_FILE); 
TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 201); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(4); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.ADD, RoleType.class); } @Test public void test121AddUserDarthAdder() throws Exception { final String TEST_NAME = "test121AddUserDarthAdder"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/users"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(USER_DARTHADDER_FILE); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 201); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(5); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.ADD, UserType.class); } @Test public void test122AddRoleModifierAsDarthAdder() throws Exception { final String TEST_NAME = "test122AddRoleModifierAsDarthAdder"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_DARTHADDER_USERNAME, USER_DARTHADDER_PASSWORD); client.path("/roles"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(ROLE_MODIFIER_FILE); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 201); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(4); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.ADD, RoleType.class); } @Test public void test123DarthAdderAssignModifierHimself() throws Exception { final String TEST_NAME = "test123DarthAdderAssignModifierHimself"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_DARTHADDER_USERNAME, USER_DARTHADDER_PASSWORD); 
client.path("/users/"+USER_DARTHADDER_OID); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(MiscUtil.readFile(MODIFICATION_ASSIGN_ROLE_MODIFIER)); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 403); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(4); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertExecutionOutcome(1, OperationResultStatus.FATAL_ERROR); } @Test public void test124DarthAdderAssignModifierByAdministrator() throws Exception { final String TEST_NAME = "test124DarthAdderAssignModifierByAdministrator"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/users/"+USER_DARTHADDER_OID); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(MiscUtil.readFile(MODIFICATION_ASSIGN_ROLE_MODIFIER)); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 204); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(4); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.MODIFY, UserType.class); OperationResult result = new OperationResult("test"); PrismObject<UserType> user = repositoryService.getObject(UserType.class, USER_DARTHADDER_OID, null, result); assertEquals("Unexpected number of assignments", 4, user.asObjectable().getAssignment().size()); } @Test public void test130DarthAdderDisableHimself() throws Exception { final String TEST_NAME = "test130DarthAdderDisableHimself"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_DARTHADDER_USERNAME, USER_DARTHADDER_PASSWORD); client.path("/users/"+USER_DARTHADDER_OID); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(MiscUtil.readFile(MODIFICATION_DISABLE)); TestUtil.displayThen(TEST_NAME); 
displayResponse(response); assertStatus(response, 204); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(5); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.MODIFY, UserType.class); OperationResult result = new OperationResult("test"); PrismObject<UserType> user = repositoryService.getObject(UserType.class, USER_DARTHADDER_OID, null, result); assertEquals("Wrong administrativeStatus", ActivationStatusType.DISABLED, user.asObjectable().getActivation().getAdministrativeStatus()); } @Test public void test131GetUserAdministratorByDarthAdder() { final String TEST_NAME = "test131GetUserAdministratorByDarthAdder"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_DARTHADDER_USERNAME, USER_DARTHADDER_PASSWORD); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 403); assertNoEmptyResponse(response); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test132DarthAdderEnableByAdministrator() throws Exception { final String TEST_NAME = "test132DarthAdderEnableByAdministrator"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/users/"+USER_DARTHADDER_OID); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(MiscUtil.readFile(MODIFICATION_ENABLE)); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 204); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(5); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.MODIFY, UserType.class); 
OperationResult result = new OperationResult("test"); PrismObject<UserType> user = repositoryService.getObject(UserType.class, USER_DARTHADDER_OID, null, result); assertEquals("Wrong administrativeStatus", ActivationStatusType.ENABLED, user.asObjectable().getActivation().getAdministrativeStatus()); } @Test public void test133GetUserAdministratorByDarthAdder() { final String TEST_NAME = "test133GetUserAdministratorByDarthAdder"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_DARTHADDER_USERNAME, USER_DARTHADDER_PASSWORD); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 200); UserType userType = response.readEntity(UserType.class); assertNotNull("Returned entity in body must not be null.", userType); LOGGER.info("Returned entity: {}", userType.asPrismObject().debugDump()); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(2); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); } @Test public void test135AddUserNopasswordAsDarthAdder() throws Exception { final String TEST_NAME = "test135AddUserNopasswordAsDarthAdder"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_DARTHADDER_USERNAME, USER_DARTHADDER_PASSWORD); client.path("/users"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(USER_NOPASSWORD_FILE); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertStatus(response, 201); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(5); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.ADD, UserType.class); } @Test public void test140GetUserAdministratorByNopassword() { final String TEST_NAME = 
"test140GetUserAdministratorByNopassword"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_NOPASSWORD_USERNAME, null); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 401); assertNoEmptyResponse(response); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test141GetUserAdministratorByNopasswordBadPassword() { final String TEST_NAME = "test140GetUserAdministratorByNopassword"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(USER_NOPASSWORD_USERNAME, "bad"); client.path("/users/" + SystemObjectsType.USER_ADMINISTRATOR.value()); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.get(); TestUtil.displayThen(TEST_NAME); assertStatus(response, 403); assertNoEmptyResponse(response); IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(1); dummyAuditService.assertFailedLogin(SchemaConstants.CHANNEL_REST_URI); } @Test public void test401AddSystemConfigurationOverwrite() throws Exception { final String TEST_NAME = "test401AddSystemConfigurationOverwrite"; displayTestTile(this, TEST_NAME); WebClient client = prepareClient(); client.path("/systemConfigurations"); client.query("options", "overwrite"); dummyAuditService.clear(); TestUtil.displayWhen(TEST_NAME); Response response = client.post(SYSTEM_CONFIGURATION_FILE); TestUtil.displayThen(TEST_NAME); displayResponse(response); assertEquals("Expected 201 but got " + response.getStatus(), 201, response.getStatus()); String location = response.getHeaderString("Location"); assertEquals( ENDPOINT_ADDRESS + "/systemConfigurations/" + SystemObjectsType.SYSTEM_CONFIGURATION.value(), location); 
IntegrationTestTools.display("Audit", dummyAuditService); dummyAuditService.assertRecords(4); dummyAuditService.assertLoginLogout(SchemaConstants.CHANNEL_REST_URI); dummyAuditService.assertHasDelta(1, ChangeType.ADD, SystemConfigurationType.class); } private WebClient prepareClient() { return prepareClient(USER_ADMINISTRATOR_USERNAME, USER_ADMINISTRATOR_PASSWORD); } private WebClient prepareClient(String username, String password) { WebClient client = WebClient.create(ENDPOINT_ADDRESS); ClientConfiguration clientConfig = WebClient.getConfig(client); clientConfig.getRequestContext().put(LocalConduit.DIRECT_DISPATCH, Boolean.TRUE); client.accept("application/xml"); if (username != null) { String authorizationHeader = "Basic " + org.apache.cxf.common.util.Base64Utility.encode((username+":"+(password==null?"":password)).getBytes()); client.header("Authorization", authorizationHeader); } return client; } private void assertStatus(Response response, int expStatus) { assertEquals("Expected "+expStatus+" but got " + response.getStatus(), expStatus, response.getStatus()); } private void assertNoEmptyResponse(Response response) { String respBody = response.readEntity(String.class); assertTrue("Unexpected reposponse: "+respBody, StringUtils.isBlank(respBody)); } private void displayResponse(Response response) { LOGGER.info("response : {} ", response.getStatus()); LOGGER.info("response : {} ", response.getStatusInfo().getReasonPhrase()); } }
apache-2.0
denisneuling/cctrl.jar
cctrl-api/src/main/java/com/cloudcontrolled/api/response/normalize/ListDeploymentNormalizer.java
1275
/*
 * Copyright 2012 Denis Neuling
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudcontrolled.api.response.normalize;

import java.util.HashMap;
import java.util.Map;

import org.json.JSONArray;
import org.json.JSONObject;

/**
 * Normalizer for the "list deployments" response.
 * <p>
 * The backend answers with a bare JSON array; this normalizer wraps that
 * array in an object under the {@code "deployments"} key so the payload can
 * be unmarshalled like every other object-shaped response.
 *
 * @author Denis Neuling (denisneuling@gmail.com)
 */
public class ListDeploymentNormalizer extends JSONNormalizer {

	/** {@inheritDoc} */
	@Override
	public String normalize(String jsonResponse) throws Exception {
		// Parse the raw top-level array, then re-emit it nested under a
		// single well-known key.
		JSONArray deploymentArray = new JSONArray(jsonResponse);
		JSONObject wrapped = new JSONObject();
		wrapped.put("deployments", deploymentArray);
		return wrapped.toString();
	}
}
apache-2.0
JoeSteven/HuaBan
app/src/main/java/com/joe/zatuji/api/BmobService.java
4319
package com.joe.zatuji.api;

import com.google.gson.JsonElement;
import com.joe.zatuji.repo.bean.BaseBmobBean;
import com.joe.zatuji.repo.bean.BaseListBean;
import com.joe.zatuji.repo.bean.BmobFile;
import com.joe.zatuji.repo.bean.UpdateBean;
import com.joe.zatuji.repo.bean.User;
import com.joe.zatuji.repo.bean.request.ChangePwdParams;
import com.joe.zatuji.repo.bean.request.RegisterParams;
import com.joe.zatuji.repo.bean.request.ResetPwdParams;
import com.joe.zatuji.repo.bean.request.UserUpdateParams;

import io.reactivex.Single;
import okhttp3.MultipartBody;
import okhttp3.RequestBody;
import retrofit2.http.Body;
import retrofit2.http.DELETE;
import retrofit2.http.GET;
import retrofit2.http.POST;
import retrofit2.http.PUT;
import retrofit2.http.Part;
import retrofit2.http.Path;
import retrofit2.http.Query;

/**
 * Retrofit definition of the Bmob REST backend.
 * Created by joe on 16/5/21.
 */
public interface BmobService {

    // ---- User-related endpoints below ----

    /**
     * Registers a new account.
     * The request body must be JSON, e.g.
     * {"username":"110@qq.com",
     *  "password":"123",
     *  "nickname":"haha"}
     */
    @POST("1/users")
    Single<User> register(@Body RegisterParams params);

    /**
     * Logs in; credentials are passed as query parameters.
     */
    @GET("1/login")
    Single<User> login(@Query("username") String username, @Query("password") String password);

    /** Requests a password-reset e-mail for the account in the body. */
    @POST("1/requestPasswordReset")
    Single<BaseBmobBean> resetPwd(@Body ResetPwdParams params);

    /** Uploads a raw file body; the server returns its CDN descriptor. */
    @POST("2/files/{fileName}")
    Single<BmobFile> uploadFile(@Path("fileName") String fileName, @Body RequestBody file);

    /**
     * Looks up a user by object id.
     */
    @GET("1/users/{objectId}")
    Single<User> queryUserInfo(@Path("objectId") String userId);

    /**
     * Checks whether the user's session token has expired.
     */
    @GET("1/checkSession/{objectId}")
    Single<BaseBmobBean> checkTokenExpired(@Path("objectId") String userId);

    /**
     * Updates the user profile.
     * params: userID; body must be JSON.
     */
    @PUT("1/users/{objectId}")
    Single<BaseBmobBean> updateUser(@Path("objectId") String userId, @Body UserUpdateParams params);

    /** Queries the _User table with an arbitrary JSON "where" filter. */
    @GET("1/classes/_User")
    Single<BaseListBean<User>> queryUser(@Query("where") JsonElement query);

    /**
     * Changes the password.
     * params: user_id, new_password; body must be JSON.
     */
    @PUT("1/updateUserPassword/{objectId}")
    Single<BaseBmobBean> changePassword(@Path("objectId") String userId, @Body ChangePwdParams query);

    // ---- End of user-related endpoints ----

//    @DELETE("2/files/{cdn}")
//    Single<BaseBmobBean> deleteAvatar(@Path("cdn") String cdn);

    // ---- Feedback endpoint (currently disabled) ----
//    /** Submits user feedback. */
//    @POST("1/classes/FeedBackBean")
//    Single<BaseBmobBean> feedBack(@Body JsonElement feedback);

    /** Fetches the version table, newest first, to check for app updates. */
    @GET("1/classes/AppVersion?order=-createdAt")
    Single<BaseListBean<UpdateBean>> checkUpdate();

    // ---- Generic data-table endpoints ----

    /**
     * Queries a single row by object id.
     */
    @GET("1/classes/{TableName}/{objectId}")
    Single<JsonElement> querySingle(@Path("TableName") String table, @Path("objectId") String objectId);

    /**
     * Queries multiple rows with filter, ordering and paging.
     */
    @GET("1/classes/{TableName}")
    Single<JsonElement> query(@Path("TableName") String table, @Query("where") JsonElement query,
                              @Query("order") String order, @Query("limit") int limit, @Query("skip") int skip);

    /** Queries multiple rows with filter and ordering only (no paging). */
    @GET("1/classes/{TableName}")
    Single<JsonElement> query(@Path("TableName") String table, @Query("where") JsonElement query,
                              @Query("order") String order);

    /** Inserts a row into the given table. */
    @POST("1/classes/{TableName}")
    Single<BaseBmobBean> add(@Path("TableName") String table, @Body JsonElement body);

    /** Updates the row identified by objectId. */
    @PUT("1/classes/{TableName}/{objectId}")
    Single<BaseBmobBean> update(@Path("TableName") String table, @Path("objectId") String objectId,
                                @Body JsonElement body);

    /** Deletes the row identified by objectId. */
    @DELETE("1/classes/{TableName}/{objectId}")
    Single<BaseBmobBean> delete(@Path("TableName") String table, @Path("objectId") String objectId);

//    @POST("1/batch")
//    Call<ResponseBody> multi(@Body JsonElement jsonElement);

    // ---- Welcome-screen endpoints (currently disabled) ----
//    /**
//     * Splash-screen cover (production).
//     */
//    @POST("1/functions/welcome")
//    Single<WelcomeCover> getWelcomeCover(@Body JsonElement body);
//    /**
//     * Splash-screen cover (debug).
//     */
//    @POST("1/functions/debugWelcome")
//    Single<WelcomeCover> getWelcomeCoverDebug(@Body JsonElement body);
}
apache-2.0
ZhenchaoWang/passport-oauth
oauth-dal/src/main/java/org/zhenchao/oauth/dao/AuthorizeRelationMapper.java
801
package org.zhenchao.oauth.dao;

import java.util.List;

import org.apache.ibatis.annotations.Param;
import org.zhenchao.oauth.entity.AuthorizeRelation;
import org.zhenchao.oauth.entity.AuthorizeRelationExample;

/**
 * MyBatis mapper for the authorize-relation table.
 * <p>
 * Follows the standard MyBatis-generator example pattern: each method takes an
 * {@link AuthorizeRelationExample} that encodes the WHERE criteria.
 */
public interface AuthorizeRelationMapper {

    /** Counts rows matching the example's criteria. */
    long countByExample(AuthorizeRelationExample example);

    /** Deletes rows matching the example's criteria; returns affected row count. */
    int deleteByExample(AuthorizeRelationExample example);

    /** Inserts a full record (all columns, even nulls); returns affected row count. */
    int insert(AuthorizeRelation record);

    /** Inserts only the non-null fields of the record; returns affected row count. */
    int insertSelective(AuthorizeRelation record);

    /** Returns all rows matching the example's criteria. */
    List<AuthorizeRelation> selectByExample(AuthorizeRelationExample example);

    /** Updates only the non-null fields of {@code record} on matching rows. */
    int updateByExampleSelective(@Param("record") AuthorizeRelation record, @Param("example") AuthorizeRelationExample example);

    /** Updates all fields of {@code record} on matching rows. */
    int updateByExample(@Param("record") AuthorizeRelation record, @Param("example") AuthorizeRelationExample example);
}
apache-2.0
dave-tucker/intellij-yang
gen/com/intellij/lang/psi/impl/YangLeafStmtImpl.java
3407
/*
 * Copyright 2014 Red Hat, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// This is a generated file. Not intended for manual editing.
package com.intellij.lang.psi.impl;

import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.util.PsiTreeUtil;
import static com.intellij.lang.yang.psi.YangTypes.*;
import com.intellij.extapi.psi.ASTWrapperPsiElement;
import com.intellij.lang.psi.*;

/**
 * Generated PSI implementation for a YANG {@code leaf} statement.
 * Each accessor returns the child statement nodes of the corresponding type
 * (possibly empty lists); {@link #getString()} returns the mandatory leaf
 * identifier/argument node.
 */
public class YangLeafStmtImpl extends ASTWrapperPsiElement implements YangLeafStmt {

  public YangLeafStmtImpl(ASTNode node) {
    super(node);
  }

  // Dispatches to the YANG-specific visitor when one is supplied.
  public void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof YangVisitor) ((YangVisitor)visitor).visitLeafStmt(this);
    else super.accept(visitor);
  }

  @Override
  @NotNull
  public List<YangConfigStmt> getConfigStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangConfigStmt.class);
  }

  @Override
  @NotNull
  public List<YangDefaultStmt> getDefaultStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangDefaultStmt.class);
  }

  @Override
  @NotNull
  public List<YangDescriptionStmt> getDescriptionStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangDescriptionStmt.class);
  }

  @Override
  @NotNull
  public List<YangIdentifierStmt> getIdentifierStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangIdentifierStmt.class);
  }

  @Override
  @NotNull
  public List<YangIfFeatureStmt> getIfFeatureStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangIfFeatureStmt.class);
  }

  @Override
  @NotNull
  public List<YangMandatoryStmt> getMandatoryStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangMandatoryStmt.class);
  }

  @Override
  @NotNull
  public List<YangMustStmt> getMustStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangMustStmt.class);
  }

  @Override
  @NotNull
  public List<YangReferenceStmt> getReferenceStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangReferenceStmt.class);
  }

  @Override
  @NotNull
  public List<YangStatusStmt> getStatusStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangStatusStmt.class);
  }

  // The leaf's argument node; generated grammar guarantees its presence,
  // hence the not-null find.
  @Override
  @NotNull
  public YangString getString() {
    return findNotNullChildByClass(YangString.class);
  }

  @Override
  @NotNull
  public List<YangTypeStmt> getTypeStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangTypeStmt.class);
  }

  @Override
  @NotNull
  public List<YangUnitsStmt> getUnitsStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangUnitsStmt.class);
  }

  @Override
  @NotNull
  public List<YangWhenStmt> getWhenStmtList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, YangWhenStmt.class);
  }
}
apache-2.0
simararneja/phoenix
phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
7907
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.pherf.workload;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.phoenix.pherf.PherfConstants.RunMode;
import org.apache.phoenix.pherf.configuration.XMLConfigParser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.phoenix.pherf.result.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.phoenix.pherf.configuration.DataModel;
import org.apache.phoenix.pherf.configuration.ExecutionType;
import org.apache.phoenix.pherf.configuration.Query;
import org.apache.phoenix.pherf.configuration.QuerySet;
import org.apache.phoenix.pherf.configuration.Scenario;
import org.apache.phoenix.pherf.util.PhoenixUtil;

/**
 * Drives the query side of a Pherf workload: for every configured
 * {@link DataModel} it either exports query results to CSV or executes all
 * scenarios (serially or in parallel per query set) and records timings via
 * the result framework.
 */
public class QueryExecutor {
    private static final Logger logger = LoggerFactory.getLogger(QueryExecutor.class);
    // Data models parsed from the XML configuration at construction time.
    private List<DataModel> dataModels;
    // Optional hint applied to every executed query (set in execute()).
    private String queryHint;
    // FUNCTIONAL runs a diffing executor; other modes run the timing executor.
    private RunMode runMode;

    public QueryExecutor(XMLConfigParser parser) {
        this.dataModels = parser.getDataModels();
    }

    /**
     * Entry point: runs every data model, either exporting to CSV or executing
     * all scenarios in the chosen run mode.
     *
     * @param queryHint hint string injected into each query (may be null)
     * @param exportCSV when true, export query results instead of timing them
     * @param runMode   FUNCTIONAL (diff) vs PERFORMANCE (timing) execution
     * @throws Exception on any configuration, JDBC or threading failure
     */
    public void execute(String queryHint, boolean exportCSV, RunMode runMode) throws Exception {
        this.queryHint = queryHint;
        this.runMode = runMode;
        for (DataModel dataModel: dataModels) {
            if (exportCSV) {
                exportAllScenarios(dataModel);
            } else {
                executeAllScenarios(dataModel);
            }
        }
    }

    /**
     * Export all queries results to CSV
     * @param dataModel
     * @throws Exception
     */
    protected void exportAllScenarios(DataModel dataModel) throws Exception {
        List<Scenario> scenarios = dataModel.getScenarios();
        QueryVerifier exportRunner = new QueryVerifier(false);
        for (Scenario scenario : scenarios) {
            for (QuerySet querySet : scenario.getQuerySet()) {
                // DDLs first, so the exported queries run against prepared tables.
                executeQuerySetDdls(querySet);
                for (Query query : querySet.getQuery()) {
                    exportRunner.exportCSV(query);
                }
            }
        }
    }

    /**
     * Execute all scenarios of the model, building a result tree
     * (DataModelResult -> ScenarioResult -> QuerySetResult -> QueryResult)
     * and writing it out after each scenario and once at the end.
     * @param dataModel
     * @throws Exception
     */
    protected void executeAllScenarios(DataModel dataModel) throws Exception {
        List<DataModelResult> dataModelResults = new ArrayList<DataModelResult>();
        DataModelResult dataModelResult = new DataModelResult(dataModel, PhoenixUtil.getZookeeper());
        ResultManager resultManager = new ResultManager(dataModelResult.getName(), this.runMode);

        dataModelResults.add(dataModelResult);
        List<Scenario> scenarios = dataModel.getScenarios();
        Configuration conf = HBaseConfiguration.create();
        // Capture phoenix.* and sfdc.* properties so they are recorded with the results.
        Map<String, String> phoenixProperty = conf.getValByRegex("phoenix");
        phoenixProperty.putAll(conf.getValByRegex("sfdc"));

        for (Scenario scenario : scenarios) {
            ScenarioResult scenarioResult = new ScenarioResult(scenario);
            scenarioResult.setPhoenixProperties(phoenixProperty);
            dataModelResult.getScenarioResult().add(scenarioResult);

            for (QuerySet querySet : scenario.getQuerySet()) {
                QuerySetResult querySetResult = new QuerySetResult(querySet);
                scenarioResult.getQuerySetResult().add(querySetResult);

                // Run DDLs up-front; they must not interleave with query threads.
                executeQuerySetDdls(querySet);

                // NOTE(review): "execcute" spelling is kept as-is — these are
                // protected and renaming would break subclasses.
                if (querySet.getExecutionType() == ExecutionType.SERIAL) {
                    execcuteQuerySetSerial(dataModelResult, querySet, querySetResult, scenarioResult);
                } else {
                    execcuteQuerySetParallel(dataModelResult, querySet, querySetResult, scenarioResult);
                }
            }
            resultManager.write(dataModelResult);
        }
        resultManager.write(dataModelResults);
    }

    /**
     * Execute all querySet DDLs first based on tenantId if specified. This is executed
     * first since we don't want to run DDLs in parallel to executing queries.
     *
     * @param querySet
     * @throws Exception
     */
    protected void executeQuerySetDdls(QuerySet querySet) throws Exception {
        PhoenixUtil pUtil = new PhoenixUtil();
        for (Query query : querySet.getQuery()) {
            if (null != query.getDdl()) {
                Connection conn = null;
                try {
                    logger.info("\nExecuting DDL:" + query.getDdl() + " on tenantId:"
                            + query.getTenantId());
                    // conn is assigned inside the call so the finally block can close it.
                    pUtil.executeStatement(query.getDdl(), conn = pUtil.getConnection(query.getTenantId()));
                } finally {
                    if (null != conn) {
                        conn.close();
                    }
                }
            }
        }
    }

    /**
     * Execute query set serially: for each query, ramp concurrency from
     * minConcurrency to maxConcurrency, joining all threads of one level
     * before starting the next.
     * @param dataModelResult
     * @param querySet
     * @param querySetResult
     * @param scenario
     * @throws InterruptedException
     */
    protected void execcuteQuerySetSerial(DataModelResult dataModelResult, QuerySet querySet, QuerySetResult querySetResult, Scenario scenario) throws InterruptedException {
        for (Query query : querySet.getQuery()) {
            QueryResult queryResult = new QueryResult(query);
            querySetResult.getQueryResults().add(queryResult);

            for (int cr = querySet.getMinConcurrency(); cr <= querySet
                    .getMaxConcurrency(); cr++) {

                List<Thread> threads = new ArrayList<Thread>();

                for (int i = 0; i < cr; i++) {
                    // Thread name encodes "index,concurrency-level".
                    Thread thread = executeRunner((i + 1) + "," + cr,
                            dataModelResult, queryResult, querySetResult);
                    threads.add(thread);
                }

                for (Thread thread : threads) {
                    thread.join();
                }
            }
        }
    }

    /**
     * Execute query set in parallel: at each concurrency level, launch every
     * query of the set simultaneously, then join them all.
     * @param dataModelResult
     * @param querySet
     * @param querySetResult
     * @param scenario
     * @throws InterruptedException
     */
    protected void execcuteQuerySetParallel(DataModelResult dataModelResult, QuerySet querySet, QuerySetResult querySetResult, Scenario scenario)
            throws InterruptedException {

        for (int cr = querySet.getMinConcurrency(); cr <= querySet
                .getMaxConcurrency(); cr++) {
            List<Thread> threads = new ArrayList<Thread>();
            for (int i = 0; i < cr; i++) {
                for (Query query : querySet.getQuery()) {
                    // NOTE(review): a fresh QueryResult is added per (thread, query)
                    // pair here, unlike the serial path — presumably intentional;
                    // confirm against the result aggregation.
                    QueryResult queryResult = new QueryResult(query);
                    querySetResult.getQueryResults().add(queryResult);

                    Thread thread = executeRunner((i + 1) + "," + cr,
                            dataModelResult, queryResult, querySetResult);
                    threads.add(thread);
                }
            }
            for (Thread thread : threads) {
                thread.join();
            }
        }
    }

    /**
     * Start one runner thread for a single query execution: a diffing runner
     * in FUNCTIONAL mode, a timing runner otherwise. The returned thread is
     * already started; callers are expected to join it.
     * @param name thread name in "index,concurrency" form
     * @param dataModelResult
     * @param queryResult
     * @param querySet
     * @return the started worker thread
     */
    protected Thread executeRunner(String name, DataModelResult dataModelResult, QueryResult queryResult, QuerySet querySet) {
        ThreadTime threadTime = new ThreadTime();
        queryResult.getThreadTimes().add(threadTime);
        threadTime.setThreadName(name);
        queryResult.setHint(this.queryHint);
        logger.info("\nExecuting query " + queryResult.getStatement());
        Thread thread;
        if (this.runMode == RunMode.FUNCTIONAL) {
            thread = new MultithreadedDiffer(
                    threadTime.getThreadName(),
                    queryResult,
                    threadTime,
                    querySet.getNumberOfExecutions(),
                    querySet.getExecutionDurationInMs())
                    .start();
        } else {
            thread = new MultithreadedRunner(
                    threadTime.getThreadName(),
                    queryResult,
                    dataModelResult,
                    threadTime,
                    querySet.getNumberOfExecutions(),
                    querySet.getExecutionDurationInMs())
                    .start();
        }
        return thread;
    }
}
apache-2.0
MaDaPHaKa/Orient-object
server/src/main/java/com/orientechnologies/orient/server/replication/conflict/OReplicationConflictException.java
2572
/*
 * Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.orientechnologies.orient.server.replication.conflict;

import com.orientechnologies.common.exception.OException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;

/**
 * Raised when two replicating servers are not aligned: the record version
 * proposed by the caller does not match the version known by the database.
 *
 * @author Luca Garulli (l.garulli--at--orientechnologies.com)
 */
public class OReplicationConflictException extends OException {
	private static final String MESSAGE_RECORD_VERSION = "your=v";
	private static final String MESSAGE_DB_VERSION = "db=v";

	private static final long serialVersionUID = 1L;

	private final ORID rid;
	private final int databaseVersion;
	private final int recordVersion;

	/**
	 * Rebuilds the original exception by parsing its formatted message,
	 * which embeds "#cluster:position", "db=v&lt;N&gt; " and "your=v&lt;M&gt;)".
	 */
	public OReplicationConflictException(final String message) {
		super(message);

		// Record identity: from the '#' prefix up to the next blank.
		final int ridBegin = message.indexOf(ORID.PREFIX);
		final int ridEnd = message.indexOf(' ', ridBegin);
		rid = new ORecordId(message.substring(ridBegin, ridEnd));

		// Database-side version: digits between "db=v" and the next blank.
		final int dbBegin = message.indexOf(MESSAGE_DB_VERSION, ridEnd) + MESSAGE_DB_VERSION.length();
		final int dbEnd = message.indexOf(' ', dbBegin);
		databaseVersion = Integer.parseInt(message.substring(dbBegin, dbEnd));

		// Caller-side version: digits between "your=v" and the closing ')'.
		final int recBegin = message.indexOf(MESSAGE_RECORD_VERSION, dbEnd) + MESSAGE_RECORD_VERSION.length();
		final int recEnd = message.indexOf(')', recBegin);
		recordVersion = Integer.parseInt(message.substring(recBegin, recEnd));
	}

	/** Builds the exception from already-known conflict details. */
	public OReplicationConflictException(final String message, final ORID iRID, final int iDatabaseVersion,
			final int iRecordVersion) {
		super(message);
		rid = iRID;
		databaseVersion = iDatabaseVersion;
		recordVersion = iRecordVersion;
	}

	/** @return the record version known by the database side */
	public int getDatabaseVersion() {
		return databaseVersion;
	}

	/** @return the record version supplied by the caller */
	public int getRecordVersion() {
		return recordVersion;
	}

	/** @return the identity of the conflicting record */
	public ORID getRid() {
		return rid;
	}
}
apache-2.0
spotify/heroic
heroic-component/src/main/java/com/spotify/heroic/metric/RequestError.java
1323
/*
 * Copyright (c) 2015 Spotify AB.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.spotify.heroic.metric;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;

/**
 * A single error attached to a (partially failed) metric request.
 * <p>
 * Serialized polymorphically by Jackson using the {@code "type"} property to
 * pick the concrete subtype: {@code "node"}, {@code "shard"} or {@code "query"}.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes({
    @JsonSubTypes.Type(value = NodeError.class, name = "node"),
    @JsonSubTypes.Type(value = ShardError.class, name = "shard"),
    @JsonSubTypes.Type(value = QueryError.class, name = "query")
})
public interface RequestError {
    /** Human-readable description of what went wrong. */
    String getError();
}
apache-2.0
scalingdata/Impala
thirdparty/hive-1.2.1.2.3.0.0-2557/src/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
268414
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore; import static org.apache.commons.lang.StringUtils.join; import java.io.IOException; import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.regex.Pattern; import javax.jdo.JDODataStoreException; import javax.jdo.JDOHelper; import javax.jdo.JDOObjectNotFoundException; import javax.jdo.PersistenceManager; import javax.jdo.PersistenceManagerFactory; import javax.jdo.Query; import javax.jdo.Transaction; import javax.jdo.datastore.DataStoreCache; import javax.jdo.identity.IntIdentity; import org.antlr.runtime.CommonTokenStream; import org.antlr.runtime.RecognitionException; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.FunctionType; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.HiveObjectType; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidPartitionException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.NotificationEvent; import org.apache.hadoop.hive.metastore.api.NotificationEventRequest; import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionEventType; import 
org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.ResourceType; import org.apache.hadoop.hive.metastore.api.ResourceUri; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; import org.apache.hadoop.hive.metastore.api.UnknownDBException; import org.apache.hadoop.hive.metastore.api.UnknownPartitionException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.metastore.model.MColumnDescriptor; import org.apache.hadoop.hive.metastore.model.MDBPrivilege; import org.apache.hadoop.hive.metastore.model.MDatabase; import org.apache.hadoop.hive.metastore.model.MDelegationToken; import org.apache.hadoop.hive.metastore.model.MFieldSchema; import org.apache.hadoop.hive.metastore.model.MFunction; import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege; import org.apache.hadoop.hive.metastore.model.MIndex; import org.apache.hadoop.hive.metastore.model.MMasterKey; import org.apache.hadoop.hive.metastore.model.MNotificationLog; import org.apache.hadoop.hive.metastore.model.MNotificationNextId; import org.apache.hadoop.hive.metastore.model.MOrder; import org.apache.hadoop.hive.metastore.model.MPartition; import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege; import org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics; import org.apache.hadoop.hive.metastore.model.MPartitionEvent; import org.apache.hadoop.hive.metastore.model.MPartitionPrivilege; import 
org.apache.hadoop.hive.metastore.model.MResourceUri; import org.apache.hadoop.hive.metastore.model.MRole; import org.apache.hadoop.hive.metastore.model.MRoleMap; import org.apache.hadoop.hive.metastore.model.MSerDeInfo; import org.apache.hadoop.hive.metastore.model.MStorageDescriptor; import org.apache.hadoop.hive.metastore.model.MStringList; import org.apache.hadoop.hive.metastore.model.MTable; import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege; import org.apache.hadoop.hive.metastore.model.MTableColumnStatistics; import org.apache.hadoop.hive.metastore.model.MTablePrivilege; import org.apache.hadoop.hive.metastore.model.MType; import org.apache.hadoop.hive.metastore.model.MVersionTable; import org.apache.hadoop.hive.metastore.parser.ExpressionTree; import org.apache.hadoop.hive.metastore.parser.ExpressionTree.ANTLRNoCaseStringStream; import org.apache.hadoop.hive.metastore.parser.ExpressionTree.FilterBuilder; import org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode; import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator; import org.apache.hadoop.hive.metastore.parser.FilterLexer; import org.apache.hadoop.hive.metastore.parser.FilterParser; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.HiveStringUtils; import org.apache.thrift.TException; import org.datanucleus.store.rdbms.exceptions.MissingTableException; import com.google.common.collect.Lists; /** * This class is the interface between the application logic and the database * store that contains the objects. 
Refrain putting any logic in mode.M* objects * or in this file as former could be auto generated and this class would need * to be made into a interface that can read both from a database and a * filestore. */ public class ObjectStore implements RawStore, Configurable { private static Properties prop = null; private static PersistenceManagerFactory pmf = null; private static Lock pmfPropLock = new ReentrantLock(); /** * Verify the schema only once per JVM since the db connection info is static */ private final static AtomicBoolean isSchemaVerified = new AtomicBoolean(false); private static final Log LOG = LogFactory.getLog(ObjectStore.class.getName()); private static enum TXN_STATUS { NO_STATE, OPEN, COMMITED, ROLLBACK } private static final Map<String, Class> PINCLASSMAP; private static final String HOSTNAME; private static final String USER; static { Map<String, Class> map = new HashMap<String, Class>(); map.put("table", MTable.class); map.put("storagedescriptor", MStorageDescriptor.class); map.put("serdeinfo", MSerDeInfo.class); map.put("partition", MPartition.class); map.put("database", MDatabase.class); map.put("type", MType.class); map.put("fieldschema", MFieldSchema.class); map.put("order", MOrder.class); PINCLASSMAP = Collections.unmodifiableMap(map); String hostname = "UNKNOWN"; try { InetAddress clientAddr = InetAddress.getLocalHost(); hostname = clientAddr.getHostAddress(); } catch (IOException e) { } HOSTNAME = hostname; String user = System.getenv("USER"); if (user == null) { USER = "UNKNOWN"; } else { USER = user; } } private boolean isInitialized = false; private PersistenceManager pm = null; private MetaStoreDirectSql directSql = null; private PartitionExpressionProxy expressionProxy = null; private Configuration hiveConf; int openTrasactionCalls = 0; private Transaction currentTransaction = null; private TXN_STATUS transactionStatus = TXN_STATUS.NO_STATE; private Pattern partitionValidationPattern; public ObjectStore() { } @Override public 
Configuration getConf() { return hiveConf; } /** * Called whenever this object is instantiated using ReflectionUtils, and also * on connection retries. In cases of connection retries, conf will usually * contain modified values. */ @Override @SuppressWarnings("nls") public void setConf(Configuration conf) { // Although an instance of ObjectStore is accessed by one thread, there may // be many threads with ObjectStore instances. So the static variables // pmf and prop need to be protected with locks. pmfPropLock.lock(); try { isInitialized = false; hiveConf = conf; Properties propsFromConf = getDataSourceProps(conf); boolean propsChanged = !propsFromConf.equals(prop); if (propsChanged) { pmf = null; prop = null; } assert(!isActiveTransaction()); shutdown(); // Always want to re-create pm as we don't know if it were created by the // most recent instance of the pmf pm = null; directSql = null; expressionProxy = null; openTrasactionCalls = 0; currentTransaction = null; transactionStatus = TXN_STATUS.NO_STATE; initialize(propsFromConf); String partitionValidationRegex = hiveConf.get(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.name()); if (partitionValidationRegex != null && partitionValidationRegex.equals("")) { partitionValidationPattern = Pattern.compile(partitionValidationRegex); } else { partitionValidationPattern = null; } if (!isInitialized) { throw new RuntimeException( "Unable to create persistence manager. 
Check dss.log for details"); } else { LOG.info("Initialized ObjectStore"); } } finally { pmfPropLock.unlock(); } } private ClassLoader classLoader; { classLoader = Thread.currentThread().getContextClassLoader(); if (classLoader == null) { classLoader = ObjectStore.class.getClassLoader(); } } @SuppressWarnings("nls") private void initialize(Properties dsProps) { LOG.info("ObjectStore, initialize called"); prop = dsProps; pm = getPersistenceManager(); isInitialized = pm != null; if (isInitialized) { expressionProxy = createExpressionProxy(hiveConf); directSql = new MetaStoreDirectSql(pm, hiveConf); } LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm + " created in the thread with id: " + Thread.currentThread().getId()); } /** * Creates the proxy used to evaluate expressions. This is here to prevent circular * dependency - ql -&gt; metastore client &lt;-&gt metastore server -&gt ql. If server and * client are split, this can be removed. * @param conf Configuration. * @return The partition expression proxy. */ private static PartitionExpressionProxy createExpressionProxy(Configuration conf) { String className = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS); try { @SuppressWarnings("unchecked") Class<? extends PartitionExpressionProxy> clazz = (Class<? extends PartitionExpressionProxy>)MetaStoreUtils.getClass(className); return MetaStoreUtils.newInstance( clazz, new Class<?>[0], new Object[0]); } catch (MetaException e) { LOG.error("Error loading PartitionExpressionProxy", e); throw new RuntimeException("Error loading PartitionExpressionProxy: " + e.getMessage()); } } /** * Properties specified in hive-default.xml override the properties specified * in jpox.properties. 
*/ @SuppressWarnings("nls") private static Properties getDataSourceProps(Configuration conf) { Properties prop = new Properties(); Iterator<Map.Entry<String, String>> iter = conf.iterator(); while (iter.hasNext()) { Map.Entry<String, String> e = iter.next(); if (e.getKey().contains("datanucleus") || e.getKey().contains("jdo")) { Object prevVal = prop.setProperty(e.getKey(), conf.get(e.getKey())); if (LOG.isDebugEnabled() && !e.getKey().equals(HiveConf.ConfVars.METASTOREPWD.varname)) { LOG.debug("Overriding " + e.getKey() + " value " + prevVal + " from jpox.properties with " + e.getValue()); } } } // Password may no longer be in the conf, use getPassword() try { String passwd = ShimLoader.getHadoopShims().getPassword(conf, HiveConf.ConfVars.METASTOREPWD.varname); if (passwd != null && !passwd.isEmpty()) { prop.setProperty(HiveConf.ConfVars.METASTOREPWD.varname, passwd); } } catch (IOException err) { throw new RuntimeException("Error getting metastore password: " + err.getMessage(), err); } if (LOG.isDebugEnabled()) { for (Entry<Object, Object> e : prop.entrySet()) { if (!e.getKey().equals(HiveConf.ConfVars.METASTOREPWD.varname)) { LOG.debug(e.getKey() + " = " + e.getValue()); } } } return prop; } private static synchronized PersistenceManagerFactory getPMF() { if (pmf == null) { pmf = JDOHelper.getPersistenceManagerFactory(prop); DataStoreCache dsc = pmf.getDataStoreCache(); if (dsc != null) { HiveConf conf = new HiveConf(ObjectStore.class); String objTypes = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_CACHE_PINOBJTYPES); LOG.info("Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes=\"" + objTypes + "\""); if (objTypes != null && objTypes.length() > 0) { objTypes = objTypes.toLowerCase(); String[] typeTokens = objTypes.split(","); for (String type : typeTokens) { type = type.trim(); if (PINCLASSMAP.containsKey(type)) { dsc.pinAll(true, PINCLASSMAP.get(type)); } else { LOG.warn(type + " is not one of the pinnable object types: " + 
org.apache.commons.lang.StringUtils.join(PINCLASSMAP.keySet(), " ")); } } } } else { LOG.warn("PersistenceManagerFactory returned null DataStoreCache object. Unable to initialize object pin types defined by hive.metastore.cache.pinobjtypes"); } } return pmf; } @InterfaceAudience.LimitedPrivate({"HCATALOG"}) @InterfaceStability.Evolving public PersistenceManager getPersistenceManager() { return getPMF().getPersistenceManager(); } @Override public void shutdown() { if (pm != null) { LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm + " will be shutdown"); pm.close(); } } /** * Opens a new one or the one already created Every call of this function must * have corresponding commit or rollback function call * * @return an active transaction */ @Override public boolean openTransaction() { openTrasactionCalls++; if (openTrasactionCalls == 1) { currentTransaction = pm.currentTransaction(); currentTransaction.begin(); transactionStatus = TXN_STATUS.OPEN; } else { // openTransactionCalls > 1 means this is an interior transaction // We should already have a transaction created that is active. if ((currentTransaction == null) || (!currentTransaction.isActive())){ throw new RuntimeException("openTransaction called in an interior" + " transaction scope, but currentTransaction is not active."); } } boolean result = currentTransaction.isActive(); debugLog("Open transaction: count = " + openTrasactionCalls + ", isActive = " + result); return result; } /** * if this is the commit of the first open call then an actual commit is * called. * * @return Always returns true */ @Override @SuppressWarnings("nls") public boolean commitTransaction() { if (TXN_STATUS.ROLLBACK == transactionStatus) { debugLog("Commit transaction: rollback"); return false; } if (openTrasactionCalls <= 0) { RuntimeException e = new RuntimeException("commitTransaction was called but openTransactionCalls = " + openTrasactionCalls + ". 
This probably indicates that there are unbalanced " + "calls to openTransaction/commitTransaction"); LOG.error(e); throw e; } if (!currentTransaction.isActive()) { RuntimeException e = new RuntimeException("commitTransaction was called but openTransactionCalls = " + openTrasactionCalls + ". This probably indicates that there are unbalanced " + "calls to openTransaction/commitTransaction"); LOG.error(e); throw e; } openTrasactionCalls--; debugLog("Commit transaction: count = " + openTrasactionCalls + ", isactive "+ currentTransaction.isActive()); if ((openTrasactionCalls == 0) && currentTransaction.isActive()) { transactionStatus = TXN_STATUS.COMMITED; currentTransaction.commit(); } return true; } /** * @return true if there is an active transaction. If the current transaction * is either committed or rolled back it returns false */ public boolean isActiveTransaction() { if (currentTransaction == null) { return false; } return currentTransaction.isActive(); } /** * Rolls back the current transaction if it is active */ @Override public void rollbackTransaction() { if (openTrasactionCalls < 1) { debugLog("rolling back transaction: no open transactions: " + openTrasactionCalls); return; } debugLog("Rollback transaction, isActive: " + currentTransaction.isActive()); try { if (currentTransaction.isActive() && transactionStatus != TXN_STATUS.ROLLBACK) { currentTransaction.rollback(); } } finally { openTrasactionCalls = 0; transactionStatus = TXN_STATUS.ROLLBACK; // remove all detached objects from the cache, since the transaction is // being rolled back they are no longer relevant, and this prevents them // from reattaching in future transactions pm.evictAll(); } } @Override public void createDatabase(Database db) throws InvalidObjectException, MetaException { boolean commited = false; MDatabase mdb = new MDatabase(); mdb.setName(db.getName().toLowerCase()); mdb.setLocationUri(db.getLocationUri()); mdb.setDescription(db.getDescription()); 
mdb.setParameters(db.getParameters()); mdb.setOwnerName(db.getOwnerName()); PrincipalType ownerType = db.getOwnerType(); mdb.setOwnerType((null == ownerType ? PrincipalType.USER.name() : ownerType.name())); try { openTransaction(); pm.makePersistent(mdb); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } } @SuppressWarnings("nls") private MDatabase getMDatabase(String name) throws NoSuchObjectException { MDatabase mdb = null; boolean commited = false; try { openTransaction(); name = HiveStringUtils.normalizeIdentifier(name); Query query = pm.newQuery(MDatabase.class, "name == dbname"); query.declareParameters("java.lang.String dbname"); query.setUnique(true); mdb = (MDatabase) query.execute(name); pm.retrieve(mdb); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } if (mdb == null) { throw new NoSuchObjectException("There is no database named " + name); } return mdb; } @Override public Database getDatabase(String name) throws NoSuchObjectException { MetaException ex = null; Database db = null; try { db = getDatabaseInternal(name); } catch (MetaException e) { // Signature restriction to NSOE, and NSOE being a flat exception prevents us from // setting the cause of the NSOE as the MetaException. We should not lose the info // we got here, but it's very likely that the MetaException is irrelevant and is // actually an NSOE message, so we should log it and throw an NSOE with the msg. ex = e; } if (db == null) { LOG.warn("Failed to get database " + name +", returning NoSuchObjectException", ex); throw new NoSuchObjectException(name + (ex == null ? 
"" : (": " + ex.getMessage()))); } return db; } public Database getDatabaseInternal(String name) throws MetaException, NoSuchObjectException { return new GetDbHelper(name, null, true, true) { @Override protected Database getSqlResult(GetHelper<Database> ctx) throws MetaException { return directSql.getDatabase(dbName); } @Override protected Database getJdoResult(GetHelper<Database> ctx) throws MetaException, NoSuchObjectException { return getJDODatabase(dbName); } }.run(false); } public Database getJDODatabase(String name) throws NoSuchObjectException { MDatabase mdb = null; boolean commited = false; try { openTransaction(); mdb = getMDatabase(name); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } Database db = new Database(); db.setName(mdb.getName()); db.setDescription(mdb.getDescription()); db.setLocationUri(mdb.getLocationUri()); db.setParameters(convertMap(mdb.getParameters())); db.setOwnerName(mdb.getOwnerName()); String type = mdb.getOwnerType(); db.setOwnerType((null == type || type.trim().isEmpty()) ? null : PrincipalType.valueOf(type)); return db; } /** * Alter the database object in metastore. Currently only the parameters * of the database or the owner can be changed. 
* @param dbName the database name * @param db the Hive Database object * @throws MetaException * @throws NoSuchObjectException */ @Override public boolean alterDatabase(String dbName, Database db) throws MetaException, NoSuchObjectException { MDatabase mdb = null; boolean committed = false; try { mdb = getMDatabase(dbName); mdb.setParameters(db.getParameters()); mdb.setOwnerName(db.getOwnerName()); if (db.getOwnerType() != null) { mdb.setOwnerType(db.getOwnerType().name()); } openTransaction(); pm.makePersistent(mdb); committed = commitTransaction(); } finally { if (!committed) { rollbackTransaction(); return false; } } return true; } @Override public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException { boolean success = false; LOG.info("Dropping database " + dbname + " along with all tables"); dbname = HiveStringUtils.normalizeIdentifier(dbname); try { openTransaction(); // then drop the database MDatabase db = getMDatabase(dbname); pm.retrieve(db); if (db != null) { List<MDBPrivilege> dbGrants = this.listDatabaseGrants(dbname); if (dbGrants != null && dbGrants.size() > 0) { pm.deletePersistentAll(dbGrants); } pm.deletePersistent(db); } success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } return success; } @Override public List<String> getDatabases(String pattern) throws MetaException { boolean commited = false; List<String> databases = null; try { openTransaction(); // Take the pattern and split it on the | to get all the composing // patterns String[] subpatterns = pattern.trim().split("\\|"); String query = "select name from org.apache.hadoop.hive.metastore.model.MDatabase where ("; boolean first = true; for (String subpattern : subpatterns) { subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*"); if (!first) { query = query + " || "; } query = query + " name.matches(\"" + subpattern + "\")"; first = false; } query = query + ")"; Query q = pm.newQuery(query); q.setResult("name"); 
q.setOrdering("name ascending"); Collection names = (Collection) q.execute(); databases = new ArrayList<String>(); for (Iterator i = names.iterator(); i.hasNext();) { databases.add((String) i.next()); } commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return databases; } @Override public List<String> getAllDatabases() throws MetaException { return getDatabases(".*"); } private MType getMType(Type type) { List<MFieldSchema> fields = new ArrayList<MFieldSchema>(); if (type.getFields() != null) { for (FieldSchema field : type.getFields()) { fields.add(new MFieldSchema(field.getName(), field.getType(), field .getComment())); } } return new MType(type.getName(), type.getType1(), type.getType2(), fields); } private Type getType(MType mtype) { List<FieldSchema> fields = new ArrayList<FieldSchema>(); if (mtype.getFields() != null) { for (MFieldSchema field : mtype.getFields()) { fields.add(new FieldSchema(field.getName(), field.getType(), field .getComment())); } } Type ret = new Type(); ret.setName(mtype.getName()); ret.setType1(mtype.getType1()); ret.setType2(mtype.getType2()); ret.setFields(fields); return ret; } @Override public boolean createType(Type type) { boolean success = false; MType mtype = getMType(type); boolean commited = false; try { openTransaction(); pm.makePersistent(mtype); commited = commitTransaction(); success = true; } finally { if (!commited) { rollbackTransaction(); } } return success; } @Override public Type getType(String typeName) { Type type = null; boolean commited = false; try { openTransaction(); Query query = pm.newQuery(MType.class, "name == typeName"); query.declareParameters("java.lang.String typeName"); query.setUnique(true); MType mtype = (MType) query.execute(typeName.trim()); pm.retrieve(type); if (mtype != null) { type = getType(mtype); } commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return type; } @Override public boolean dropType(String typeName) { 
boolean success = false; try { openTransaction(); Query query = pm.newQuery(MType.class, "name == typeName"); query.declareParameters("java.lang.String typeName"); query.setUnique(true); MType type = (MType) query.execute(typeName.trim()); pm.retrieve(type); if (type != null) { pm.deletePersistent(type); } success = commitTransaction(); } catch (JDOObjectNotFoundException e) { success = commitTransaction(); LOG.debug("type not found " + typeName, e); } finally { if (!success) { rollbackTransaction(); } } return success; } @Override public void createTable(Table tbl) throws InvalidObjectException, MetaException { boolean commited = false; try { openTransaction(); MTable mtbl = convertToMTable(tbl); pm.makePersistent(mtbl); PrincipalPrivilegeSet principalPrivs = tbl.getPrivileges(); List<Object> toPersistPrivObjs = new ArrayList<Object>(); if (principalPrivs != null) { int now = (int)(System.currentTimeMillis()/1000); Map<String, List<PrivilegeGrantInfo>> userPrivs = principalPrivs.getUserPrivileges(); putPersistentPrivObjects(mtbl, toPersistPrivObjs, now, userPrivs, PrincipalType.USER); Map<String, List<PrivilegeGrantInfo>> groupPrivs = principalPrivs.getGroupPrivileges(); putPersistentPrivObjects(mtbl, toPersistPrivObjs, now, groupPrivs, PrincipalType.GROUP); Map<String, List<PrivilegeGrantInfo>> rolePrivs = principalPrivs.getRolePrivileges(); putPersistentPrivObjects(mtbl, toPersistPrivObjs, now, rolePrivs, PrincipalType.ROLE); } pm.makePersistentAll(toPersistPrivObjs); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } } /** * Convert PrivilegeGrantInfo from privMap to MTablePrivilege, and add all of * them to the toPersistPrivObjs. These privilege objects will be persisted as * part of createTable. 
* * @param mtbl * @param toPersistPrivObjs * @param now * @param privMap * @param type */ private void putPersistentPrivObjects(MTable mtbl, List<Object> toPersistPrivObjs, int now, Map<String, List<PrivilegeGrantInfo>> privMap, PrincipalType type) { if (privMap != null) { for (Map.Entry<String, List<PrivilegeGrantInfo>> entry : privMap .entrySet()) { String principalName = entry.getKey(); List<PrivilegeGrantInfo> privs = entry.getValue(); for (int i = 0; i < privs.size(); i++) { PrivilegeGrantInfo priv = privs.get(i); if (priv == null) { continue; } MTablePrivilege mTblSec = new MTablePrivilege( principalName, type.toString(), mtbl, priv.getPrivilege(), now, priv.getGrantor(), priv.getGrantorType().toString(), priv .isGrantOption()); toPersistPrivObjs.add(mTblSec); } } } } @Override public boolean dropTable(String dbName, String tableName) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException { boolean success = false; try { openTransaction(); MTable tbl = getMTable(dbName, tableName); pm.retrieve(tbl); if (tbl != null) { // first remove all the grants List<MTablePrivilege> tabGrants = listAllTableGrants(dbName, tableName); if (tabGrants != null && tabGrants.size() > 0) { pm.deletePersistentAll(tabGrants); } List<MTableColumnPrivilege> tblColGrants = listTableAllColumnGrants(dbName, tableName); if (tblColGrants != null && tblColGrants.size() > 0) { pm.deletePersistentAll(tblColGrants); } List<MPartitionPrivilege> partGrants = this.listTableAllPartitionGrants(dbName, tableName); if (partGrants != null && partGrants.size() > 0) { pm.deletePersistentAll(partGrants); } List<MPartitionColumnPrivilege> partColGrants = listTableAllPartitionColumnGrants(dbName, tableName); if (partColGrants != null && partColGrants.size() > 0) { pm.deletePersistentAll(partColGrants); } // delete column statistics if present try { deleteTableColumnStatistics(dbName, tableName, null); } catch (NoSuchObjectException e) { LOG.info("Found no table level 
column statistics associated with db " + dbName + " table " + tableName + " record to delete"); } preDropStorageDescriptor(tbl.getSd()); // then remove the table pm.deletePersistentAll(tbl); } success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } return success; } @Override public Table getTable(String dbName, String tableName) throws MetaException { boolean commited = false; Table tbl = null; try { openTransaction(); tbl = convertToTable(getMTable(dbName, tableName)); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return tbl; } @Override public List<String> getTables(String dbName, String pattern) throws MetaException { boolean commited = false; List<String> tbls = null; try { openTransaction(); dbName = HiveStringUtils.normalizeIdentifier(dbName); // Take the pattern and split it on the | to get all the composing // patterns String[] subpatterns = pattern.trim().split("\\|"); String query = "select tableName from org.apache.hadoop.hive.metastore.model.MTable " + "where database.name == dbName && ("; boolean first = true; for (String subpattern : subpatterns) { subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*"); if (!first) { query = query + " || "; } query = query + " tableName.matches(\"" + subpattern + "\")"; first = false; } query = query + ")"; Query q = pm.newQuery(query); q.declareParameters("java.lang.String dbName"); q.setResult("tableName"); q.setOrdering("tableName ascending"); Collection names = (Collection) q.execute(dbName); tbls = new ArrayList<String>(); for (Iterator i = names.iterator(); i.hasNext();) { tbls.add((String) i.next()); } commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return tbls; } @Override public List<String> getAllTables(String dbName) throws MetaException { return getTables(dbName, ".*"); } private MTable getMTable(String db, String table) { MTable mtbl = null; boolean commited = false; try { openTransaction(); 
db = HiveStringUtils.normalizeIdentifier(db); table = HiveStringUtils.normalizeIdentifier(table); Query query = pm.newQuery(MTable.class, "tableName == table && database.name == db"); query.declareParameters("java.lang.String table, java.lang.String db"); query.setUnique(true); mtbl = (MTable) query.execute(table, db); pm.retrieve(mtbl); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return mtbl; } @Override public List<Table> getTableObjectsByName(String db, List<String> tbl_names) throws MetaException, UnknownDBException { List<Table> tables = new ArrayList<Table>(); boolean committed = false; try { openTransaction(); db = HiveStringUtils.normalizeIdentifier(db); Query dbExistsQuery = pm.newQuery(MDatabase.class, "name == db"); dbExistsQuery.declareParameters("java.lang.String db"); dbExistsQuery.setUnique(true); dbExistsQuery.setResult("name"); String dbNameIfExists = (String) dbExistsQuery.execute(db); if (dbNameIfExists == null || dbNameIfExists.isEmpty()) { throw new UnknownDBException("Could not find database " + db); } List<String> lowered_tbl_names = new ArrayList<String>(); for (String t : tbl_names) { lowered_tbl_names.add(HiveStringUtils.normalizeIdentifier(t)); } Query query = pm.newQuery(MTable.class); query.setFilter("database.name == db && tbl_names.contains(tableName)"); query.declareParameters("java.lang.String db, java.util.Collection tbl_names"); Collection mtables = (Collection) query.execute(db, lowered_tbl_names); for (Iterator iter = mtables.iterator(); iter.hasNext();) { tables.add(convertToTable((MTable) iter.next())); } committed = commitTransaction(); } finally { if (!committed) { rollbackTransaction(); } } return tables; } /** Makes shallow copy of a list to avoid DataNucleus mucking with our objects. */ private <T> List<T> convertList(List<T> dnList) { return (dnList == null) ? null : Lists.newArrayList(dnList); } /** Makes shallow copy of a map to avoid DataNucleus mucking with our objects. 
*/ private Map<String, String> convertMap(Map<String, String> dnMap) { return MetaStoreUtils.trimMapNulls(dnMap, HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_ORM_RETRIEVE_MAPNULLS_AS_EMPTY_STRINGS)); } private Table convertToTable(MTable mtbl) throws MetaException { if (mtbl == null) { return null; } String tableType = mtbl.getTableType(); if (tableType == null) { // for backwards compatibility with old metastore persistence if (mtbl.getViewOriginalText() != null) { tableType = TableType.VIRTUAL_VIEW.toString(); } else if ("TRUE".equals(mtbl.getParameters().get("EXTERNAL"))) { tableType = TableType.EXTERNAL_TABLE.toString(); } else { tableType = TableType.MANAGED_TABLE.toString(); } } return new Table(mtbl.getTableName(), mtbl.getDatabase().getName(), mtbl .getOwner(), mtbl.getCreateTime(), mtbl.getLastAccessTime(), mtbl .getRetention(), convertToStorageDescriptor(mtbl.getSd()), convertToFieldSchemas(mtbl.getPartitionKeys()), convertMap(mtbl.getParameters()), mtbl.getViewOriginalText(), mtbl.getViewExpandedText(), tableType); } private MTable convertToMTable(Table tbl) throws InvalidObjectException, MetaException { if (tbl == null) { return null; } MDatabase mdb = null; try { mdb = getMDatabase(tbl.getDbName()); } catch (NoSuchObjectException e) { LOG.error(StringUtils.stringifyException(e)); throw new InvalidObjectException("Database " + tbl.getDbName() + " doesn't exist."); } // If the table has property EXTERNAL set, update table type // accordingly String tableType = tbl.getTableType(); boolean isExternal = "TRUE".equals(tbl.getParameters().get("EXTERNAL")); if (TableType.MANAGED_TABLE.toString().equals(tableType)) { if (isExternal) { tableType = TableType.EXTERNAL_TABLE.toString(); } } if (TableType.EXTERNAL_TABLE.toString().equals(tableType)) { if (!isExternal) { tableType = TableType.MANAGED_TABLE.toString(); } } // A new table is always created with a new column descriptor return new MTable(HiveStringUtils.normalizeIdentifier(tbl.getTableName()), mdb, 
convertToMStorageDescriptor(tbl.getSd()), tbl.getOwner(), tbl .getCreateTime(), tbl.getLastAccessTime(), tbl.getRetention(), convertToMFieldSchemas(tbl.getPartitionKeys()), tbl.getParameters(), tbl.getViewOriginalText(), tbl.getViewExpandedText(), tableType); } private List<MFieldSchema> convertToMFieldSchemas(List<FieldSchema> keys) { List<MFieldSchema> mkeys = null; if (keys != null) { mkeys = new ArrayList<MFieldSchema>(keys.size()); for (FieldSchema part : keys) { mkeys.add(new MFieldSchema(HiveStringUtils.normalizeIdentifier(part.getName()), part.getType(), part.getComment())); } } return mkeys; } private List<FieldSchema> convertToFieldSchemas(List<MFieldSchema> mkeys) { List<FieldSchema> keys = null; if (mkeys != null) { keys = new ArrayList<FieldSchema>(mkeys.size()); for (MFieldSchema part : mkeys) { keys.add(new FieldSchema(part.getName(), part.getType(), part .getComment())); } } return keys; } private List<MOrder> convertToMOrders(List<Order> keys) { List<MOrder> mkeys = null; if (keys != null) { mkeys = new ArrayList<MOrder>(keys.size()); for (Order part : keys) { mkeys.add(new MOrder(HiveStringUtils.normalizeIdentifier(part.getCol()), part.getOrder())); } } return mkeys; } private List<Order> convertToOrders(List<MOrder> mkeys) { List<Order> keys = null; if (mkeys != null) { keys = new ArrayList<Order>(mkeys.size()); for (MOrder part : mkeys) { keys.add(new Order(part.getCol(), part.getOrder())); } } return keys; } private SerDeInfo convertToSerDeInfo(MSerDeInfo ms) throws MetaException { if (ms == null) { throw new MetaException("Invalid SerDeInfo object"); } return new SerDeInfo(ms.getName(), ms.getSerializationLib(), convertMap(ms.getParameters())); } private MSerDeInfo convertToMSerDeInfo(SerDeInfo ms) throws MetaException { if (ms == null) { throw new MetaException("Invalid SerDeInfo object"); } return new MSerDeInfo(ms.getName(), ms.getSerializationLib(), ms .getParameters()); } /** * Given a list of model field schemas, create a new model 
column descriptor. * @param cols the columns the column descriptor contains * @return a new column descriptor db-backed object */ private MColumnDescriptor createNewMColumnDescriptor(List<MFieldSchema> cols) { if (cols == null) { return null; } return new MColumnDescriptor(cols); } // MSD and SD should be same objects. Not sure how to make then same right now // MSerdeInfo *& SerdeInfo should be same as well private StorageDescriptor convertToStorageDescriptor(MStorageDescriptor msd, boolean noFS) throws MetaException { if (msd == null) { return null; } List<MFieldSchema> mFieldSchemas = msd.getCD() == null ? null : msd.getCD().getCols(); StorageDescriptor sd = new StorageDescriptor(noFS ? null : convertToFieldSchemas(mFieldSchemas), msd.getLocation(), msd.getInputFormat(), msd.getOutputFormat(), msd .isCompressed(), msd.getNumBuckets(), convertToSerDeInfo(msd .getSerDeInfo()), convertList(msd.getBucketCols()), convertToOrders(msd .getSortCols()), convertMap(msd.getParameters())); SkewedInfo skewedInfo = new SkewedInfo(convertList(msd.getSkewedColNames()), convertToSkewedValues(msd.getSkewedColValues()), covertToSkewedMap(msd.getSkewedColValueLocationMaps())); sd.setSkewedInfo(skewedInfo); sd.setStoredAsSubDirectories(msd.isStoredAsSubDirectories()); return sd; } private StorageDescriptor convertToStorageDescriptor(MStorageDescriptor msd) throws MetaException { return convertToStorageDescriptor(msd, false); } /** * Convert a list of MStringList to a list of list string * * @param mLists * @return */ private List<List<String>> convertToSkewedValues(List<MStringList> mLists) { List<List<String>> lists = null; if (mLists != null) { lists = new ArrayList<List<String>>(mLists.size()); for (MStringList element : mLists) { lists.add(new ArrayList<String>(element.getInternalList())); } } return lists; } private List<MStringList> convertToMStringLists(List<List<String>> mLists) { List<MStringList> lists = null ; if (null != mLists) { lists = new ArrayList<MStringList>(); 
for (List<String> mList : mLists) { lists.add(new MStringList(mList)); } } return lists; } /** * Convert a MStringList Map to a Map * @param mMap * @return */ private Map<List<String>, String> covertToSkewedMap(Map<MStringList, String> mMap) { Map<List<String>, String> map = null; if (mMap != null) { map = new HashMap<List<String>, String>(mMap.size()); Set<MStringList> keys = mMap.keySet(); for (MStringList key : keys) { map.put(new ArrayList<String>(key.getInternalList()), mMap.get(key)); } } return map; } /** * Covert a Map to a MStringList Map * @param mMap * @return */ private Map<MStringList, String> covertToMapMStringList(Map<List<String>, String> mMap) { Map<MStringList, String> map = null; if (mMap != null) { map = new HashMap<MStringList, String>(mMap.size()); Set<List<String>> keys = mMap.keySet(); for (List<String> key : keys) { map.put(new MStringList(key), mMap.get(key)); } } return map; } /** * Converts a storage descriptor to a db-backed storage descriptor. Creates a * new db-backed column descriptor object for this SD. * @param sd the storage descriptor to wrap in a db-backed object * @return the storage descriptor db-backed object * @throws MetaException */ private MStorageDescriptor convertToMStorageDescriptor(StorageDescriptor sd) throws MetaException { if (sd == null) { return null; } MColumnDescriptor mcd = createNewMColumnDescriptor(convertToMFieldSchemas(sd.getCols())); return convertToMStorageDescriptor(sd, mcd); } /** * Converts a storage descriptor to a db-backed storage descriptor. It points the * storage descriptor's column descriptor to the one passed as an argument, * so it does not create a new mcolumn descriptor object. 
* @param sd the storage descriptor to wrap in a db-backed object * @param mcd the db-backed column descriptor * @return the db-backed storage descriptor object * @throws MetaException */ private MStorageDescriptor convertToMStorageDescriptor(StorageDescriptor sd, MColumnDescriptor mcd) throws MetaException { if (sd == null) { return null; } return new MStorageDescriptor(mcd, sd .getLocation(), sd.getInputFormat(), sd.getOutputFormat(), sd .isCompressed(), sd.getNumBuckets(), convertToMSerDeInfo(sd .getSerdeInfo()), sd.getBucketCols(), convertToMOrders(sd.getSortCols()), sd.getParameters(), (null == sd.getSkewedInfo()) ? null : sd.getSkewedInfo().getSkewedColNames(), convertToMStringLists((null == sd.getSkewedInfo()) ? null : sd.getSkewedInfo() .getSkewedColValues()), covertToMapMStringList((null == sd.getSkewedInfo()) ? null : sd.getSkewedInfo() .getSkewedColValueLocationMaps()), sd.isStoredAsSubDirectories()); } @Override public boolean addPartitions(String dbName, String tblName, List<Partition> parts) throws InvalidObjectException, MetaException { boolean success = false; openTransaction(); try { List<MTablePrivilege> tabGrants = null; List<MTableColumnPrivilege> tabColumnGrants = null; MTable table = this.getMTable(dbName, tblName); if ("TRUE".equalsIgnoreCase(table.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) { tabGrants = this.listAllTableGrants(dbName, tblName); tabColumnGrants = this.listTableAllColumnGrants(dbName, tblName); } List<Object> toPersist = new ArrayList<Object>(); for (Partition part : parts) { if (!part.getTableName().equals(tblName) || !part.getDbName().equals(dbName)) { throw new MetaException("Partition does not belong to target table " + dbName + "." 
+ tblName + ": " + part); } MPartition mpart = convertToMPart(part, true); toPersist.add(mpart); int now = (int)(System.currentTimeMillis()/1000); if (tabGrants != null) { for (MTablePrivilege tab: tabGrants) { toPersist.add(new MPartitionPrivilege(tab.getPrincipalName(), tab.getPrincipalType(), mpart, tab.getPrivilege(), now, tab.getGrantor(), tab.getGrantorType(), tab.getGrantOption())); } } if (tabColumnGrants != null) { for (MTableColumnPrivilege col : tabColumnGrants) { toPersist.add(new MPartitionColumnPrivilege(col.getPrincipalName(), col.getPrincipalType(), mpart, col.getColumnName(), col.getPrivilege(), now, col.getGrantor(), col.getGrantorType(), col.getGrantOption())); } } } if (toPersist.size() > 0) { pm.makePersistentAll(toPersist); } success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } return success; } private boolean isValidPartition( Partition part, boolean ifNotExists) throws MetaException { MetaStoreUtils.validatePartitionNameCharacters(part.getValues(), partitionValidationPattern); boolean doesExist = doesPartitionExist( part.getDbName(), part.getTableName(), part.getValues()); if (doesExist && !ifNotExists) { throw new MetaException("Partition already exists: " + part); } return !doesExist; } @Override public boolean addPartitions(String dbName, String tblName, PartitionSpecProxy partitionSpec, boolean ifNotExists) throws InvalidObjectException, MetaException { boolean success = false; openTransaction(); try { List<MTablePrivilege> tabGrants = null; List<MTableColumnPrivilege> tabColumnGrants = null; MTable table = this.getMTable(dbName, tblName); if ("TRUE".equalsIgnoreCase(table.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) { tabGrants = this.listAllTableGrants(dbName, tblName); tabColumnGrants = this.listTableAllColumnGrants(dbName, tblName); } if (!partitionSpec.getTableName().equals(tblName) || !partitionSpec.getDbName().equals(dbName)) { throw new MetaException("Partition does not belong to target 
table " + dbName + "." + tblName + ": " + partitionSpec); } PartitionSpecProxy.PartitionIterator iterator = partitionSpec.getPartitionIterator(); int now = (int)(System.currentTimeMillis()/1000); while (iterator.hasNext()) { Partition part = iterator.next(); if (isValidPartition(part, ifNotExists)) { MPartition mpart = convertToMPart(part, true); pm.makePersistent(mpart); if (tabGrants != null) { for (MTablePrivilege tab : tabGrants) { pm.makePersistent(new MPartitionPrivilege(tab.getPrincipalName(), tab.getPrincipalType(), mpart, tab.getPrivilege(), now, tab.getGrantor(), tab.getGrantorType(), tab.getGrantOption())); } } if (tabColumnGrants != null) { for (MTableColumnPrivilege col : tabColumnGrants) { pm.makePersistent(new MPartitionColumnPrivilege(col.getPrincipalName(), col.getPrincipalType(), mpart, col.getColumnName(), col.getPrivilege(), now, col.getGrantor(), col.getGrantorType(), col.getGrantOption())); } } } } success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } return success; } @Override public boolean addPartition(Partition part) throws InvalidObjectException, MetaException { boolean success = false; boolean commited = false; try { MTable table = this.getMTable(part.getDbName(), part.getTableName()); List<MTablePrivilege> tabGrants = null; List<MTableColumnPrivilege> tabColumnGrants = null; if ("TRUE".equalsIgnoreCase(table.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) { tabGrants = this.listAllTableGrants(part .getDbName(), part.getTableName()); tabColumnGrants = this.listTableAllColumnGrants( part.getDbName(), part.getTableName()); } openTransaction(); MPartition mpart = convertToMPart(part, true); pm.makePersistent(mpart); int now = (int)(System.currentTimeMillis()/1000); List<Object> toPersist = new ArrayList<Object>(); if (tabGrants != null) { for (MTablePrivilege tab: tabGrants) { MPartitionPrivilege partGrant = new MPartitionPrivilege(tab .getPrincipalName(), tab.getPrincipalType(), mpart, 
tab.getPrivilege(), now, tab.getGrantor(), tab .getGrantorType(), tab.getGrantOption()); toPersist.add(partGrant); } } if (tabColumnGrants != null) { for (MTableColumnPrivilege col : tabColumnGrants) { MPartitionColumnPrivilege partColumn = new MPartitionColumnPrivilege(col .getPrincipalName(), col.getPrincipalType(), mpart, col .getColumnName(), col.getPrivilege(), now, col.getGrantor(), col .getGrantorType(), col.getGrantOption()); toPersist.add(partColumn); } if (toPersist.size() > 0) { pm.makePersistentAll(toPersist); } } commited = commitTransaction(); success = true; } finally { if (!commited) { rollbackTransaction(); } } return success; } @Override public Partition getPartition(String dbName, String tableName, List<String> part_vals) throws NoSuchObjectException, MetaException { openTransaction(); Partition part = convertToPart(getMPartition(dbName, tableName, part_vals)); commitTransaction(); if(part == null) { throw new NoSuchObjectException("partition values=" + part_vals.toString()); } part.setValues(part_vals); return part; } private MPartition getMPartition(String dbName, String tableName, List<String> part_vals) throws MetaException { MPartition mpart = null; boolean commited = false; try { openTransaction(); dbName = HiveStringUtils.normalizeIdentifier(dbName); tableName = HiveStringUtils.normalizeIdentifier(tableName); MTable mtbl = getMTable(dbName, tableName); if (mtbl == null) { commited = commitTransaction(); return null; } // Change the query to use part_vals instead of the name which is // redundant TODO: callers of this often get part_vals out of name for no reason... 
      String name = Warehouse.makePartName(convertToFieldSchemas(mtbl
          .getPartitionKeys()), part_vals);
      // Look up the partition by its computed name; setUnique guarantees at most one row.
      Query query = pm.newQuery(MPartition.class,
          "table.tableName == t1 && table.database.name == t2 && partitionName == t3");
      query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
      query.setUnique(true);
      mpart = (MPartition) query.execute(tableName, dbName, name);
      pm.retrieve(mpart);
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return mpart;
  }

  /**
   * Convert a Partition object into an MPartition, which is an object backed by the db
   * If the Partition's set of columns is the same as the parent table's AND useTableCD
   * is true, then this partition's storage descriptor's column descriptor will point
   * to the same one as the table's storage descriptor.
   * @param part the partition to convert
   * @param useTableCD whether to try to use the parent table's column descriptor.
   * @return the model partition object
   * @throws InvalidObjectException if the partition does not reference a valid table
   * @throws MetaException on other metastore errors
   */
  private MPartition convertToMPart(Partition part, boolean useTableCD)
      throws InvalidObjectException, MetaException {
    if (part == null) {
      return null;
    }
    MTable mt = getMTable(part.getDbName(), part.getTableName());
    if (mt == null) {
      throw new InvalidObjectException(
          "Partition doesn't have a valid table or database name");
    }

    // If this partition's set of columns is the same as the parent table's,
    // use the parent table's, so we do not create a duplicate column descriptor,
    // thereby saving space
    MStorageDescriptor msd;
    if (useTableCD &&
        mt.getSd() != null && mt.getSd().getCD() != null &&
        mt.getSd().getCD().getCols() != null &&
        part.getSd() != null &&
        convertToFieldSchemas(mt.getSd().getCD().getCols()).
            equals(part.getSd().getCols())) {
      msd = convertToMStorageDescriptor(part.getSd(), mt.getSd().getCD());
    } else {
      msd = convertToMStorageDescriptor(part.getSd());
    }

    return new MPartition(Warehouse.makePartName(convertToFieldSchemas(mt
        .getPartitionKeys()), part.getValues()), mt, part.getValues(), part
        .getCreateTime(), part.getLastAccessTime(),
        msd, part.getParameters());
  }

  /**
   * Convert a db-backed MPartition into an API-level Partition; the db and table
   * names are read back through mpart's table reference.
   */
  private Partition convertToPart(MPartition mpart) throws MetaException {
    if (mpart == null) {
      return null;
    }
    return new Partition(convertList(mpart.getValues()), mpart.getTable().getDatabase()
        .getName(), mpart.getTable().getTableName(), mpart.getCreateTime(),
        mpart.getLastAccessTime(), convertToStorageDescriptor(mpart.getSd()),
        convertMap(mpart.getParameters()));
  }

  /**
   * Convert a db-backed MPartition into an API-level Partition using the caller's db
   * and table names, avoiding a datastore round-trip through mpart's table reference.
   * NOTE(review): this overload passes an extra {@code false} flag to
   * convertToStorageDescriptor, unlike the one-arg overload above — confirm the
   * intended difference against that helper's definition.
   */
  private Partition convertToPart(String dbName, String tblName, MPartition mpart)
      throws MetaException {
    if (mpart == null) {
      return null;
    }
    return new Partition(convertList(mpart.getValues()), dbName, tblName,
        mpart.getCreateTime(), mpart.getLastAccessTime(),
        convertToStorageDescriptor(mpart.getSd(), false), convertMap(mpart.getParameters()));
  }

  /**
   * Drops a single partition identified by its value list; cascading deletes
   * (grants, column stats, storage descriptor) are handled by dropPartitionCommon.
   */
  @Override
  public boolean dropPartition(String dbName, String tableName,
      List<String> part_vals) throws MetaException, NoSuchObjectException,
      InvalidObjectException, InvalidInputException {
    boolean success = false;
    try {
      openTransaction();
      MPartition part = getMPartition(dbName, tableName, part_vals);
      dropPartitionCommon(part);
      success = commitTransaction();
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
    return success;
  }

  /**
   * Bulk-drops the named partitions and their dependent objects (grants, column
   * grants, column statistics, storage/column descriptors) in one transaction.
   */
  @Override
  public void dropPartitions(String dbName, String tblName, List<String> partNames)
      throws MetaException, NoSuchObjectException {
    if (partNames.isEmpty()) return;
    boolean success = false;
    openTransaction();
    try {
      // Delete all things.
      dropPartitionGrantsNoTxn(dbName, tblName, partNames);
      dropPartitionAllColumnGrantsNoTxn(dbName, tblName, partNames);
      dropPartitionColumnStatisticsNoTxn(dbName, tblName, partNames);

      // CDs are reused; go through partition SDs, detach all CDs from SDs, then remove unused CDs.
      for (MColumnDescriptor mcd : detachCdsFromSdsNoTxn(dbName, tblName, partNames)) {
        removeUnusedColumnDescriptor(mcd);
      }
      dropPartitionsNoTxn(dbName, tblName, partNames);
      if (!(success = commitTransaction())) {
        throw new MetaException("Failed to drop partitions"); // Should not happen?
      }
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
  }

  /**
   * Drop an MPartition and cascade deletes (e.g., delete partition privilege grants,
   * drop the storage descriptor cleanly, etc.)
   * @param part - the MPartition to drop; a null part is a no-op (the transaction
   *        still commits)
   * @return whether the transaction committed successfully
   * @throws InvalidInputException
   * @throws InvalidObjectException
   * @throws MetaException
   * @throws NoSuchObjectException
   */
  private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectException, MetaException,
    InvalidObjectException, InvalidInputException {
    boolean success = false;
    try {
      openTransaction();
      if (part != null) {
        List<MFieldSchema> schemas = part.getTable().getPartitionKeys();
        List<String> colNames = new ArrayList<String>();
        for (MFieldSchema col: schemas) {
          colNames.add(col.getName());
        }
        String partName = FileUtils.makePartName(colNames, part.getValues());

        // Remove partition-level privilege grants before deleting the partition itself.
        List<MPartitionPrivilege> partGrants = listPartitionGrants(
            part.getTable().getDatabase().getName(),
            part.getTable().getTableName(),
            Lists.newArrayList(partName));
        if (partGrants != null && partGrants.size() > 0) {
          pm.deletePersistentAll(partGrants);
        }

        List<MPartitionColumnPrivilege> partColumnGrants = listPartitionAllColumnGrants(
            part.getTable().getDatabase().getName(),
            part.getTable().getTableName(),
            Lists.newArrayList(partName));
        if (partColumnGrants != null && partColumnGrants.size() > 0) {
          pm.deletePersistentAll(partColumnGrants);
        }

        String dbName = part.getTable().getDatabase().getName();
        String tableName = part.getTable().getTableName();

        // delete partition level column stats if it exists
        try {
          deletePartitionColumnStatistics(dbName, tableName, partName, part.getValues(), null);
        } catch (NoSuchObjectException e) {
          // Best-effort: missing statistics are not an error for a drop.
          LOG.info("No column statistics records found to delete");
        }

        preDropStorageDescriptor(part.getSd());
        pm.deletePersistent(part);
      }
      success = commitTransaction();
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
    return success;
  }

  @Override
  public List<Partition> getPartitions(
      String dbName, String tableName, int maxParts) throws MetaException, NoSuchObjectException {
    return getPartitionsInternal(dbName, tableName, maxParts, true, true);
  }

  /**
   * Fetches partitions, preferring direct SQL when allowed and falling back to JDO.
   * A negative maxParts means no limit.
   */
  protected List<Partition> getPartitionsInternal(
      String dbName, String tblName, final int maxParts, boolean allowSql, boolean allowJdo)
      throws MetaException, NoSuchObjectException {
    return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
      @Override
      protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
        Integer max = (maxParts < 0) ?
null : maxParts; return directSql.getPartitions(dbName, tblName, max); } @Override protected List<Partition> getJdoResult( GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException { return convertToParts(listMPartitions(dbName, tblName, maxParts)); } }.run(false); } @Override public List<Partition> getPartitionsWithAuth(String dbName, String tblName, short max, String userName, List<String> groupNames) throws MetaException, NoSuchObjectException, InvalidObjectException { boolean success = false; try { openTransaction(); List<MPartition> mparts = listMPartitions(dbName, tblName, max); List<Partition> parts = new ArrayList<Partition>(mparts.size()); if (mparts != null && mparts.size()>0) { for (MPartition mpart : mparts) { MTable mtbl = mpart.getTable(); Partition part = convertToPart(mpart); parts.add(part); if ("TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) { String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl .getPartitionKeys()), part.getValues()); PrincipalPrivilegeSet partAuth = this.getPartitionPrivilegeSet(dbName, tblName, partName, userName, groupNames); part.setPrivileges(partAuth); } } } success = commitTransaction(); return parts; } finally { if (!success) { rollbackTransaction(); } } } @Override public Partition getPartitionWithAuth(String dbName, String tblName, List<String> partVals, String user_name, List<String> group_names) throws NoSuchObjectException, MetaException, InvalidObjectException { boolean success = false; try { openTransaction(); MPartition mpart = getMPartition(dbName, tblName, partVals); if (mpart == null) { commitTransaction(); throw new NoSuchObjectException("partition values=" + partVals.toString()); } Partition part = null; MTable mtbl = mpart.getTable(); part = convertToPart(mpart); if ("TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) { String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl 
.getPartitionKeys()), partVals); PrincipalPrivilegeSet partAuth = this.getPartitionPrivilegeSet(dbName, tblName, partName, user_name, group_names); part.setPrivileges(partAuth); } success = commitTransaction(); return part; } finally { if (!success) { rollbackTransaction(); } } } private List<Partition> convertToParts(List<MPartition> mparts) throws MetaException { return convertToParts(mparts, null); } private List<Partition> convertToParts(List<MPartition> src, List<Partition> dest) throws MetaException { if (src == null) { return dest; } if (dest == null) { dest = new ArrayList<Partition>(src.size()); } for (MPartition mp : src) { dest.add(convertToPart(mp)); Deadline.checkTimeout(); } return dest; } private List<Partition> convertToParts(String dbName, String tblName, List<MPartition> mparts) throws MetaException { List<Partition> parts = new ArrayList<Partition>(mparts.size()); for (MPartition mp : mparts) { parts.add(convertToPart(dbName, tblName, mp)); Deadline.checkTimeout(); } return parts; } // TODO:pc implement max @Override public List<String> listPartitionNames(String dbName, String tableName, short max) throws MetaException { List<String> pns = null; boolean success = false; try { openTransaction(); LOG.debug("Executing getPartitionNames"); pns = getPartitionNamesNoTxn(dbName, tableName, max); success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } return pns; } private List<String> getPartitionNamesNoTxn(String dbName, String tableName, short max) { List<String> pns = new ArrayList<String>(); dbName = HiveStringUtils.normalizeIdentifier(dbName); tableName = HiveStringUtils.normalizeIdentifier(tableName); Query q = pm.newQuery( "select partitionName from org.apache.hadoop.hive.metastore.model.MPartition " + "where table.database.name == t1 && table.tableName == t2 " + "order by partitionName asc"); q.declareParameters("java.lang.String t1, java.lang.String t2"); q.setResult("partitionName"); if(max > 0) { q.setRange(0, 
max); } Collection names = (Collection) q.execute(dbName, tableName); for (Iterator i = names.iterator(); i.hasNext();) { pns.add((String) i.next()); } return pns; } /** * Retrieves a Collection of partition-related results from the database that match * the partial specification given for a specific table. * @param dbName the name of the database * @param tableName the name of the table * @param part_vals the partial specification values * @param max_parts the maximum number of partitions to return * @param resultsCol the metadata column of the data to return, e.g. partitionName, etc. * if resultsCol is empty or null, a collection of MPartition objects is returned * @throws NoSuchObjectException * @results A Collection of partition-related items from the db that match the partial spec * for a table. The type of each item in the collection corresponds to the column * you want results for. E.g., if resultsCol is partitionName, the Collection * has types of String, and if resultsCol is null, the types are MPartition. */ private Collection getPartitionPsQueryResults(String dbName, String tableName, List<String> part_vals, short max_parts, String resultsCol) throws MetaException, NoSuchObjectException { dbName = HiveStringUtils.normalizeIdentifier(dbName); tableName = HiveStringUtils.normalizeIdentifier(tableName); Table table = getTable(dbName, tableName); if (table == null) { throw new NoSuchObjectException(dbName + "." + tableName + " table not found"); } List<FieldSchema> partCols = table.getPartitionKeys(); int numPartKeys = partCols.size(); if (part_vals.size() > numPartKeys) { throw new MetaException("Incorrect number of partition values"); } partCols = partCols.subList(0, part_vals.size()); //Construct a pattern of the form: partKey=partVal/partKey2=partVal2/... 
// where partVal is either the escaped partition value given as input, // or a regex of the form ".*" //This works because the "=" and "/" separating key names and partition key/values // are not escaped. String partNameMatcher = Warehouse.makePartName(partCols, part_vals, ".*"); //add ".*" to the regex to match anything else afterwards the partial spec. if (part_vals.size() < numPartKeys) { partNameMatcher += ".*"; } Query q = pm.newQuery(MPartition.class); StringBuilder queryFilter = new StringBuilder("table.database.name == dbName"); queryFilter.append(" && table.tableName == tableName"); queryFilter.append(" && partitionName.matches(partialRegex)"); q.setFilter(queryFilter.toString()); q.declareParameters("java.lang.String dbName, " + "java.lang.String tableName, java.lang.String partialRegex"); if( max_parts >= 0 ) { //User specified a row limit, set it on the Query q.setRange(0, max_parts); } if (resultsCol != null && !resultsCol.isEmpty()) { q.setResult(resultsCol); } return (Collection) q.execute(dbName, tableName, partNameMatcher); } @Override public List<Partition> listPartitionsPsWithAuth(String db_name, String tbl_name, List<String> part_vals, short max_parts, String userName, List<String> groupNames) throws MetaException, InvalidObjectException, NoSuchObjectException { List<Partition> partitions = new ArrayList<Partition>(); boolean success = false; try { openTransaction(); LOG.debug("executing listPartitionNamesPsWithAuth"); Collection parts = getPartitionPsQueryResults(db_name, tbl_name, part_vals, max_parts, null); MTable mtbl = getMTable(db_name, tbl_name); for (Object o : parts) { Partition part = convertToPart((MPartition) o); //set auth privileges if (null != userName && null != groupNames && "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) { String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl .getPartitionKeys()), part.getValues()); PrincipalPrivilegeSet partAuth = 
getPartitionPrivilegeSet(db_name, tbl_name, partName, userName, groupNames); part.setPrivileges(partAuth); } partitions.add(part); } success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } return partitions; } @Override public List<String> listPartitionNamesPs(String dbName, String tableName, List<String> part_vals, short max_parts) throws MetaException, NoSuchObjectException { List<String> partitionNames = new ArrayList<String>(); boolean success = false; try { openTransaction(); LOG.debug("Executing listPartitionNamesPs"); Collection names = getPartitionPsQueryResults(dbName, tableName, part_vals, max_parts, "partitionName"); for (Object o : names) { partitionNames.add((String) o); } success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } return partitionNames; } // TODO:pc implement max private List<MPartition> listMPartitions(String dbName, String tableName, int max) { boolean success = false; List<MPartition> mparts = null; try { openTransaction(); LOG.debug("Executing listMPartitions"); dbName = HiveStringUtils.normalizeIdentifier(dbName); tableName = HiveStringUtils.normalizeIdentifier(tableName); Query query = pm.newQuery(MPartition.class, "table.tableName == t1 && table.database.name == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); query.setOrdering("partitionName ascending"); if(max > 0) { query.setRange(0, max); } mparts = (List<MPartition>) query.execute(tableName, dbName); LOG.debug("Done executing query for listMPartitions"); pm.retrieveAll(mparts); success = commitTransaction(); LOG.debug("Done retrieving all objects for listMPartitions " + mparts); } finally { if (!success) { rollbackTransaction(); } } return mparts; } @Override public List<Partition> getPartitionsByNames(String dbName, String tblName, List<String> partNames) throws MetaException, NoSuchObjectException { return getPartitionsByNamesInternal(dbName, tblName, partNames, true, true); } protected 
List<Partition> getPartitionsByNamesInternal(String dbName, String tblName, final List<String> partNames, boolean allowSql, boolean allowJdo) throws MetaException, NoSuchObjectException { return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) { @Override protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException { return directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames); } @Override protected List<Partition> getJdoResult( GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException { return getPartitionsViaOrmFilter(dbName, tblName, partNames); } }.run(false); } @Override public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr, String defaultPartitionName, short maxParts, List<Partition> result) throws TException { return getPartitionsByExprInternal( dbName, tblName, expr, defaultPartitionName, maxParts, result, true, true); } protected boolean getPartitionsByExprInternal(String dbName, String tblName, final byte[] expr, final String defaultPartitionName, final short maxParts, List<Partition> result, boolean allowSql, boolean allowJdo) throws TException { assert result != null; // We will try pushdown first, so make the filter. This will also validate the expression, // if serialization fails we will throw incompatible metastore error to the client. String filter = null; try { filter = expressionProxy.convertExprToFilter(expr); } catch (MetaException ex) { throw new IMetaStoreClient.IncompatibleMetastoreException(ex.getMessage()); } // Make a tree out of the filter. // TODO: this is all pretty ugly. The only reason we need all these transformations // is to maintain support for simple filters for HCat users that query metastore. // If forcing everyone to use thick client is out of the question, maybe we could // parse the filter into standard hive expressions and not all this separate tree // Filter.g stuff. 
That way this method and ...ByFilter would just be merged. final ExpressionTree exprTree = makeExpressionTree(filter); final AtomicBoolean hasUnknownPartitions = new AtomicBoolean(false); result.addAll(new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) { @Override protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException { // If we have some sort of expression tree, try SQL filter pushdown. List<Partition> result = null; if (exprTree != null) { result = directSql.getPartitionsViaSqlFilter(ctx.getTable(), exprTree, null); } if (result == null) { // We couldn't do SQL filter pushdown. Get names via normal means. List<String> partNames = new LinkedList<String>(); hasUnknownPartitions.set(getPartitionNamesPrunedByExprNoTxn( ctx.getTable(), expr, defaultPartitionName, maxParts, partNames)); result = directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames); } return result; } @Override protected List<Partition> getJdoResult( GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException { // If we have some sort of expression tree, try JDOQL filter pushdown. List<Partition> result = null; if (exprTree != null) { result = getPartitionsViaOrmFilter(ctx.getTable(), exprTree, maxParts, false); } if (result == null) { // We couldn't do JDOQL filter pushdown. Get names via normal means. 
List<String> partNames = new ArrayList<String>(); hasUnknownPartitions.set(getPartitionNamesPrunedByExprNoTxn( ctx.getTable(), expr, defaultPartitionName, maxParts, partNames)); result = getPartitionsViaOrmFilter(dbName, tblName, partNames); } return result; } }.run(true)); return hasUnknownPartitions.get(); } private class LikeChecker extends ExpressionTree.TreeVisitor { private boolean hasLike; public boolean hasLike() { return hasLike; } @Override protected boolean shouldStop() { return hasLike; } @Override protected void visit(LeafNode node) throws MetaException { hasLike = hasLike || (node.operator == Operator.LIKE); } } /** * Makes expression tree out of expr. * @param filter Filter. * @return Expression tree. Null if there was an error. */ private ExpressionTree makeExpressionTree(String filter) throws MetaException { // TODO: ExprNodeDesc is an expression tree, we could just use that and be rid of Filter.g. if (filter == null || filter.isEmpty()) { return ExpressionTree.EMPTY_TREE; } LOG.debug("Filter specified is " + filter); ExpressionTree tree = null; try { tree = getFilterParser(filter).tree; } catch (MetaException ex) { LOG.info("Unable to make the expression tree from expression string [" + filter + "]" + ex.getMessage()); // Don't log the stack, this is normal. } if (tree == null) { return null; } // We suspect that LIKE pushdown into JDO is invalid; see HIVE-5134. Check for like here. LikeChecker lc = new LikeChecker(); tree.accept(lc); return lc.hasLike() ? null : tree; } /** * Gets the partition names from a table, pruned using an expression. * @param table Table. * @param expr Expression. * @param defaultPartName Default partition name from job config, if any. * @param maxParts Maximum number of partition names to return. * @param result The resulting names. * @return Whether the result contains any unknown partitions. 
*/ private boolean getPartitionNamesPrunedByExprNoTxn(Table table, byte[] expr, String defaultPartName, short maxParts, List<String> result) throws MetaException { result.addAll(getPartitionNamesNoTxn( table.getDbName(), table.getTableName(), maxParts)); List<String> columnNames = new ArrayList<String>(); List<PrimitiveTypeInfo> typeInfos = new ArrayList<PrimitiveTypeInfo>(); for (FieldSchema fs : table.getPartitionKeys()) { columnNames.add(fs.getName()); typeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(fs.getType())); } if (defaultPartName == null || defaultPartName.isEmpty()) { defaultPartName = HiveConf.getVar(getConf(), HiveConf.ConfVars.DEFAULTPARTITIONNAME); } return expressionProxy.filterPartitionsByExpr( columnNames, typeInfos, expr, defaultPartName, result); } /** * Gets partition names from the table via ORM (JDOQL) filter pushdown. * @param table The table. * @param tree The expression tree from which JDOQL filter will be made. * @param maxParts Maximum number of partitions to return. * @param isValidatedFilter Whether the filter was pre-validated for JDOQL pushdown by a client * (old hive client or non-hive one); if it was and we fail to create a filter, we will throw. * @return Resulting partitions. Can be null if isValidatedFilter is false, and * there was error deriving the JDO filter. 
*/ private List<Partition> getPartitionsViaOrmFilter(Table table, ExpressionTree tree, short maxParts, boolean isValidatedFilter) throws MetaException { Map<String, Object> params = new HashMap<String, Object>(); String jdoFilter = makeQueryFilterString( table.getDbName(), table, tree, params, isValidatedFilter); if (jdoFilter == null) { assert !isValidatedFilter; return null; } Query query = pm.newQuery(MPartition.class, jdoFilter); if (maxParts >= 0) { // User specified a row limit, set it on the Query query.setRange(0, maxParts); } String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); query.setOrdering("partitionName ascending"); @SuppressWarnings("unchecked") List<MPartition> mparts = (List<MPartition>) query.executeWithMap(params); LOG.debug("Done executing query for getPartitionsViaOrmFilter"); pm.retrieveAll(mparts); // TODO: why is this inconsistent with what we get by names? LOG.debug("Done retrieving all objects for getPartitionsViaOrmFilter"); List<Partition> results = convertToParts(mparts); query.closeAll(); return results; } private static class Out<T> { public T val; } /** * Gets partition names from the table via ORM (JDOQL) name filter. * @param dbName Database name. * @param tblName Table name. * @param partNames Partition names to get the objects for. * @return Resulting partitions. 
*/ private List<Partition> getPartitionsViaOrmFilter( String dbName, String tblName, List<String> partNames) throws MetaException { if (partNames.isEmpty()) { return new ArrayList<Partition>(); } Out<Query> query = new Out<Query>(); List<MPartition> mparts = null; try { mparts = getMPartitionsViaOrmFilter(dbName, tblName, partNames, query); return convertToParts(dbName, tblName, mparts); } finally { if (query.val != null) { query.val.closeAll(); } } } private void dropPartitionsNoTxn(String dbName, String tblName, List<String> partNames) { ObjectPair<Query, Map<String, String>> queryWithParams = getPartQueryWithParams(dbName, tblName, partNames); Query query = queryWithParams.getFirst(); query.setClass(MPartition.class); long deleted = query.deletePersistentAll(queryWithParams.getSecond()); LOG.debug("Deleted " + deleted + " partition from store"); query.closeAll(); } /** * Detaches column descriptors from storage descriptors; returns the set of unique CDs * thus detached. This is done before dropping partitions because CDs are reused between * SDs; so, we remove the links to delete SDs and then check the returned CDs to see if * they are referenced by other SDs. 
*/ private HashSet<MColumnDescriptor> detachCdsFromSdsNoTxn( String dbName, String tblName, List<String> partNames) { ObjectPair<Query, Map<String, String>> queryWithParams = getPartQueryWithParams(dbName, tblName, partNames); Query query = queryWithParams.getFirst(); query.setClass(MPartition.class); query.setResult("sd"); @SuppressWarnings("unchecked") List<MStorageDescriptor> sds = (List<MStorageDescriptor>)query.executeWithMap( queryWithParams.getSecond()); HashSet<MColumnDescriptor> candidateCds = new HashSet<MColumnDescriptor>(); for (MStorageDescriptor sd : sds) { if (sd != null && sd.getCD() != null) { candidateCds.add(sd.getCD()); sd.setCD(null); } } return candidateCds; } private List<MPartition> getMPartitionsViaOrmFilter(String dbName, String tblName, List<String> partNames, Out<Query> out) { ObjectPair<Query, Map<String, String>> queryWithParams = getPartQueryWithParams(dbName, tblName, partNames); Query query = out.val = queryWithParams.getFirst(); query.setResultClass(MPartition.class); query.setClass(MPartition.class); query.setOrdering("partitionName ascending"); @SuppressWarnings("unchecked") List<MPartition> result = (List<MPartition>)query.executeWithMap(queryWithParams.getSecond()); return result; } private ObjectPair<Query, Map<String, String>> getPartQueryWithParams( String dbName, String tblName, List<String> partNames) { StringBuilder sb = new StringBuilder( "table.tableName == t1 && table.database.name == t2 && ("); int n = 0; Map<String, String> params = new HashMap<String, String>(); for (Iterator<String> itr = partNames.iterator(); itr.hasNext();) { String pn = "p" + n; n++; String part = itr.next(); params.put(pn, part); sb.append("partitionName == ").append(pn); sb.append(" || "); } sb.setLength(sb.length() - 4); // remove the last " || " sb.append(')'); Query query = pm.newQuery(); query.setFilter(sb.toString()); LOG.debug(" JDOQL filter is " + sb.toString()); params.put("t1", HiveStringUtils.normalizeIdentifier(tblName)); 
params.put("t2", HiveStringUtils.normalizeIdentifier(dbName)); query.declareParameters(makeParameterDeclarationString(params)); return new ObjectPair<Query, Map<String,String>>(query, params); } @Override public List<Partition> getPartitionsByFilter(String dbName, String tblName, String filter, short maxParts) throws MetaException, NoSuchObjectException { return getPartitionsByFilterInternal(dbName, tblName, filter, maxParts, true, true); } /** Helper class for getting stuff w/transaction, direct SQL, perf logging, etc. */ private abstract class GetHelper<T> { private final boolean isInTxn, doTrace, allowJdo; private boolean doUseDirectSql; private long start; private Table table; protected final String dbName, tblName; private boolean success = false; protected T results = null; public GetHelper(String dbName, String tblName, boolean allowSql, boolean allowJdo) throws MetaException { assert allowSql || allowJdo; this.allowJdo = allowJdo; this.dbName = HiveStringUtils.normalizeIdentifier(dbName); if (tblName != null){ this.tblName = HiveStringUtils.normalizeIdentifier(tblName); } else { // tblName can be null in cases of Helper being used at a higher // abstraction level, such as with datbases this.tblName = null; this.table = null; } this.doTrace = LOG.isDebugEnabled(); this.isInTxn = isActiveTransaction(); // SQL usage inside a larger transaction (e.g. droptable) may not be desirable because // some databases (e.g. Postgres) abort the entire transaction when any query fails, so // the fallback from failed SQL to JDO is not possible. boolean isConfigEnabled = HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL) && (HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL_DDL) || !isInTxn); if (!allowJdo && isConfigEnabled && !directSql.isCompatibleDatastore()) { throw new MetaException("SQL is not operational"); // test path; SQL is enabled and broken. 
    }
    this.doUseDirectSql = allowSql && isConfigEnabled && directSql.isCompatibleDatastore();
  }

  protected abstract String describeResult();
  protected abstract T getSqlResult(GetHelper<T> ctx) throws MetaException;
  protected abstract T getJdoResult(
      GetHelper<T> ctx) throws MetaException, NoSuchObjectException;

  // Template method: try direct SQL first (when enabled), fall back to JDO on failure,
  // all inside a (possibly nested) transaction with optional perf tracing.
  public T run(boolean initTable) throws MetaException, NoSuchObjectException {
    try {
      start(initTable);
      if (doUseDirectSql) {
        try {
          setResult(getSqlResult(this));
        } catch (Exception ex) {
          handleDirectSqlError(ex);
        }
      }
      // Deliberately not 'else': handleDirectSqlError (or getSqlResult via
      // disableDirectSql) may have cleared doUseDirectSql to request the JDO path.
      if (!doUseDirectSql) {
        setResult(getJdoResult(this));
      }
      return commit();
    } catch (NoSuchObjectException ex) {
      throw ex;
    } catch (MetaException ex) {
      throw ex;
    } catch (Exception ex) {
      LOG.error("", ex);
      throw new MetaException(ex.getMessage());
    } finally {
      close();
    }
  }

  private void start(boolean initTable) throws MetaException, NoSuchObjectException {
    start = doTrace ? System.nanoTime() : 0;
    openTransaction();
    if (initTable && (tblName != null)) {
      table = ensureGetTable(dbName, tblName);
    }
  }

  private boolean setResult(T results) {
    this.results = results;
    return this.results != null;
  }

  // On direct-SQL failure: rethrow when JDO fallback is not allowed; otherwise restart
  // the transaction (unless nested inside a caller's txn, which cannot be rolled back
  // from here) and clear the direct-SQL flag so run() retries via JDO.
  private void handleDirectSqlError(Exception ex) throws MetaException, NoSuchObjectException {
    LOG.warn("Direct SQL failed" + (allowJdo ? ", falling back to ORM" : ""), ex);
    if (!allowJdo) {
      if (ex instanceof MetaException) {
        throw (MetaException)ex;
      }
      throw new MetaException(ex.getMessage());
    }
    if (!isInTxn) {
      rollbackTransaction();
      start = doTrace ? System.nanoTime() : 0;
      openTransaction();
      if (table != null) {
        table = ensureGetTable(dbName, tblName);
      }
    } else {
      start = doTrace ? System.nanoTime() : 0;
    }
    doUseDirectSql = false;
  }

  public void disableDirectSql() {
    this.doUseDirectSql = false;
  }

  private T commit() {
    success = commitTransaction();
    if (doTrace) {
      LOG.debug(describeResult() + " retrieved using " + (doUseDirectSql ?
          "SQL" : "ORM") + " in " + ((System.nanoTime() - start) / 1000000.0) + "ms");
    }
    return results;
  }

  private void close() {
    // Roll back only if commit() never succeeded (or was never reached).
    if (!success) {
      rollbackTransaction();
    }
  }

  public Table getTable() {
    return table;
  }
}

// GetHelper specialization for list-valued results.
private abstract class GetListHelper<T> extends GetHelper<List<T>> {
  public GetListHelper(
      String dbName, String tblName, boolean allowSql, boolean allowJdo) throws MetaException {
    super(dbName, tblName, allowSql, allowJdo);
  }

  @Override
  protected String describeResult() {
    return results.size() + " entries";
  }
}

private abstract class GetDbHelper extends GetHelper<Database> {
  /**
   * GetHelper for returning db info using directSql/JDO.
   * Since this is a db-level call, tblName is ignored, and null is passed irrespective of what is passed in.
   * @param dbName The Database Name
   * @param tblName Placeholder param to match signature, always ignored.
   * @param allowSql Whether or not we allow DirectSQL to perform this query.
   * @param allowJdo Whether or not we allow ORM to perform this query.
   * @throws MetaException
   */
  public GetDbHelper(
      String dbName, String tblName, boolean allowSql, boolean allowJdo) throws MetaException {
    super(dbName,null,allowSql,allowJdo);
  }

  @Override
  protected String describeResult() {
    return "db details for db " + dbName;
  }
}

// GetHelper specialization for column statistics results.
private abstract class GetStatHelper extends GetHelper<ColumnStatistics> {
  public GetStatHelper(
      String dbName, String tblName, boolean allowSql, boolean allowJdo) throws MetaException {
    super(dbName, tblName, allowSql, allowJdo);
  }

  @Override
  protected String describeResult() {
    return "statistics for " + (results == null ? 0 : results.getStatsObjSize()) + " columns";
  }
}

// Shared implementation behind getPartitionsByFilter: parses the filter string once,
// then routes to direct SQL or JDO through GetListHelper.
protected List<Partition> getPartitionsByFilterInternal(String dbName, String tblName,
    String filter, final short maxParts, boolean allowSql, boolean allowJdo)
    throws MetaException, NoSuchObjectException {
  final ExpressionTree tree = (filter != null && !filter.isEmpty()) ?
      getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE;
  return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
    @Override
    protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
      List<Partition> parts = directSql.getPartitionsViaSqlFilter(
          ctx.getTable(), tree, (maxParts < 0) ? null : (int)maxParts);
      if (parts == null) {
        // Cannot push down SQL filter. The message has been logged internally.
        // This is not an error so don't roll back, just go to JDO.
        ctx.disableDirectSql();
      }
      return parts;
    }
    @Override
    protected List<Partition> getJdoResult(
        GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
      return getPartitionsViaOrmFilter(ctx.getTable(), tree, maxParts, true);
    }
  }.run(true);
}

/**
 * Gets the table object for a given table, throws if anything goes wrong.
 * @param dbName Database name.
 * @param tblName Table name.
 * @return Table object.
 */
private MTable ensureGetMTable(
    String dbName, String tblName) throws NoSuchObjectException, MetaException {
  MTable mtable = getMTable(dbName, tblName);
  if (mtable == null) {
    throw new NoSuchObjectException("Specified database/table does not exist : "
        + dbName + "." + tblName);
  }
  return mtable;
}

// Same as ensureGetMTable, but converts the JDO model object to the thrift Table.
private Table ensureGetTable(
    String dbName, String tblName) throws NoSuchObjectException, MetaException {
  return convertToTable(ensureGetMTable(dbName, tblName));
}

// Runs the ANTLR partition-filter parser over the filter string, surfacing both lexer
// and parser errors as MetaException.
private FilterParser getFilterParser(String filter) throws MetaException {
  FilterLexer lexer = new FilterLexer(new ANTLRNoCaseStringStream(filter));
  CommonTokenStream tokens = new CommonTokenStream(lexer);
  FilterParser parser = new FilterParser(tokens);
  try {
    parser.filter();
  } catch(RecognitionException re) {
    throw new MetaException("Error parsing partition filter; lexer error: "
        + lexer.errorMsg + "; exception " + re);
  }
  // Lexer errors are recorded rather than thrown; check explicitly.
  if (lexer.errorMsg != null) {
    throw new MetaException("Error parsing partition filter : " + lexer.errorMsg);
  }
  return parser;
}

/**
 * Makes a JDO query filter string for tables or partitions.
 * @param dbName Database name.
 * @param mtable Table. If null, the query returned is over tables in a database.
 *   If not null, the query returned is over partitions in a table.
 * @param filter The filter from which JDOQL filter will be made.
 * @param params Parameters for the filter. Some parameters may be added here.
 * @return Resulting filter.
 */
private String makeQueryFilterString(String dbName, MTable mtable, String filter,
    Map<String, Object> params) throws MetaException {
  ExpressionTree tree = (filter != null && !filter.isEmpty()) ?
      getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE;
  return makeQueryFilterString(dbName, convertToTable(mtable), tree, params, true);
}

/**
 * Makes a JDO query filter string for tables or partitions.
 * @param dbName Database name.
 * @param table Table. If null, the query returned is over tables in a database.
 *   If not null, the query returned is over partitions in a table.
 * @param tree The expression tree from which JDOQL filter will be made.
 * @param params Parameters for the filter. Some parameters may be added here.
 * @param isValidatedFilter Whether the filter was pre-validated for JDOQL pushdown
 *   by the client; if it was and we fail to create a filter, we will throw.
 * @return Resulting filter. Can be null if isValidatedFilter is false, and there was error.
 */
private String makeQueryFilterString(String dbName, Table table, ExpressionTree tree,
    Map<String, Object> params, boolean isValidatedFilter) throws MetaException {
  assert tree != null;
  FilterBuilder queryBuilder = new FilterBuilder(isValidatedFilter);
  if (table != null) {
    // Partition query: anchor the filter to the owning table.
    queryBuilder.append("table.tableName == t1 && table.database.name == t2");
    params.put("t1", table.getTableName());
    params.put("t2", table.getDbName());
  } else {
    // Table query: anchor the filter to the database.
    queryBuilder.append("database.name == dbName");
    params.put("dbName", dbName);
  }

  tree.generateJDOFilterFragment(getConf(), table, params, queryBuilder);
  if (queryBuilder.hasError()) {
    // A pre-validated filter must push down cleanly; failure here is a logic error.
    assert !isValidatedFilter;
    LOG.info("JDO filter pushdown cannot be used: " + queryBuilder.getErrorMessage());
    return null;
  }
  String jdoFilter = queryBuilder.getFilter();
  LOG.debug("jdoFilter = " + jdoFilter);
  return jdoFilter;
}

// Builds a JDOQL parameter declaration for String-valued parameters.
// NOTE(review): the result carries a leading ", " -- usage here appears to rely on the
// JDO implementation tolerating that; confirm before reusing this helper elsewhere.
private String makeParameterDeclarationString(Map<String, String> params) {
  //Create the parameter declaration string
  StringBuilder paramDecl = new StringBuilder();
  for (String key : params.keySet()) {
    paramDecl.append(", java.lang.String " + key);
  }
  return paramDecl.toString();
}

// Like makeParameterDeclarationString, but derives each parameter's declared type from
// the runtime class of its value.
private String makeParameterDeclarationStringObj(Map<String, Object> params) {
  //Create the parameter declaration string
  StringBuilder paramDecl = new StringBuilder();
  for (Entry<String, Object> entry : params.entrySet()) {
    paramDecl.append(", ");
    paramDecl.append(entry.getValue().getClass().getName());
    paramDecl.append(" ");
    paramDecl.append(entry.getKey());
  }
  return paramDecl.toString();
}

@Override
public List<String> listTableNamesByFilter(String dbName, String filter, short maxTables)
    throws MetaException {
  boolean success = false;
  List<String> tableNames = new ArrayList<String>();
  try {
    openTransaction();
    LOG.debug("Executing listTableNamesByFilter");
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    Map<String, Object> params = new HashMap<String, Object>();
    String queryFilterString = makeQueryFilterString(dbName, null, filter, params);
    Query query = pm.newQuery(MTable.class);
    query.declareImports("import java.lang.String");
    query.setResult("tableName");
    query.setResultClass(java.lang.String.class);
    if (maxTables >= 0) {
      query.setRange(0, maxTables);
    }
    LOG.debug("filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString);
    for (Entry<String, Object> entry : params.entrySet()) {
      LOG.debug("key: " + entry.getKey() + " value: " + entry.getValue()
          + " class: " + entry.getValue().getClass().getName());
    }
    String parameterDeclaration = makeParameterDeclarationStringObj(params);
    query.declareParameters(parameterDeclaration);
    query.setFilter(queryFilterString);
    Collection names = (Collection) query.executeWithMap(params);
    //have to emulate "distinct", otherwise tables with the same name may be returned
    Set<String> tableNamesSet = new HashSet<String>();
    for (Iterator i = names.iterator(); i.hasNext();) {
      tableNamesSet.add((String) i.next());
    }
    tableNames = new ArrayList<String>(tableNamesSet);
    LOG.debug("Done executing query for listTableNamesByFilter");
    success = commitTransaction();
    LOG.debug("Done retrieving all objects for listTableNamesByFilter");
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return tableNames;
}

@Override
public List<String> listPartitionNamesByFilter(String dbName, String tableName,
    String filter, short maxParts) throws MetaException {
  boolean success = false;
  List<String> partNames = new ArrayList<String>();
  try {
    openTransaction();
    LOG.debug("Executing listMPartitionNamesByFilter");
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    MTable mtable = getMTable(dbName, tableName);
    if( mtable == null ) {
      // To be consistent with the behavior of listPartitionNames, if the
      // table or db does not exist, we return an empty list
      return partNames;
    }
    Map<String, Object> params = new HashMap<String, Object>();
    String queryFilterString = makeQueryFilterString(dbName, mtable, filter, params);
    Query query =
        pm.newQuery(
        "select partitionName from org.apache.hadoop.hive.metastore.model.MPartition "
        + "where " + queryFilterString);
    if( maxParts >= 0 ) {
      //User specified a row limit, set it on the Query
      query.setRange(0, maxParts);
    }
    LOG.debug("Filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString);
    LOG.debug("Parms is " + params);
    String parameterDeclaration = makeParameterDeclarationStringObj(params);
    query.declareParameters(parameterDeclaration);
    query.setOrdering("partitionName ascending");
    query.setResult("partitionName");
    Collection names = (Collection) query.executeWithMap(params);
    partNames = new ArrayList<String>();
    for (Iterator i = names.iterator(); i.hasNext();) {
      partNames.add((String) i.next());
    }
    LOG.debug("Done executing query for listMPartitionNamesByFilter");
    success = commitTransaction();
    LOG.debug("Done retrieving all objects for listMPartitionNamesByFilter");
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return partNames;
}

@Override
public void alterTable(String dbname, String name, Table newTable)
    throws InvalidObjectException, MetaException {
  boolean success = false;
  try {
    openTransaction();
    name = HiveStringUtils.normalizeIdentifier(name);
    dbname = HiveStringUtils.normalizeIdentifier(dbname);
    MTable newt = convertToMTable(newTable);
    if (newt == null) {
      throw new InvalidObjectException("new table is invalid");
    }

    MTable oldt = getMTable(dbname, name);
    if (oldt == null) {
      throw new MetaException("table " + dbname + "." + name + " doesn't exist");
    }

    // For now only alter name, owner, parameters, cols, bucketcols are allowed
    oldt.setDatabase(newt.getDatabase());
    oldt.setTableName(HiveStringUtils.normalizeIdentifier(newt.getTableName()));
    oldt.setParameters(newt.getParameters());
    oldt.setOwner(newt.getOwner());
    // Fully copy over the contents of the new SD into the old SD,
    // so we don't create an extra SD in the metastore db that has no references.
    copyMSD(newt.getSd(), oldt.getSd());
    oldt.setRetention(newt.getRetention());
    oldt.setPartitionKeys(newt.getPartitionKeys());
    oldt.setTableType(newt.getTableType());
    oldt.setLastAccessTime(newt.getLastAccessTime());
    oldt.setViewOriginalText(newt.getViewOriginalText());
    oldt.setViewExpandedText(newt.getViewExpandedText());

    // commit the changes
    success = commitTransaction();
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
}

@Override
public void alterIndex(String dbname, String baseTblName, String name, Index newIndex)
    throws InvalidObjectException, MetaException {
  boolean success = false;
  try {
    openTransaction();
    name = HiveStringUtils.normalizeIdentifier(name);
    baseTblName = HiveStringUtils.normalizeIdentifier(baseTblName);
    dbname = HiveStringUtils.normalizeIdentifier(dbname);
    MIndex newi = convertToMIndex(newIndex);
    if (newi == null) {
      throw new InvalidObjectException("new index is invalid");
    }

    MIndex oldi = getMIndex(dbname, baseTblName, name);
    if (oldi == null) {
      throw new MetaException("index " + name + " doesn't exist");
    }

    // For now only alter parameters are allowed
    oldi.setParameters(newi.getParameters());

    // commit the changes
    success = commitTransaction();
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
}

// Core of alterPartition: mutates the persistent MPartition in place. The caller owns
// the surrounding transaction (hence "NoTxn").
private void alterPartitionNoTxn(String dbname, String name, List<String> part_vals,
    Partition newPart) throws InvalidObjectException, MetaException {
  name = HiveStringUtils.normalizeIdentifier(name);
  dbname = HiveStringUtils.normalizeIdentifier(dbname);
  MPartition oldp = getMPartition(dbname, name, part_vals);
  MPartition newp = convertToMPart(newPart, false);
  if (oldp == null || newp == null) {
    throw new InvalidObjectException("partition does not exist.");
  }
  oldp.setValues(newp.getValues());
  oldp.setPartitionName(newp.getPartitionName());
  oldp.setParameters(newPart.getParameters());
  // Virtual views have no storage descriptor to copy.
  if (!TableType.VIRTUAL_VIEW.name().equals(oldp.getTable().getTableType())) {
    copyMSD(newp.getSd(), oldp.getSd());
  }
  if (newp.getCreateTime() !=
      oldp.getCreateTime()) {
    oldp.setCreateTime(newp.getCreateTime());
  }
  if (newp.getLastAccessTime() != oldp.getLastAccessTime()) {
    oldp.setLastAccessTime(newp.getLastAccessTime());
  }
}

@Override
public void alterPartition(String dbname, String name, List<String> part_vals,
    Partition newPart) throws InvalidObjectException, MetaException {
  boolean success = false;
  Exception e = null;
  try {
    openTransaction();
    alterPartitionNoTxn(dbname, name, part_vals, newPart);
    // commit the changes
    success = commitTransaction();
  } catch (Exception exception) {
    // Remember the cause so the MetaException thrown below can carry it.
    e = exception;
  } finally {
    if (!success) {
      rollbackTransaction();
      MetaException metaException = new MetaException(
          "The transaction for alter partition did not commit successfully.");
      if (e != null) {
        metaException.initCause(e);
      }
      throw metaException;
    }
  }
}

@Override
public void alterPartitions(String dbname, String name, List<List<String>> part_vals,
    List<Partition> newParts) throws InvalidObjectException, MetaException {
  boolean success = false;
  Exception e = null;
  try {
    openTransaction();
    // part_vals and newParts are treated as parallel lists: the i-th value list
    // identifies the partition to be replaced by the i-th new Partition.
    Iterator<List<String>> part_val_itr = part_vals.iterator();
    for (Partition tmpPart: newParts) {
      List<String> tmpPartVals = part_val_itr.next();
      alterPartitionNoTxn(dbname, name, tmpPartVals, tmpPart);
    }
    // commit the changes
    success = commitTransaction();
  } catch (Exception exception) {
    e = exception;
  } finally {
    if (!success) {
      rollbackTransaction();
      MetaException metaException = new MetaException(
          "The transaction for alter partition did not commit successfully.");
      if (e != null) {
        metaException.initCause(e);
      }
      throw metaException;
    }
  }
}

// Copies every field of newSd onto oldSd in place, swapping oldSd's column descriptor
// for newSd's when the column lists differ, then garbage-collecting the old CD if that
// leaves it unreferenced.
private void copyMSD(MStorageDescriptor newSd, MStorageDescriptor oldSd) {
  oldSd.setLocation(newSd.getLocation());
  MColumnDescriptor oldCD = oldSd.getCD();
  // If the columns of the old column descriptor != the columns of the new one,
  // then change the old storage descriptor's column descriptor.
  // Convert the MFieldSchema's to their thrift object counterparts, because we maintain
  // datastore identity (i.e., identity of the model objects are managed by JDO,
  // not the application).
  if (!(oldSd != null && oldSd.getCD() != null &&
      oldSd.getCD().getCols() != null &&
      newSd != null && newSd.getCD() != null &&
      newSd.getCD().getCols() != null &&
      convertToFieldSchemas(newSd.getCD().getCols()).
      equals(convertToFieldSchemas(oldSd.getCD().getCols()))
    )) {
    oldSd.setCD(newSd.getCD());
  }

  //If oldCd does not have any more references, then we should delete it
  // from the backend db
  removeUnusedColumnDescriptor(oldCD);
  oldSd.setBucketCols(newSd.getBucketCols());
  oldSd.setCompressed(newSd.isCompressed());
  oldSd.setInputFormat(newSd.getInputFormat());
  oldSd.setOutputFormat(newSd.getOutputFormat());
  oldSd.setNumBuckets(newSd.getNumBuckets());
  oldSd.getSerDeInfo().setName(newSd.getSerDeInfo().getName());
  oldSd.getSerDeInfo().setSerializationLib(
      newSd.getSerDeInfo().getSerializationLib());
  oldSd.getSerDeInfo().setParameters(newSd.getSerDeInfo().getParameters());
  oldSd.setSkewedColNames(newSd.getSkewedColNames());
  oldSd.setSkewedColValues(newSd.getSkewedColValues());
  oldSd.setSkewedColValueLocationMaps(newSd.getSkewedColValueLocationMaps());
  oldSd.setSortCols(newSd.getSortCols());
  oldSd.setParameters(newSd.getParameters());
  oldSd.setStoredAsSubDirectories(newSd.isStoredAsSubDirectories());
}

/**
 * Checks if a column descriptor has any remaining references by storage descriptors
 * in the db. If it does not, then delete the CD. If it does, then do nothing.
 * @param oldCD the column descriptor to delete if it is no longer referenced anywhere
 */
private void removeUnusedColumnDescriptor(MColumnDescriptor oldCD) {
  if (oldCD == null) {
    return;
  }

  boolean success = false;
  try {
    openTransaction();
    LOG.debug("execute removeUnusedColumnDescriptor");
    // Fetching at most one referencing SD is enough to decide.
    List<MStorageDescriptor> referencedSDs = listStorageDescriptorsWithCD(oldCD, 1);
    //if no other SD references this CD, we can throw it out.
    if (referencedSDs != null && referencedSDs.isEmpty()) {
      pm.retrieve(oldCD);
      pm.deletePersistent(oldCD);
    }
    success = commitTransaction();
    LOG.debug("successfully deleted a CD in removeUnusedColumnDescriptor");
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
}

/**
 * Called right before an action that would drop a storage descriptor.
 * This function makes the SD's reference to a CD null, and then deletes the CD
 * if it no longer is referenced in the table.
 * @param msd the storage descriptor to drop
 */
private void preDropStorageDescriptor(MStorageDescriptor msd) {
  if (msd == null || msd.getCD() == null) {
    return;
  }

  MColumnDescriptor mcd = msd.getCD();
  // Because there is a 1-N relationship between CDs and SDs,
  // we must set the SD's CD to null first before dropping the storage descriptor
  // to satisfy foreign key constraints.
  msd.setCD(null);
  removeUnusedColumnDescriptor(mcd);
}

/**
 * Get a list of storage descriptors that reference a particular Column Descriptor
 * @param oldCD the column descriptor to get storage descriptors for
 * @param maxSDs the maximum number of SDs to return
 * @return a list of storage descriptors
 */
private List<MStorageDescriptor> listStorageDescriptorsWithCD(MColumnDescriptor oldCD,
    long maxSDs) {
  boolean success = false;
  List<MStorageDescriptor> sds = null;
  try {
    openTransaction();
    LOG.debug("Executing listStorageDescriptorsWithCD");
    Query query = pm.newQuery(MStorageDescriptor.class, "this.cd == inCD");
    query.declareParameters("MColumnDescriptor inCD");
    if(maxSDs >= 0) {
      //User specified a row limit, set it on the Query
      query.setRange(0, maxSDs);
    }
    sds = (List<MStorageDescriptor>) query.execute(oldCD);
    LOG.debug("Done executing query for listStorageDescriptorsWithCD");
    pm.retrieveAll(sds);
    success = commitTransaction();
    LOG.debug("Done retrieving all objects for listStorageDescriptorsWithCD");
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return sds;
}

@Override
public boolean addIndex(Index index) throws InvalidObjectException, MetaException {
  boolean commited = false;
  try {
    openTransaction();
    MIndex idx = convertToMIndex(index);
    pm.makePersistent(idx);
    commited = commitTransaction();
    return true;
  } finally {
    if (!commited) {
      rollbackTransaction();
      // NOTE(review): returning from a finally block discards any in-flight exception
      // from the try block (callers see 'false' instead of the exception) -- confirm
      // callers depend on this before changing it.
      return false;
    }
  }
}

private MIndex convertToMIndex(Index index) throws InvalidObjectException,
    MetaException {
  StorageDescriptor sd = index.getSd();
  if (sd == null) {
    throw new InvalidObjectException("Storage descriptor is not defined for index.");
  }

  MStorageDescriptor msd = this.convertToMStorageDescriptor(sd);
  MTable origTable = getMTable(index.getDbName(), index.getOrigTableName());
  if (origTable == null) {
    throw new InvalidObjectException(
        "Original table does not exist for the given index.");
  }
  String[] qualified = MetaStoreUtils.getQualifiedName(index.getDbName(),
      index.getIndexTableName());
  MTable indexTable = getMTable(qualified[0], qualified[1]);
  if (indexTable == null) {
    throw new InvalidObjectException(
        "Underlying index table does not exist for the given index.");
  }

  return new MIndex(HiveStringUtils.normalizeIdentifier(index.getIndexName()),
      origTable, index.getCreateTime(), index.getLastAccessTime(),
      index.getParameters(), indexTable, msd, index.getIndexHandlerClass(),
      index.isDeferredRebuild());
}

@Override
public boolean dropIndex(String dbName, String origTableName, String indexName)
    throws MetaException {
  boolean success = false;
  try {
    openTransaction();
    MIndex index = getMIndex(dbName, origTableName, indexName);
    if (index != null) {
      pm.deletePersistent(index);
    }
    success = commitTransaction();
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return success;
}

// Looks up the JDO model object for an index; returns null if the base table or the
// index does not exist.
private MIndex getMIndex(String dbName, String originalTblName, String indexName)
    throws MetaException {
  MIndex midx = null;
  boolean commited = false;
  try {
    openTransaction();
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    originalTblName = HiveStringUtils.normalizeIdentifier(originalTblName);
    MTable mtbl = getMTable(dbName,
        originalTblName);
    if (mtbl == null) {
      commited = commitTransaction();
      return null;
    }

    Query query = pm.newQuery(MIndex.class,
        "origTable.tableName == t1 && origTable.database.name == t2 && indexName == t3");
    query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
    query.setUnique(true);
    midx = (MIndex) query.execute(originalTblName, dbName,
        HiveStringUtils.normalizeIdentifier(indexName));
    pm.retrieve(midx);
    commited = commitTransaction();
  } finally {
    if (!commited) {
      rollbackTransaction();
    }
  }
  return midx;
}

@Override
public Index getIndex(String dbName, String origTableName, String indexName)
    throws MetaException {
  openTransaction();
  MIndex mIndex = this.getMIndex(dbName, origTableName, indexName);
  Index ret = convertToIndex(mIndex);
  commitTransaction();
  return ret;
}

// Converts a JDO model index into its thrift counterpart; returns null for null input.
private Index convertToIndex(MIndex mIndex) throws MetaException {
  if (mIndex == null) {
    return null;
  }

  MTable origTable = mIndex.getOrigTable();
  MTable indexTable = mIndex.getIndexTable();

  return new Index(
      mIndex.getIndexName(),
      mIndex.getIndexHandlerClass(),
      origTable.getDatabase().getName(),
      origTable.getTableName(),
      mIndex.getCreateTime(),
      mIndex.getLastAccessTime(),
      indexTable.getTableName(),
      convertToStorageDescriptor(mIndex.getSd()),
      mIndex.getParameters(),
      mIndex.getDeferredRebuild());
}

@Override
public List<Index> getIndexes(String dbName, String origTableName, int max)
    throws MetaException {
  boolean success = false;
  try {
    openTransaction();
    List<MIndex> mIndexList = listMIndexes(dbName, origTableName, max);
    List<Index> indexes = new ArrayList<Index>(mIndexList.size());
    for (MIndex midx : mIndexList) {
      indexes.add(this.convertToIndex(midx));
    }
    success = commitTransaction();
    return indexes;
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
}

// NOTE(review): the 'max' parameter is never applied to the query (no setRange call),
// so all indexes are always returned -- confirm whether this is intentional.
private List<MIndex> listMIndexes(String dbName, String origTableName, int max) {
  boolean success = false;
  List<MIndex> mindexes = null;
  try {
    openTransaction();
    LOG.debug("Executing listMIndexes");
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    origTableName = HiveStringUtils.normalizeIdentifier(origTableName);
    Query query = pm.newQuery(MIndex.class,
        "origTable.tableName == t1 && origTable.database.name == t2");
    query.declareParameters("java.lang.String t1, java.lang.String t2");
    mindexes = (List<MIndex>) query.execute(origTableName, dbName);
    LOG.debug("Done executing query for listMIndexes");
    pm.retrieveAll(mindexes);
    success = commitTransaction();
    LOG.debug("Done retrieving all objects for listMIndexes");
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return mindexes;
}

@Override
public List<String> listIndexNames(String dbName, String origTableName, short max)
    throws MetaException {
  List<String> pns = new ArrayList<String>();
  boolean success = false;
  try {
    openTransaction();
    LOG.debug("Executing listIndexNames");
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    origTableName = HiveStringUtils.normalizeIdentifier(origTableName);
    Query q = pm.newQuery(
        "select indexName from org.apache.hadoop.hive.metastore.model.MIndex "
        + "where origTable.database.name == t1 && origTable.tableName == t2 "
        + "order by indexName asc");
    q.declareParameters("java.lang.String t1, java.lang.String t2");
    q.setResult("indexName");
    Collection names = (Collection) q.execute(dbName, origTableName);
    for (Iterator i = names.iterator(); i.hasNext();) {
      pns.add((String) i.next());
    }
    success = commitTransaction();
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return pns;
}

@Override
public boolean addRole(String roleName, String ownerName)
    throws InvalidObjectException, MetaException, NoSuchObjectException {
  boolean success = false;
  boolean commited = false;
  try {
    openTransaction();
    MRole nameCheck = this.getMRole(roleName);
    if (nameCheck != null) {
      throw new InvalidObjectException("Role " + roleName + " already exists.");
    }
    // Creation time is stored as seconds since the epoch.
    int now = (int)(System.currentTimeMillis()/1000);
    MRole mRole = new MRole(roleName, now, ownerName);
    pm.makePersistent(mRole);
    commited =
        commitTransaction();
    success = true;
  } finally {
    if (!commited) {
      rollbackTransaction();
    }
  }
  return success;
}

@Override
public boolean grantRole(Role role, String userName,
    PrincipalType principalType, String grantor, PrincipalType grantorType,
    boolean grantOption) throws MetaException, NoSuchObjectException,InvalidObjectException {
  boolean success = false;
  boolean commited = false;
  try {
    openTransaction();
    MRoleMap roleMap = null;
    try {
      roleMap = this.getMSecurityUserRoleMap(userName, principalType, role
          .getRoleName());
    } catch (Exception e) {
      // Intentionally ignored: a missing existing grant is the normal case here;
      // roleMap simply stays null.
    }
    if (roleMap != null) {
      throw new InvalidObjectException("Principal " + userName
          + " already has the role " + role.getRoleName());
    }
    // When granting a role to a role, the grantee role must exist.
    if (principalType == PrincipalType.ROLE) {
      validateRole(userName);
    }
    MRole mRole = getMRole(role.getRoleName());
    long now = System.currentTimeMillis()/1000;
    MRoleMap roleMember = new MRoleMap(userName, principalType.toString(),
        mRole, (int) now, grantor, grantorType.toString(), grantOption);
    pm.makePersistent(roleMember);
    commited = commitTransaction();
    success = true;
  } finally {
    if (!commited) {
      rollbackTransaction();
    }
  }
  return success;
}

/**
 * Verify that role with given name exists, if not throw exception
 * @param roleName
 * @throws NoSuchObjectException
 */
private void validateRole(String roleName) throws NoSuchObjectException {
  // if grantee is a role, check if it exists
  MRole granteeRole = getMRole(roleName);
  if (granteeRole == null) {
    throw new NoSuchObjectException("Role " + roleName + " does not exist");
  }
}

@Override
public boolean revokeRole(Role role, String userName, PrincipalType principalType,
    boolean grantOption) throws MetaException, NoSuchObjectException {
  boolean success = false;
  try {
    openTransaction();
    // NOTE(review): getMSecurityUserRoleMap returns null when the grant does not
    // exist, which would NPE below -- confirm callers pre-check the grant.
    MRoleMap roleMember = getMSecurityUserRoleMap(userName, principalType,
        role.getRoleName());
    if (grantOption) {
      // Revoke with grant option - only remove the grant option but keep the role.
      if (roleMember.getGrantOption()) {
        roleMember.setGrantOption(false);
      } else {
        throw new MetaException("User " + userName
            + " does not have grant option with role " + role.getRoleName());
      }
    } else {
      // No grant option in revoke, remove the whole role.
      pm.deletePersistent(roleMember);
    }
    success = commitTransaction();
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return success;
}

// Looks up the grant record linking a principal to a role; null when no such grant.
private MRoleMap getMSecurityUserRoleMap(String userName, PrincipalType principalType,
    String roleName) {
  MRoleMap mRoleMember = null;
  boolean commited = false;
  try {
    openTransaction();
    Query query = pm.newQuery(MRoleMap.class,
        "principalName == t1 && principalType == t2 && role.roleName == t3");
    query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
    query.setUnique(true);
    mRoleMember = (MRoleMap) query.executeWithArray(userName,
        principalType.toString(), roleName);
    pm.retrieve(mRoleMember);
    commited = commitTransaction();
  } finally {
    if (!commited) {
      rollbackTransaction();
    }
  }
  return mRoleMember;
}

@Override
public boolean removeRole(String roleName) throws MetaException, NoSuchObjectException {
  boolean success = false;
  try {
    openTransaction();
    MRole mRol = getMRole(roleName);
    pm.retrieve(mRol);
    if (mRol != null) {
      // first remove all the membership, the membership that this role has
      // been granted
      List<MRoleMap> roleMap = listRoleMembers(mRol.getRoleName());
      if (roleMap.size() > 0) {
        pm.deletePersistentAll(roleMap);
      }
      List<MRoleMap> roleMember = listMSecurityPrincipalMembershipRole(mRol
          .getRoleName(), PrincipalType.ROLE);
      if (roleMember.size() > 0) {
        pm.deletePersistentAll(roleMember);
      }
      // then remove all the grants
      List<MGlobalPrivilege> userGrants = listPrincipalGlobalGrants(
          mRol.getRoleName(), PrincipalType.ROLE);
      if (userGrants.size() > 0) {
        pm.deletePersistentAll(userGrants);
      }
      List<MDBPrivilege> dbGrants = listPrincipalAllDBGrant(mRol
          .getRoleName(), PrincipalType.ROLE);
      if (dbGrants.size() > 0) {
        pm.deletePersistentAll(dbGrants);
      }
      List<MTablePrivilege> tabPartGrants = listPrincipalAllTableGrants(
          mRol.getRoleName(), PrincipalType.ROLE);
      if (tabPartGrants.size() > 0) {
        pm.deletePersistentAll(tabPartGrants);
      }
      List<MPartitionPrivilege> partGrants = listPrincipalAllPartitionGrants(
          mRol.getRoleName(), PrincipalType.ROLE);
      if (partGrants.size() > 0) {
        pm.deletePersistentAll(partGrants);
      }
      List<MTableColumnPrivilege> tblColumnGrants = listPrincipalAllTableColumnGrants(
          mRol.getRoleName(), PrincipalType.ROLE);
      if (tblColumnGrants.size() > 0) {
        pm.deletePersistentAll(tblColumnGrants);
      }
      List<MPartitionColumnPrivilege> partColumnGrants = listPrincipalAllPartitionColumnGrants(
          mRol.getRoleName(), PrincipalType.ROLE);
      if (partColumnGrants.size() > 0) {
        pm.deletePersistentAll(partColumnGrants);
      }
      // finally remove the role
      pm.deletePersistent(mRol);
    }
    success = commitTransaction();
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return success;
}

/**
 * Get all the roles in the role hierarchy that this user and groupNames belongs to
 * @param userName user to collect roles for (may be null)
 * @param groupNames groups to collect roles for (may be null)
 * @return names of all directly and transitively granted roles
 */
private Set<String> listAllRolesInHierarchy(String userName,
    List<String> groupNames) {
  List<MRoleMap> ret = new ArrayList<MRoleMap>();
  if(userName != null) {
    ret.addAll(listRoles(userName, PrincipalType.USER));
  }
  if (groupNames != null) {
    for (String groupName: groupNames) {
      ret.addAll(listRoles(groupName, PrincipalType.GROUP));
    }
  }
  // get names of these roles and its ancestors
  Set<String> roleNames = new HashSet<String>();
  getAllRoleAncestors(roleNames, ret);
  return roleNames;
}

/**
 * Add role names of parentRoles and its parents to processedRoles
 *
 * @param processedRoleNames accumulator of role names already visited (also guards
 *   against cycles in the role graph)
 * @param parentRoles grants whose roles should be expanded
 */
private void getAllRoleAncestors(Set<String> processedRoleNames, List<MRoleMap> parentRoles) {
  for (MRoleMap parentRole : parentRoles) {
    String parentRoleName = parentRole.getRole().getRoleName();
    if (!processedRoleNames.contains(parentRoleName)) {
      // unprocessed role: get its parents, add it to processed, and call this
      // function recursively
      List<MRoleMap> nextParentRoles = listRoles(parentRoleName, PrincipalType.ROLE);
      processedRoleNames.add(parentRoleName);
      getAllRoleAncestors(processedRoleNames, nextParentRoles);
    }
  }
}

@SuppressWarnings("unchecked")
@Override
public List<MRoleMap> listRoles(String principalName,
    PrincipalType principalType) {
  boolean success = false;
  List<MRoleMap> mRoleMember = null;
  try {
    openTransaction();
    LOG.debug("Executing listRoles");
    Query query = pm.newQuery(
        MRoleMap.class,
        "principalName == t1 && principalType == t2");
    query.declareParameters("java.lang.String t1, java.lang.String t2");
    query.setUnique(false);
    mRoleMember = (List<MRoleMap>) query.executeWithArray(
        principalName, principalType.toString());
    LOG.debug("Done executing query for listMSecurityUserRoleMap");
    pm.retrieveAll(mRoleMember);
    success = commitTransaction();
    LOG.debug("Done retrieving all objects for listMSecurityUserRoleMap");
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }

  if (principalType == PrincipalType.USER) {
    // All users belong to public role implicitly, add that role
    // (copy first so we never mutate the JDO-managed result collection).
    if (mRoleMember == null) {
      mRoleMember = new ArrayList<MRoleMap>();
    } else {
      mRoleMember = new ArrayList<MRoleMap>(mRoleMember);
    }
    MRole publicRole = new MRole(HiveMetaStore.PUBLIC, 0, HiveMetaStore.PUBLIC);
    mRoleMember.add(new MRoleMap(principalName, principalType.toString(), publicRole,
        0, null, null, false));
  }
  return mRoleMember;
}

// Lists grants where the given role itself is the grantee (i.e. this role's direct
// membership in other roles).
@SuppressWarnings("unchecked")
private List<MRoleMap> listMSecurityPrincipalMembershipRole(final String roleName,
    final PrincipalType principalType) {
  boolean success = false;
  List<MRoleMap> mRoleMemebership = null;
  try {
    openTransaction();
    LOG.debug("Executing listMSecurityPrincipalMembershipRole");
    Query query = pm.newQuery(MRoleMap.class,
        "principalName == t1 && principalType == t2");
    query.declareParameters("java.lang.String t1, java.lang.String t2");
    mRoleMemebership = (List<MRoleMap>) query.execute(roleName, principalType.toString());
    LOG.debug("Done executing query for listMSecurityPrincipalMembershipRole");
    pm.retrieveAll(mRoleMemebership);
    success = commitTransaction();
    LOG.debug("Done retrieving all objects for listMSecurityPrincipalMembershipRole");
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return mRoleMemebership;
}

@Override
public Role getRole(String roleName) throws NoSuchObjectException {
  MRole mRole = this.getMRole(roleName);
  if (mRole == null) {
    throw new NoSuchObjectException(roleName + " role can not be found.");
  }
  Role ret = new Role(mRole.getRoleName(), mRole.getCreateTime(), mRole
      .getOwnerName());
  return ret;
}

// Looks up the JDO model object for a role; null when the role does not exist.
private MRole getMRole(String roleName) {
  MRole mrole = null;
  boolean commited = false;
  try {
    openTransaction();
    Query query = pm.newQuery(MRole.class, "roleName == t1");
    query.declareParameters("java.lang.String t1");
    query.setUnique(true);
    mrole = (MRole) query.execute(roleName);
    pm.retrieve(mrole);
    commited = commitTransaction();
  } finally {
    if (!commited) {
      rollbackTransaction();
    }
  }
  return mrole;
}

@Override
public List<String> listRoleNames() {
  boolean success = false;
  try {
    openTransaction();
    LOG.debug("Executing listAllRoleNames");
    Query query = pm.newQuery("select roleName from org.apache.hadoop.hive.metastore.model.MRole");
    query.setResult("roleName");
    Collection names = (Collection) query.execute();
    List<String> roleNames = new ArrayList<String>();
    for (Iterator i = names.iterator(); i.hasNext();) {
      roleNames.add((String) i.next());
    }
    success = commitTransaction();
    return roleNames;
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
}

@Override
public PrincipalPrivilegeSet getUserPrivilegeSet(String userName,
    List<String> groupNames) throws InvalidObjectException, MetaException {
  boolean commited = false;
  PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
  try {
    openTransaction();
    if (userName != null) {
      List<MGlobalPrivilege> user = this.listPrincipalGlobalGrants(userName, PrincipalType.USER);
      if(user.size()>0) {
        Map<String, List<PrivilegeGrantInfo>>
          userPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
          List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(user.size());
          for (int i = 0; i < user.size(); i++) {
            MGlobalPrivilege item = user.get(i);
            grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
                .getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
                .getGrantorType()), item.getGrantOption()));
          }
          userPriv.put(userName, grantInfos);
          ret.setUserPrivileges(userPriv);
        }
      }
      if (groupNames != null && groupNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> groupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for(String groupName: groupNames) {
          List<MGlobalPrivilege> group = this.listPrincipalGlobalGrants(groupName, PrincipalType.GROUP);
          if(group.size()>0) {
            List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(group.size());
            for (int i = 0; i < group.size(); i++) {
              MGlobalPrivilege item = group.get(i);
              grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
                  .getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
                  .getGrantorType()), item.getGrantOption()));
            }
            groupPriv.put(groupName, grantInfos);
          }
        }
        ret.setGroupPrivileges(groupPriv);
      }
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return ret;
  }

  /**
   * Returns the database-level privileges granted to one principal on the
   * given database, converted to Thrift grant-info objects.
   * Returns an empty list when the principal is null or has no grants.
   */
  public List<PrivilegeGrantInfo> getDBPrivilege(String dbName,
      String principalName, PrincipalType principalType)
      throws InvalidObjectException, MetaException {
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    if (principalName != null) {
      List<MDBPrivilege> userNameDbPriv = this.listPrincipalDBGrants(
          principalName, principalType, dbName);
      if (userNameDbPriv != null && userNameDbPriv.size() > 0) {
        List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
            userNameDbPriv.size());
        for (int i = 0; i < userNameDbPriv.size(); i++) {
          MDBPrivilege item = userNameDbPriv.get(i);
          grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
              .getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
              .getGrantorType()), item.getGrantOption()));
        }
        return grantInfos;
      }
    }
    return new ArrayList<PrivilegeGrantInfo>(0);
  }

  /**
   * Builds the database-level privilege set for a user, its groups, and every
   * role the user/groups hold (directly or through the role hierarchy).
   */
  @Override
  public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName,
      String userName, List<String> groupNames) throws InvalidObjectException,
      MetaException {
    boolean commited = false;
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
    try {
      openTransaction();
      if (userName != null) {
        Map<String, List<PrivilegeGrantInfo>> dbUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        dbUserPriv.put(userName, getDBPrivilege(dbName, userName,
            PrincipalType.USER));
        ret.setUserPrivileges(dbUserPriv);
      }
      if (groupNames != null && groupNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> dbGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String groupName : groupNames) {
          dbGroupPriv.put(groupName, getDBPrivilege(dbName, groupName,
              PrincipalType.GROUP));
        }
        ret.setGroupPrivileges(dbGroupPriv);
      }
      Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
      if (roleNames != null && roleNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> dbRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String roleName : roleNames) {
          dbRolePriv
              .put(roleName, getDBPrivilege(dbName, roleName, PrincipalType.ROLE));
        }
        ret.setRolePrivileges(dbRolePriv);
      }
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return ret;
  }

  /**
   * Builds the partition-level privilege set for a user, its groups, and all
   * held roles, for one partition of one table.
   */
  @Override
  public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName,
      String tableName, String partition, String userName,
      List<String> groupNames) throws InvalidObjectException, MetaException {
    boolean commited = false;
    PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    try {
      openTransaction();
      if (userName != null) {
        Map<String, List<PrivilegeGrantInfo>> partUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        partUserPriv.put(userName, getPartitionPrivilege(dbName,
            tableName, partition, userName, PrincipalType.USER));
        ret.setUserPrivileges(partUserPriv);
      }
      if (groupNames != null && groupNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> partGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String groupName : groupNames) {
          partGroupPriv.put(groupName, getPartitionPrivilege(dbName, tableName,
              partition, groupName, PrincipalType.GROUP));
        }
        ret.setGroupPrivileges(partGroupPriv);
      }
      Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
      if (roleNames != null && roleNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> partRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String roleName : roleNames) {
          partRolePriv.put(roleName, getPartitionPrivilege(dbName, tableName,
              partition, roleName, PrincipalType.ROLE));
        }
        ret.setRolePrivileges(partRolePriv);
      }
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return ret;
  }

  /**
   * Builds the table-level privilege set for a user, its groups, and all held
   * roles, for one table.
   */
  @Override
  public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName,
      String tableName, String userName, List<String> groupNames)
      throws InvalidObjectException, MetaException {
    boolean commited = false;
    PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    try {
      openTransaction();
      if (userName != null) {
        Map<String, List<PrivilegeGrantInfo>> tableUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        tableUserPriv.put(userName, getTablePrivilege(dbName,
            tableName, userName, PrincipalType.USER));
        ret.setUserPrivileges(tableUserPriv);
      }
      if (groupNames != null && groupNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> tableGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String groupName : groupNames) {
          tableGroupPriv.put(groupName, getTablePrivilege(dbName,
              tableName, groupName, PrincipalType.GROUP));
        }
        ret.setGroupPrivileges(tableGroupPriv);
      }
      Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
      if (roleNames != null && roleNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> tableRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String roleName : roleNames) {
          tableRolePriv.put(roleName, getTablePrivilege(dbName, tableName,
              roleName, PrincipalType.ROLE));
        }
        ret.setRolePrivileges(tableRolePriv);
      }
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return ret;
  }

  /**
   * Builds the column-level privilege set for a user, its groups, and all held
   * roles. When partitionName is null, table-column grants are consulted;
   * otherwise partition-column grants (see getColumnPrivilege).
   */
  @Override
  public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName,
      String tableName, String partitionName, String columnName,
      String userName, List<String> groupNames) throws InvalidObjectException,
      MetaException {
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    columnName = HiveStringUtils.normalizeIdentifier(columnName);
    boolean commited = false;
    PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
    try {
      openTransaction();
      if (userName != null) {
        Map<String, List<PrivilegeGrantInfo>> columnUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        columnUserPriv.put(userName, getColumnPrivilege(dbName, tableName,
            columnName, partitionName, userName, PrincipalType.USER));
        ret.setUserPrivileges(columnUserPriv);
      }
      if (groupNames != null && groupNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> columnGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String groupName : groupNames) {
          columnGroupPriv.put(groupName, getColumnPrivilege(dbName, tableName,
              columnName, partitionName, groupName, PrincipalType.GROUP));
        }
        ret.setGroupPrivileges(columnGroupPriv);
      }
      Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
      if (roleNames != null && roleNames.size() > 0) {
        Map<String, List<PrivilegeGrantInfo>> columnRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
        for (String roleName : roleNames) {
          columnRolePriv.put(roleName, getColumnPrivilege(dbName, tableName,
              columnName, partitionName, roleName, PrincipalType.ROLE));
        }
        ret.setRolePrivileges(columnRolePriv);
      }
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return ret;
  }

  /**
   * Returns one principal's privileges on a single partition, converted to
   * Thrift grant-info objects; empty list when the principal is null or has
   * no grants.
   */
  private List<PrivilegeGrantInfo> getPartitionPrivilege(String dbName,
      String tableName, String partName, String principalName,
      PrincipalType principalType) {
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    if (principalName != null) {
      List<MPartitionPrivilege> userNameTabPartPriv = this
          .listPrincipalPartitionGrants(principalName, principalType,
              dbName, tableName, partName);
      if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) {
        List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
            userNameTabPartPriv.size());
        for (int i = 0; i < userNameTabPartPriv.size(); i++) {
          MPartitionPrivilege item = userNameTabPartPriv.get(i);
          grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
              .getCreateTime(), item.getGrantor(),
              getPrincipalTypeFromStr(item.getGrantorType()),
              item.getGrantOption()));
        }
        return grantInfos;
      }
    }
    return new ArrayList<PrivilegeGrantInfo>(0);
  }

  /** Parses a PrincipalType from its stored string form; null-safe. */
  private PrincipalType getPrincipalTypeFromStr(String str) {
    return str == null ? null : PrincipalType.valueOf(str);
  }

  /**
   * Returns one principal's privileges on a single table, converted to Thrift
   * grant-info objects; empty list when the principal is null or has no grants.
   */
  private List<PrivilegeGrantInfo> getTablePrivilege(String dbName,
      String tableName, String principalName, PrincipalType principalType) {
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    if (principalName != null) {
      List<MTablePrivilege> userNameTabPartPriv = this
          .listAllTableGrants(principalName, principalType,
              dbName, tableName);
      if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) {
        List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
            userNameTabPartPriv.size());
        for (int i = 0; i < userNameTabPartPriv.size(); i++) {
          MTablePrivilege item = userNameTabPartPriv.get(i);
          grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
              .getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
              .getGrantorType()), item.getGrantOption()));
        }
        return grantInfos;
      }
    }
    return new ArrayList<PrivilegeGrantInfo>(0);
  }

  /**
   * Returns one principal's privileges on a single column. When partitionName
   * is null the table-column grants are read; otherwise the partition-column
   * grants. Empty list when there are no grants.
   */
  private List<PrivilegeGrantInfo> getColumnPrivilege(String dbName,
      String tableName, String columnName, String partitionName,
      String principalName, PrincipalType principalType) {
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    columnName = HiveStringUtils.normalizeIdentifier(columnName);
    if (partitionName == null) {
      List<MTableColumnPrivilege> userNameColumnPriv = this
          .listPrincipalTableColumnGrants(principalName, principalType,
              dbName, tableName, columnName);
      if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) {
        List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
            userNameColumnPriv.size());
        for (int i = 0; i < userNameColumnPriv.size(); i++) {
          MTableColumnPrivilege item = userNameColumnPriv.get(i);
          grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
              .getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
              .getGrantorType()), item.getGrantOption()));
        }
        return grantInfos;
      }
    } else {
      List<MPartitionColumnPrivilege> userNameColumnPriv = this
          .listPrincipalPartitionColumnGrants(principalName,
              principalType, dbName, tableName, partitionName, columnName);
      if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) {
        List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
            userNameColumnPriv.size());
        for (int i = 0; i < userNameColumnPriv.size(); i++) {
          MPartitionColumnPrivilege item = userNameColumnPriv.get(i);
          grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
              .getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
              .getGrantorType()), item.getGrantOption()));
        }
        return grantInfos;
      }
    }
    return new ArrayList<PrivilegeGrantInfo>(0);
  }

  /**
   * Persists every privilege in the bag in a single transaction. For each
   * object type (global/db/table/partition/column), existing grants by the same
   * grantor are collected first and a duplicate grant raises
   * InvalidObjectException.
   * NOTE(review): duplicate detection only considers grants whose grantor
   * matches (case-insensitively); the same privilege from a different grantor
   * is inserted again — confirm this is intended.
   */
  @Override
  public boolean grantPrivileges(PrivilegeBag privileges)
      throws InvalidObjectException, MetaException, NoSuchObjectException {
    boolean committed = false;
    // grant timestamp, seconds since epoch
    int now = (int) (System.currentTimeMillis() / 1000);
    try {
      openTransaction();
      List<Object> persistentObjs = new ArrayList<Object>();
      List<HiveObjectPrivilege> privilegeList = privileges.getPrivileges();
      if (privilegeList != null && privilegeList.size() > 0) {
        Iterator<HiveObjectPrivilege> privIter = privilegeList.iterator();
        Set<String> privSet = new HashSet<String>();
        while (privIter.hasNext()) {
          HiveObjectPrivilege privDef = privIter.next();
          HiveObjectRef hiveObject = privDef.getHiveObject();
          // a single entry may carry a comma-separated list of privileges
          String privilegeStr = privDef.getGrantInfo().getPrivilege();
          String[] privs = privilegeStr.split(",");
          String userName = privDef.getPrincipalName();
          PrincipalType principalType = privDef.getPrincipalType();
          String grantor = privDef.getGrantInfo().getGrantor();
          String grantorType = privDef.getGrantInfo().getGrantorType().toString();
          boolean grantOption = privDef.getGrantInfo().isGrantOption();
          privSet.clear();
          if(principalType == PrincipalType.ROLE){
            // grants to a role require the role to exist
            validateRole(userName);
          }
          if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) {
            List<MGlobalPrivilege> globalPrivs = this
                .listPrincipalGlobalGrants(userName, principalType);
            if
            (globalPrivs != null) {
              // collect this grantor's existing global grants for dup-check
              for (MGlobalPrivilege priv : globalPrivs) {
                if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                  privSet.add(priv.getPrivilege());
                }
              }
            }
            for (String privilege : privs) {
              if (privSet.contains(privilege)) {
                throw new InvalidObjectException(privilege
                    + " is already granted by " + grantor);
              }
              MGlobalPrivilege mGlobalPrivs = new MGlobalPrivilege(userName,
                  principalType.toString(), privilege, now, grantor, grantorType, grantOption);
              persistentObjs.add(mGlobalPrivs);
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
            MDatabase dbObj = getMDatabase(hiveObject.getDbName());
            if (dbObj != null) {
              List<MDBPrivilege> dbPrivs = this.listPrincipalDBGrants(
                  userName, principalType, hiveObject.getDbName());
              if (dbPrivs != null) {
                for (MDBPrivilege priv : dbPrivs) {
                  if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                    privSet.add(priv.getPrivilege());
                  }
                }
              }
              for (String privilege : privs) {
                if (privSet.contains(privilege)) {
                  throw new InvalidObjectException(privilege
                      + " is already granted on database " + hiveObject.getDbName() + " by "
                      + grantor);
                }
                MDBPrivilege mDb = new MDBPrivilege(userName, principalType
                    .toString(), dbObj, privilege, now, grantor, grantorType, grantOption);
                persistentObjs.add(mDb);
              }
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
            MTable tblObj = getMTable(hiveObject.getDbName(), hiveObject
                .getObjectName());
            if (tblObj != null) {
              List<MTablePrivilege> tablePrivs = this
                  .listAllTableGrants(userName, principalType,
                      hiveObject.getDbName(), hiveObject.getObjectName());
              if (tablePrivs != null) {
                for (MTablePrivilege priv : tablePrivs) {
                  // table grants may have a null grantor; guard before compare
                  if (priv.getGrantor() != null
                      && priv.getGrantor().equalsIgnoreCase(grantor)) {
                    privSet.add(priv.getPrivilege());
                  }
                }
              }
              for (String privilege : privs) {
                if (privSet.contains(privilege)) {
                  throw new InvalidObjectException(privilege
                      + " is already granted on table ["
                      + hiveObject.getDbName() + ","
                      + hiveObject.getObjectName() + "] by " + grantor);
                }
                MTablePrivilege mTab = new MTablePrivilege(
                    userName, principalType.toString(), tblObj,
                    privilege, now, grantor, grantorType, grantOption);
                persistentObjs.add(mTab);
              }
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
            MPartition partObj = this.getMPartition(hiveObject.getDbName(),
                hiveObject.getObjectName(), hiveObject.getPartValues());
            String partName = null;
            if (partObj != null) {
              partName = partObj.getPartitionName();
              List<MPartitionPrivilege> partPrivs = this
                  .listPrincipalPartitionGrants(userName,
                      principalType, hiveObject.getDbName(), hiveObject
                          .getObjectName(), partObj.getPartitionName());
              if (partPrivs != null) {
                for (MPartitionPrivilege priv : partPrivs) {
                  if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                    privSet.add(priv.getPrivilege());
                  }
                }
              }
              for (String privilege : privs) {
                if (privSet.contains(privilege)) {
                  throw new InvalidObjectException(privilege
                      + " is already granted on partition ["
                      + hiveObject.getDbName() + ","
                      + hiveObject.getObjectName() + ","
                      + partName + "] by " + grantor);
                }
                MPartitionPrivilege mTab = new MPartitionPrivilege(userName,
                    principalType.toString(), partObj, privilege, now,
                    grantor, grantorType, grantOption);
                persistentObjs.add(mTab);
              }
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
            MTable tblObj = getMTable(hiveObject.getDbName(), hiveObject
                .getObjectName());
            if (tblObj != null) {
              if (hiveObject.getPartValues() != null) {
                // partition-column grant
                MPartition partObj = null;
                List<MPartitionColumnPrivilege> colPrivs = null;
                partObj = this.getMPartition(hiveObject.getDbName(), hiveObject
                    .getObjectName(), hiveObject.getPartValues());
                if (partObj == null) {
                  // partition no longer exists; skip this entry silently
                  continue;
                }
                colPrivs = this.listPrincipalPartitionColumnGrants(
                    userName, principalType, hiveObject.getDbName(), hiveObject
                        .getObjectName(), partObj.getPartitionName(),
                    hiveObject.getColumnName());
                if (colPrivs != null) {
                  for (MPartitionColumnPrivilege priv : colPrivs) {
                    if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                      privSet.add(priv.getPrivilege());
                    }
                  }
                }
                for (String privilege : privs) {
                  if (privSet.contains(privilege)) {
                    throw new InvalidObjectException(privilege
                        + " is already granted on column "
                        + hiveObject.getColumnName() + " ["
                        + hiveObject.getDbName() + ","
                        + hiveObject.getObjectName() + ","
                        + partObj.getPartitionName() + "] by " + grantor);
                  }
                  MPartitionColumnPrivilege mCol = new MPartitionColumnPrivilege(userName,
                      principalType.toString(), partObj, hiveObject
                          .getColumnName(), privilege, now, grantor, grantorType,
                      grantOption);
                  persistentObjs.add(mCol);
                }
              } else {
                // table-column grant
                List<MTableColumnPrivilege> colPrivs = null;
                colPrivs = this.listPrincipalTableColumnGrants(
                    userName, principalType, hiveObject.getDbName(), hiveObject
                        .getObjectName(), hiveObject.getColumnName());
                if (colPrivs != null) {
                  for (MTableColumnPrivilege priv : colPrivs) {
                    if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                      privSet.add(priv.getPrivilege());
                    }
                  }
                }
                for (String privilege : privs) {
                  if (privSet.contains(privilege)) {
                    throw new InvalidObjectException(privilege
                        + " is already granted on column "
                        + hiveObject.getColumnName() + " ["
                        + hiveObject.getDbName() + ","
                        + hiveObject.getObjectName() + "] by " + grantor);
                  }
                  MTableColumnPrivilege mCol = new MTableColumnPrivilege(userName,
                      principalType.toString(), tblObj, hiveObject
                          .getColumnName(), privilege, now, grantor, grantorType,
                      grantOption);
                  persistentObjs.add(mCol);
                }
              }
            }
          }
        }
      }
      if (persistentObjs.size() > 0) {
        pm.makePersistentAll(persistentObjs);
      }
      committed = commitTransaction();
    } finally {
      if (!committed) {
        rollbackTransaction();
      }
    }
    return committed;
  }

  /**
   * Revokes the privileges in the bag in a single transaction. With
   * grantOption == true only the grant option flag is cleared on each matching
   * grant; otherwise the matching grant rows are deleted. A privilege that is
   * not currently granted raises InvalidObjectException.
   * NOTE(review): within each object-type branch the 'found' flag is not reset
   * between privileges of one comma-separated list, so a missing later
   * privilege will not raise if an earlier one matched — confirm intended.
   */
  @Override
  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
      throws InvalidObjectException, MetaException, NoSuchObjectException {
    boolean committed = false;
    try {
      openTransaction();
      List<Object> persistentObjs = new ArrayList<Object>();
      List<HiveObjectPrivilege> privilegeList = privileges.getPrivileges();
      if (privilegeList != null && privilegeList.size() > 0) {
        Iterator<HiveObjectPrivilege> privIter = privilegeList.iterator();
        while (privIter.hasNext()) {
          HiveObjectPrivilege privDef = privIter.next();
          HiveObjectRef hiveObject = privDef.getHiveObject();
          String privilegeStr = privDef.getGrantInfo().getPrivilege();
          if (privilegeStr == null || privilegeStr.trim().equals("")) {
            continue;
          }
          String[] privs = privilegeStr.split(",");
          String userName = privDef.getPrincipalName();
          PrincipalType principalType = privDef.getPrincipalType();
          if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) {
            List<MGlobalPrivilege> mSecUser = this.listPrincipalGlobalGrants(
                userName, principalType);
            boolean found = false;
            if (mSecUser != null) {
              for (String privilege : privs) {
                for (MGlobalPrivilege userGrant : mSecUser) {
                  String userGrantPrivs = userGrant.getPrivilege();
                  if (privilege.equals(userGrantPrivs)) {
                    found = true;
                    if (grantOption) {
                      if (userGrant.getGrantOption()) {
                        userGrant.setGrantOption(false);
                      } else {
                        throw new MetaException("User " + userName
                            + " does not have grant option with privilege " + privilege);
                      }
                    }
                    persistentObjs.add(userGrant);
                    break;
                  }
                }
                if (!found) {
                  throw new InvalidObjectException(
                      "No user grant found for privileges " + privilege);
                }
              }
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
            MDatabase dbObj = getMDatabase(hiveObject.getDbName());
            if (dbObj != null) {
              String db = hiveObject.getDbName();
              boolean found = false;
              List<MDBPrivilege> dbGrants = this.listPrincipalDBGrants(
                  userName, principalType, db);
              for (String privilege : privs) {
                for (MDBPrivilege dbGrant : dbGrants) {
                  String dbGrantPriv = dbGrant.getPrivilege();
                  if (privilege.equals(dbGrantPriv)) {
                    found = true;
                    if (grantOption) {
                      if (dbGrant.getGrantOption()) {
                        dbGrant.setGrantOption(false);
                      } else {
                        throw new MetaException("User " + userName
                            + " does not have grant option with privilege " + privilege);
                      }
                    }
                    persistentObjs.add(dbGrant);
                    break;
                  }
                }
                if (!found) {
                  throw new InvalidObjectException(
                      "No database grant found for privileges " + privilege
                          + " on database " + db);
                }
              }
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
            boolean found = false;
            List<MTablePrivilege> tableGrants = this
                .listAllTableGrants(userName, principalType,
                    hiveObject.getDbName(), hiveObject.getObjectName());
            for (String privilege : privs) {
              for (MTablePrivilege tabGrant : tableGrants) {
                String tableGrantPriv = tabGrant.getPrivilege();
                // table privilege names compare case-insensitively here
                if (privilege.equalsIgnoreCase(tableGrantPriv)) {
                  found = true;
                  if (grantOption) {
                    if (tabGrant.getGrantOption()) {
                      tabGrant.setGrantOption(false);
                    } else {
                      throw new MetaException("User " + userName
                          + " does not have grant option with privilege " + privilege);
                    }
                  }
                  persistentObjs.add(tabGrant);
                  break;
                }
              }
              if (!found) {
                throw new InvalidObjectException("No grant (" + privilege
                    + ") found " + " on table " + hiveObject.getObjectName()
                    + ", database is " + hiveObject.getDbName());
              }
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
            boolean found = false;
            Table tabObj = this.getTable(hiveObject.getDbName(), hiveObject.getObjectName());
            String partName = null;
            if (hiveObject.getPartValues() != null) {
              // rebuild the canonical partition name from the key values
              partName = Warehouse.makePartName(tabObj.getPartitionKeys(), hiveObject.getPartValues());
            }
            List<MPartitionPrivilege> partitionGrants = this
                .listPrincipalPartitionGrants(userName, principalType,
                    hiveObject.getDbName(), hiveObject.getObjectName(), partName);
            for (String privilege : privs) {
              for (MPartitionPrivilege partGrant : partitionGrants) {
                String partPriv = partGrant.getPrivilege();
                if (partPriv.equalsIgnoreCase(privilege)) {
                  found = true;
                  if (grantOption) {
                    if (partGrant.getGrantOption()) {
                      partGrant.setGrantOption(false);
                    } else {
                      throw new MetaException("User " + userName
                          + " does not have grant option with privilege " + privilege);
                    }
                  }
                  persistentObjs.add(partGrant);
                  break;
                }
              }
              if (!found) {
                throw new InvalidObjectException("No grant (" + privilege
                    + ") found " + " on table " + tabObj.getTableName()
                    + ", partition is " + partName + ", database is " + tabObj.getDbName());
              }
            }
          } else if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
            Table tabObj = this.getTable(hiveObject.getDbName(), hiveObject
                .getObjectName());
            String partName = null;
            if (hiveObject.getPartValues() != null) {
              partName = Warehouse.makePartName(tabObj.getPartitionKeys(),
                  hiveObject.getPartValues());
            }
            if (partName != null) {
              // partition-column revoke
              List<MPartitionColumnPrivilege> mSecCol = listPrincipalPartitionColumnGrants(
                  userName, principalType, hiveObject.getDbName(), hiveObject
                      .getObjectName(), partName, hiveObject.getColumnName());
              boolean found = false;
              if (mSecCol != null) {
                for (String privilege : privs) {
                  for (MPartitionColumnPrivilege col : mSecCol) {
                    String colPriv = col.getPrivilege();
                    if (colPriv.equalsIgnoreCase(privilege)) {
                      found = true;
                      if (grantOption) {
                        if (col.getGrantOption()) {
                          col.setGrantOption(false);
                        } else {
                          throw new MetaException("User " + userName
                              + " does not have grant option with privilege " + privilege);
                        }
                      }
                      persistentObjs.add(col);
                      break;
                    }
                  }
                  if (!found) {
                    throw new InvalidObjectException("No grant (" + privilege
                        + ") found " + " on table " + tabObj.getTableName()
                        + ", partition is " + partName + ", column name = "
                        + hiveObject.getColumnName() + ", database is "
                        + tabObj.getDbName());
                  }
                }
              }
            } else {
              // table-column revoke
              List<MTableColumnPrivilege> mSecCol = listPrincipalTableColumnGrants(
                  userName, principalType, hiveObject.getDbName(), hiveObject
                      .getObjectName(), hiveObject.getColumnName());
              boolean found = false;
              if (mSecCol != null) {
                for (String privilege : privs) {
                  for (MTableColumnPrivilege col : mSecCol) {
                    String colPriv = col.getPrivilege();
                    if (colPriv.equalsIgnoreCase(privilege)) {
                      found = true;
                      if (grantOption) {
                        if (col.getGrantOption()) {
                          col.setGrantOption(false);
                        } else {
                          throw new MetaException("User " + userName
                              + " does not have grant option with privilege " + privilege);
                        }
                      }
                      persistentObjs.add(col);
                      break;
                    }
                  }
                  if (!found) {
                    throw new InvalidObjectException("No grant (" + privilege
                        + ") found " + " on table " + tabObj.getTableName()
                        + ", column name = "
                        + hiveObject.getColumnName() + ", database is "
                        + tabObj.getDbName());
                  }
                }
              }
            }
          }
        }
      }
      if (persistentObjs.size() > 0) {
        if (grantOption) {
          // If grant option specified, only update the privilege, don't remove it.
          // Grant option has already been removed from the privileges in the section above
        } else {
          pm.deletePersistentAll(persistentObjs);
        }
      }
      committed = commitTransaction();
    } finally {
      if (!committed) {
        rollbackTransaction();
      }
    }
    return committed;
  }

  /** Lists all memberships (principals) of the given role. */
  @SuppressWarnings("unchecked")
  @Override
  public List<MRoleMap> listRoleMembers(String roleName) {
    boolean success = false;
    List<MRoleMap> mRoleMemeberList = null;
    try {
      openTransaction();
      LOG.debug("Executing listMSecurityUserRoleMember");
      Query query = pm.newQuery(MRoleMap.class,
          "role.roleName == t1");
      query.declareParameters("java.lang.String t1");
      query.setUnique(false);
      mRoleMemeberList = (List<MRoleMap>) query.execute(
          roleName);
      LOG.debug("Done executing query for listMSecurityUserRoleMember");
      pm.retrieveAll(mRoleMemeberList);
      success = commitTransaction();
      LOG.debug("Done retrieving all objects for listMSecurityUserRoleMember");
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
    return mRoleMemeberList;
  }

  /**
   * Lists the global (metastore-wide) grants of one principal.
   * Returns null when principalName is null.
   */
  @SuppressWarnings("unchecked")
  @Override
  public List<MGlobalPrivilege> listPrincipalGlobalGrants(String principalName,
      PrincipalType principalType) {
    boolean commited = false;
    List<MGlobalPrivilege> userNameDbPriv = null;
    try {
      openTransaction();
      if (principalName != null) {
        Query query = pm.newQuery(MGlobalPrivilege.class,
            "principalName == t1 && principalType == t2 ");
        query.declareParameters(
            "java.lang.String t1, java.lang.String t2");
        userNameDbPriv = (List<MGlobalPrivilege>) query
            .executeWithArray(principalName, principalType.toString());
        pm.retrieveAll(userNameDbPriv);
      }
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return userNameDbPriv;
  }

  /** Lists every global grant in the metastore for every principal. */
  @Override
  public List<HiveObjectPrivilege> listGlobalGrantsAll() {
    boolean commited = false;
    try {
      openTransaction();
      Query query = pm.newQuery(MGlobalPrivilege.class);
      List<MGlobalPrivilege> userNameDbPriv = (List<MGlobalPrivilege>) query.execute();
      pm.retrieveAll(userNameDbPriv);
      commited = commitTransaction();
      return convertGlobal(userNameDbPriv);
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
  }

  /** Converts JDO global-privilege rows to Thrift HiveObjectPrivilege objects. */
  private List<HiveObjectPrivilege> convertGlobal(List<MGlobalPrivilege> privs) {
    List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
    for (MGlobalPrivilege priv : privs) {
      String pname = priv.getPrincipalName();
      PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
      // GLOBAL object ref has no db/table/partition/column coordinates
      HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null);
      PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
          priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
      result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
    }
    return result;
  }

  /** Lists one principal's grants on a single database. */
  @SuppressWarnings("unchecked")
  @Override
  public List<MDBPrivilege> listPrincipalDBGrants(String principalName,
      PrincipalType principalType, String dbName) {
    boolean success = false;
    List<MDBPrivilege> mSecurityDBList = null;
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    try {
      openTransaction();
      LOG.debug("Executing listPrincipalDBGrants");
      Query query = pm.newQuery(MDBPrivilege.class,
          "principalName == t1 && principalType == t2 && database.name == t3");
      query
          .declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
      mSecurityDBList = (List<MDBPrivilege>) query.executeWithArray(principalName,
          principalType.toString(), dbName);
      LOG.debug("Done executing query for listPrincipalDBGrants");
      pm.retrieveAll(mSecurityDBList);
      success = commitTransaction();
      LOG.debug("Done retrieving all objects for listPrincipalDBGrants");
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
    return mSecurityDBList;
  }

  /** Lists one principal's database grants across all databases. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
      String principalName, PrincipalType principalType) {
    return convertDB(listPrincipalAllDBGrant(principalName, principalType));
  }

  /** Lists all principals' grants on one database. */
  @Override
  public List<HiveObjectPrivilege> listDBGrantsAll(String dbName) {
    return convertDB(listDatabaseGrants(dbName));
  }

  /** Converts JDO database-privilege rows to Thrift HiveObjectPrivilege objects. */
  private List<HiveObjectPrivilege> convertDB(List<MDBPrivilege> privs) {
    List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
    for (MDBPrivilege priv : privs) {
      String pname = priv.getPrincipalName();
      PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
      String database = priv.getDatabase().getName();
      HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.DATABASE, database,
          null, null, null);
      PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
          priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
      result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
    }
    return result;
  }

  /**
   * Lists database grants for one principal, or for all principals when both
   * principalName and principalType are null.
   */
  @SuppressWarnings("unchecked")
  private List<MDBPrivilege> listPrincipalAllDBGrant(
      String principalName, PrincipalType principalType) {
    boolean success = false;
    List<MDBPrivilege> mSecurityDBList = null;
    try {
      openTransaction();
      LOG.debug("Executing listPrincipalAllDBGrant");
      if (principalName != null && principalType != null) {
        Query query = pm.newQuery(MDBPrivilege.class,
            "principalName == t1 && principalType == t2");
        query
            .declareParameters("java.lang.String t1, java.lang.String t2");
        mSecurityDBList = (List<MDBPrivilege>) query.execute(principalName,
            principalType.toString());
      } else {
        Query query = pm.newQuery(MDBPrivilege.class);
        mSecurityDBList = (List<MDBPrivilege>) query.execute();
      }
      LOG.debug("Done executing query for listPrincipalAllDBGrant");
      pm.retrieveAll(mSecurityDBList);
      success = commitTransaction();
      LOG.debug("Done retrieving all objects for listPrincipalAllDBGrant");
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
    return mSecurityDBList;
  }

  /** Lists every principal's grants on a single table. */
  @SuppressWarnings("unchecked")
  public List<MTablePrivilege> listAllTableGrants(String dbName,
      String tableName) {
    boolean success = false;
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    List<MTablePrivilege> mSecurityTabList = null;
    // NOTE(review): the identifiers are normalized a second time here —
    // redundant (normalization looks idempotent) but harmless
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    try {
      openTransaction();
      LOG.debug("Executing listAllTableGrants");
      String queryStr = "table.tableName == t1 && table.database.name == t2";
      Query query = pm.newQuery(
          MTablePrivilege.class, queryStr);
      query.declareParameters(
          "java.lang.String t1, java.lang.String t2");
      mSecurityTabList = (List<MTablePrivilege>) query
          .executeWithArray(tableName, dbName);
      LOG.debug("Done executing query for listAllTableGrants");
      pm.retrieveAll(mSecurityTabList);
      success = commitTransaction();
      LOG.debug("Done retrieving all objects for listAllTableGrants");
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
    return mSecurityTabList;
  }

  /** Lists every principal's grants on every partition of a single table. */
  @SuppressWarnings("unchecked")
  public List<MPartitionPrivilege> listTableAllPartitionGrants(String dbName,
      String tableName) {
    tableName = HiveStringUtils.normalizeIdentifier(tableName);
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    boolean success = false;
    List<MPartitionPrivilege> mSecurityTabPartList = null;
    try {
      openTransaction();
      LOG.debug("Executing listTableAllPartitionGrants");
      String queryStr = "partition.table.tableName == t1 && partition.table.database.name == t2";
      Query query = pm.newQuery(
          MPartitionPrivilege.class, queryStr);
      query.declareParameters(
          "java.lang.String t1, java.lang.String t2");
      mSecurityTabPartList = (List<MPartitionPrivilege>) query
          .executeWithArray(tableName, dbName);
      LOG.debug("Done executing query for listTableAllPartitionGrants");
      pm.retrieveAll(mSecurityTabPartList);
      success = commitTransaction();
      LOG.debug("Done retrieving all objects for listTableAllPartitionGrants");
    } finally {
      if (!success) {
        rollbackTransaction();
      }
    }
    return mSecurityTabPartList;
  }
@SuppressWarnings("unchecked") public List<MTableColumnPrivilege> listTableAllColumnGrants(String dbName, String tableName) { boolean success = false; List<MTableColumnPrivilege> mTblColPrivilegeList = null; tableName = HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); try { openTransaction(); LOG.debug("Executing listTableAllColumnGrants"); String queryStr = "table.tableName == t1 && table.database.name == t2"; Query query = pm.newQuery(MTableColumnPrivilege.class, queryStr); query.declareParameters("java.lang.String t1, java.lang.String t2"); mTblColPrivilegeList = (List<MTableColumnPrivilege>) query .executeWithArray(tableName, dbName); LOG.debug("Done executing query for listTableAllColumnGrants"); pm.retrieveAll(mTblColPrivilegeList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listTableAllColumnGrants"); } finally { if (!success) { rollbackTransaction(); } } return mTblColPrivilegeList; } @SuppressWarnings("unchecked") public List<MPartitionColumnPrivilege> listTableAllPartitionColumnGrants(String dbName, String tableName) { boolean success = false; tableName = HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); List<MPartitionColumnPrivilege> mSecurityColList = null; try { openTransaction(); LOG.debug("Executing listTableAllPartitionColumnGrants"); String queryStr = "partition.table.tableName == t1 && partition.table.database.name == t2"; Query query = pm.newQuery(MPartitionColumnPrivilege.class, queryStr); query.declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityColList = (List<MPartitionColumnPrivilege>) query .executeWithArray(tableName, dbName); LOG.debug("Done executing query for listTableAllPartitionColumnGrants"); pm.retrieveAll(mSecurityColList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listTableAllPartitionColumnGrants"); } finally { if (!success) { 
rollbackTransaction(); } } return mSecurityColList; } @SuppressWarnings("unchecked") public List<MPartitionColumnPrivilege> listPartitionAllColumnGrants(String dbName, String tableName, List<String> partNames) { boolean success = false; tableName = HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); List<MPartitionColumnPrivilege> mSecurityColList = null; try { openTransaction(); LOG.debug("Executing listPartitionAllColumnGrants"); mSecurityColList = queryByPartitionNames( dbName, tableName, partNames, MPartitionColumnPrivilege.class, "partition.table.tableName", "partition.table.database.name", "partition.partitionName"); LOG.debug("Done executing query for listPartitionAllColumnGrants"); pm.retrieveAll(mSecurityColList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPartitionAllColumnGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityColList; } public void dropPartitionAllColumnGrantsNoTxn( String dbName, String tableName, List<String> partNames) { ObjectPair<Query, Object[]> queryWithParams = makeQueryByPartitionNames( dbName, tableName, partNames, MPartitionColumnPrivilege.class, "partition.table.tableName", "partition.table.database.name", "partition.partitionName"); queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond()); } @SuppressWarnings("unchecked") private List<MDBPrivilege> listDatabaseGrants(String dbName) { dbName = HiveStringUtils.normalizeIdentifier(dbName); boolean success = false; try { openTransaction(); LOG.debug("Executing listDatabaseGrants"); Query query = pm.newQuery(MDBPrivilege.class, "database.name == t1"); query.declareParameters("java.lang.String t1"); List<MDBPrivilege> mSecurityDBList = (List<MDBPrivilege>) query .executeWithArray(dbName); LOG.debug("Done executing query for listDatabaseGrants"); pm.retrieveAll(mSecurityDBList); success = commitTransaction(); LOG.debug("Done retrieving all objects for 
listDatabaseGrants"); return mSecurityDBList; } finally { if (!success) { rollbackTransaction(); } } } @SuppressWarnings("unchecked") private List<MPartitionPrivilege> listPartitionGrants(String dbName, String tableName, List<String> partNames) { tableName = HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); boolean success = false; List<MPartitionPrivilege> mSecurityTabPartList = null; try { openTransaction(); LOG.debug("Executing listPartitionGrants"); mSecurityTabPartList = queryByPartitionNames( dbName, tableName, partNames, MPartitionPrivilege.class, "partition.table.tableName", "partition.table.database.name", "partition.partitionName"); LOG.debug("Done executing query for listPartitionGrants"); pm.retrieveAll(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPartitionGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityTabPartList; } private void dropPartitionGrantsNoTxn(String dbName, String tableName, List<String> partNames) { ObjectPair<Query, Object[]> queryWithParams = makeQueryByPartitionNames( dbName, tableName, partNames,MPartitionPrivilege.class, "partition.table.tableName", "partition.table.database.name", "partition.partitionName"); queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond()); } @SuppressWarnings("unchecked") private <T> List<T> queryByPartitionNames(String dbName, String tableName, List<String> partNames, Class<T> clazz, String tbCol, String dbCol, String partCol) { ObjectPair<Query, Object[]> queryAndParams = makeQueryByPartitionNames( dbName, tableName, partNames, clazz, tbCol, dbCol, partCol); return (List<T>)queryAndParams.getFirst().executeWithArray(queryAndParams.getSecond()); } private ObjectPair<Query, Object[]> makeQueryByPartitionNames( String dbName, String tableName, List<String> partNames, Class<?> clazz, String tbCol, String dbCol, String partCol) { String queryStr = tbCol 
+ " == t1 && " + dbCol + " == t2"; String paramStr = "java.lang.String t1, java.lang.String t2"; Object[] params = new Object[2 + partNames.size()]; params[0] = HiveStringUtils.normalizeIdentifier(tableName); params[1] = HiveStringUtils.normalizeIdentifier(dbName); int index = 0; for (String partName : partNames) { params[index + 2] = partName; queryStr += ((index == 0) ? " && (" : " || ") + partCol + " == p" + index; paramStr += ", java.lang.String p" + index; ++index; } queryStr += ")"; Query query = pm.newQuery(clazz, queryStr); query.declareParameters(paramStr); return new ObjectPair<Query, Object[]>(query, params); } @Override @SuppressWarnings("unchecked") public List<MTablePrivilege> listAllTableGrants( String principalName, PrincipalType principalType, String dbName, String tableName) { tableName = HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); boolean success = false; List<MTablePrivilege> mSecurityTabPartList = null; try { openTransaction(); LOG.debug("Executing listAllTableGrants"); Query query = pm.newQuery( MTablePrivilege.class, "principalName == t1 && principalType == t2 && table.tableName == t3 && table.database.name == t4"); query.declareParameters( "java.lang.String t1, java.lang.String t2, java.lang.String t3, java.lang.String t4"); mSecurityTabPartList = (List<MTablePrivilege>) query .executeWithArray(principalName, principalType.toString(), tableName, dbName); LOG.debug("Done executing query for listAllTableGrants"); pm.retrieveAll(mSecurityTabPartList); success = commitTransaction(); LOG .debug("Done retrieving all objects for listAllTableGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityTabPartList; } @SuppressWarnings("unchecked") @Override public List<MPartitionPrivilege> listPrincipalPartitionGrants( String principalName, PrincipalType principalType, String dbName, String tableName, String partName) { boolean success = false; tableName = 
HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); List<MPartitionPrivilege> mSecurityTabPartList = null; try { openTransaction(); LOG.debug("Executing listMSecurityPrincipalPartitionGrant"); Query query = pm.newQuery( MPartitionPrivilege.class, "principalName == t1 && principalType == t2 && partition.table.tableName == t3 " + "&& partition.table.database.name == t4 && partition.partitionName == t5"); query.declareParameters( "java.lang.String t1, java.lang.String t2, java.lang.String t3, java.lang.String t4, " + "java.lang.String t5"); mSecurityTabPartList = (List<MPartitionPrivilege>) query .executeWithArray(principalName, principalType.toString(), tableName, dbName, partName); LOG.debug("Done executing query for listMSecurityPrincipalPartitionGrant"); pm.retrieveAll(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listMSecurityPrincipalPartitionGrant"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityTabPartList; } @SuppressWarnings("unchecked") @Override public List<MTableColumnPrivilege> listPrincipalTableColumnGrants( String principalName, PrincipalType principalType, String dbName, String tableName, String columnName) { boolean success = false; tableName = HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); columnName = HiveStringUtils.normalizeIdentifier(columnName); List<MTableColumnPrivilege> mSecurityColList = null; try { openTransaction(); LOG.debug("Executing listPrincipalTableColumnGrants"); String queryStr = "principalName == t1 && principalType == t2 && " + "table.tableName == t3 && table.database.name == t4 && columnName == t5 "; Query query = pm.newQuery(MTableColumnPrivilege.class, queryStr); query .declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3, " + "java.lang.String t4, java.lang.String t5"); mSecurityColList = (List<MTableColumnPrivilege>) 
query.executeWithArray( principalName, principalType.toString(), tableName, dbName, columnName); LOG.debug("Done executing query for listPrincipalTableColumnGrants"); pm.retrieveAll(mSecurityColList); success = commitTransaction(); LOG .debug("Done retrieving all objects for listPrincipalTableColumnGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityColList; } @Override @SuppressWarnings("unchecked") public List<MPartitionColumnPrivilege> listPrincipalPartitionColumnGrants( String principalName, PrincipalType principalType, String dbName, String tableName, String partitionName, String columnName) { boolean success = false; tableName = HiveStringUtils.normalizeIdentifier(tableName); dbName = HiveStringUtils.normalizeIdentifier(dbName); columnName = HiveStringUtils.normalizeIdentifier(columnName); List<MPartitionColumnPrivilege> mSecurityColList = null; try { openTransaction(); LOG.debug("Executing listPrincipalPartitionColumnGrants"); Query query = pm .newQuery( MPartitionColumnPrivilege.class, "principalName == t1 && principalType == t2 && partition.table.tableName == t3 " + "&& partition.table.database.name == t4 && partition.partitionName == t5 && columnName == t6"); query .declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3, " + "java.lang.String t4, java.lang.String t5, java.lang.String t6"); mSecurityColList = (List<MPartitionColumnPrivilege>) query .executeWithArray(principalName, principalType.toString(), tableName, dbName, partitionName, columnName); LOG.debug("Done executing query for listPrincipalPartitionColumnGrants"); pm.retrieveAll(mSecurityColList); success = commitTransaction(); LOG .debug("Done retrieving all objects for listPrincipalPartitionColumnGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityColList; } @Override public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll( String principalName, PrincipalType principalType) { boolean success = 
false; try { openTransaction(); LOG.debug("Executing listPrincipalPartitionColumnGrantsAll"); List<MPartitionColumnPrivilege> mSecurityTabPartList; if (principalName != null && principalType != null) { Query query = pm.newQuery(MPartitionColumnPrivilege.class, "principalName == t1 && principalType == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityTabPartList = (List<MPartitionColumnPrivilege>) query.executeWithArray(principalName, principalType.toString()); } else { Query query = pm.newQuery(MPartitionColumnPrivilege.class); mSecurityTabPartList = (List<MPartitionColumnPrivilege>) query.execute(); } LOG.debug("Done executing query for listPrincipalPartitionColumnGrantsAll"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertPartCols(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalPartitionColumnGrantsAll"); return result; } finally { if (!success) { rollbackTransaction(); } } } @Override public List<HiveObjectPrivilege> listPartitionColumnGrantsAll( String dbName, String tableName, String partitionName, String columnName) { boolean success = false; try { openTransaction(); LOG.debug("Executing listPartitionColumnGrantsAll"); Query query = pm.newQuery(MPartitionColumnPrivilege.class, "partition.table.tableName == t3 && partition.table.database.name == t4 && " + "partition.partitionName == t5 && columnName == t6"); query.declareParameters( "java.lang.String t3, java.lang.String t4, java.lang.String t5, java.lang.String t6"); List<MPartitionColumnPrivilege> mSecurityTabPartList = (List<MPartitionColumnPrivilege>) query.executeWithArray(tableName, dbName, partitionName, columnName); LOG.debug("Done executing query for listPartitionColumnGrantsAll"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertPartCols(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for 
listPartitionColumnGrantsAll"); return result; } finally { if (!success) { rollbackTransaction(); } } } private List<HiveObjectPrivilege> convertPartCols(List<MPartitionColumnPrivilege> privs) { List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>(); for (MPartitionColumnPrivilege priv : privs) { String pname = priv.getPrincipalName(); PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); MPartition mpartition = priv.getPartition(); MTable mtable = mpartition.getTable(); MDatabase mdatabase = mtable.getDatabase(); HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN, mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), priv.getColumnName()); PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); } return result; } @SuppressWarnings("unchecked") private List<MTablePrivilege> listPrincipalAllTableGrants( String principalName, PrincipalType principalType) { boolean success = false; List<MTablePrivilege> mSecurityTabPartList = null; try { openTransaction(); LOG.debug("Executing listPrincipalAllTableGrants"); Query query = pm.newQuery(MTablePrivilege.class, "principalName == t1 && principalType == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityTabPartList = (List<MTablePrivilege>) query.execute( principalName, principalType.toString()); LOG .debug("Done executing query for listPrincipalAllTableGrants"); pm.retrieveAll(mSecurityTabPartList); success = commitTransaction(); LOG .debug("Done retrieving all objects for listPrincipalAllTableGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityTabPartList; } @Override public List<HiveObjectPrivilege> listPrincipalTableGrantsAll( String principalName, PrincipalType principalType) { boolean success = false; 
try { openTransaction(); LOG.debug("Executing listPrincipalAllTableGrants"); List<MTablePrivilege> mSecurityTabPartList; if (principalName != null && principalType != null) { Query query = pm.newQuery(MTablePrivilege.class, "principalName == t1 && principalType == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityTabPartList = (List<MTablePrivilege>) query.execute( principalName, principalType.toString()); } else { Query query = pm.newQuery(MTablePrivilege.class); mSecurityTabPartList = (List<MTablePrivilege>) query.execute(); } LOG.debug("Done executing query for listPrincipalAllTableGrants"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertTable(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants"); return result; } finally { if (!success) { rollbackTransaction(); } } } @Override public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) { boolean success = false; try { openTransaction(); LOG.debug("Executing listTableGrantsAll"); Query query = pm.newQuery(MTablePrivilege.class, "table.tableName == t1 && table.database.name == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); List<MTablePrivilege> mSecurityTabPartList = (List<MTablePrivilege>) query.executeWithArray(tableName, dbName); LOG.debug("Done executing query for listTableGrantsAll"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertTable(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants"); return result; } finally { if (!success) { rollbackTransaction(); } } } private List<HiveObjectPrivilege> convertTable(List<MTablePrivilege> privs) { List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>(); for (MTablePrivilege priv : privs) { String pname = priv.getPrincipalName(); PrincipalType ptype = 
PrincipalType.valueOf(priv.getPrincipalType()); String table = priv.getTable().getTableName(); String database = priv.getTable().getDatabase().getName(); HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.TABLE, database, table, null, null); PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); } return result; } @SuppressWarnings("unchecked") private List<MPartitionPrivilege> listPrincipalAllPartitionGrants( String principalName, PrincipalType principalType) { boolean success = false; List<MPartitionPrivilege> mSecurityTabPartList = null; try { openTransaction(); LOG.debug("Executing listPrincipalAllPartitionGrants"); Query query = pm.newQuery(MPartitionPrivilege.class, "principalName == t1 && principalType == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityTabPartList = (List<MPartitionPrivilege>) query.execute( principalName, principalType.toString()); LOG .debug("Done executing query for listPrincipalAllPartitionGrants"); pm.retrieveAll(mSecurityTabPartList); success = commitTransaction(); LOG .debug("Done retrieving all objects for listPrincipalAllPartitionGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityTabPartList; } @Override public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll( String principalName, PrincipalType principalType) { boolean success = false; try { openTransaction(); LOG.debug("Executing listPrincipalPartitionGrantsAll"); List<MPartitionPrivilege> mSecurityTabPartList; if (principalName != null && principalType != null) { Query query = pm.newQuery(MPartitionPrivilege.class, "principalName == t1 && principalType == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityTabPartList = (List<MPartitionPrivilege>) 
query.execute(principalName, principalType.toString()); } else { Query query = pm.newQuery(MPartitionPrivilege.class); mSecurityTabPartList = (List<MPartitionPrivilege>) query.execute(); } LOG.debug("Done executing query for listPrincipalPartitionGrantsAll"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertPartition(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll"); return result; } finally { if (!success) { rollbackTransaction(); } } } @Override public List<HiveObjectPrivilege> listPartitionGrantsAll( String dbName, String tableName, String partitionName) { boolean success = false; try { openTransaction(); LOG.debug("Executing listPrincipalPartitionGrantsAll"); Query query = pm.newQuery(MPartitionPrivilege.class, "partition.table.tableName == t3 && partition.table.database.name == t4 && " + "partition.partitionName == t5"); query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5"); List<MPartitionPrivilege> mSecurityTabPartList = (List<MPartitionPrivilege>) query.executeWithArray(tableName, dbName, partitionName); LOG.debug("Done executing query for listPrincipalPartitionGrantsAll"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertPartition(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll"); return result; } finally { if (!success) { rollbackTransaction(); } } } private List<HiveObjectPrivilege> convertPartition(List<MPartitionPrivilege> privs) { List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>(); for (MPartitionPrivilege priv : privs) { String pname = priv.getPrincipalName(); PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); MPartition mpartition = priv.getPartition(); MTable mtable = mpartition.getTable(); MDatabase mdatabase = mtable.getDatabase(); HiveObjectRef 
objectRef = new HiveObjectRef(HiveObjectType.PARTITION, mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), null); PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); } return result; } @SuppressWarnings("unchecked") private List<MTableColumnPrivilege> listPrincipalAllTableColumnGrants( String principalName, PrincipalType principalType) { boolean success = false; List<MTableColumnPrivilege> mSecurityColumnList = null; try { openTransaction(); LOG.debug("Executing listPrincipalAllTableColumnGrants"); Query query = pm.newQuery(MTableColumnPrivilege.class, "principalName == t1 && principalType == t2"); query .declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityColumnList = (List<MTableColumnPrivilege>) query.execute( principalName, principalType.toString()); LOG.debug("Done executing query for listPrincipalAllTableColumnGrants"); pm.retrieveAll(mSecurityColumnList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalAllTableColumnGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityColumnList; } @Override public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll( String principalName, PrincipalType principalType) { boolean success = false; try { openTransaction(); LOG.debug("Executing listPrincipalTableColumnGrantsAll"); List<MTableColumnPrivilege> mSecurityTabPartList; if (principalName != null && principalType != null) { Query query = pm.newQuery(MTableColumnPrivilege.class, "principalName == t1 && principalType == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityTabPartList = (List<MTableColumnPrivilege>) query.execute(principalName, principalType.toString()); } else { Query query = pm.newQuery(MTableColumnPrivilege.class); 
mSecurityTabPartList = (List<MTableColumnPrivilege>) query.execute(); } LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertTableCols(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll"); return result; } finally { if (!success) { rollbackTransaction(); } } } @Override public List<HiveObjectPrivilege> listTableColumnGrantsAll( String dbName, String tableName, String columnName) { boolean success = false; try { openTransaction(); LOG.debug("Executing listPrincipalTableColumnGrantsAll"); Query query = pm.newQuery(MTableColumnPrivilege.class, "table.tableName == t3 && table.database.name == t4 && columnName == t5"); query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5"); List<MTableColumnPrivilege> mSecurityTabPartList = (List<MTableColumnPrivilege>) query.executeWithArray(tableName, dbName, columnName); LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll"); pm.retrieveAll(mSecurityTabPartList); List<HiveObjectPrivilege> result = convertTableCols(mSecurityTabPartList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll"); return result; } finally { if (!success) { rollbackTransaction(); } } } private List<HiveObjectPrivilege> convertTableCols(List<MTableColumnPrivilege> privs) { List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>(); for (MTableColumnPrivilege priv : privs) { String pname = priv.getPrincipalName(); PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); MTable mtable = priv.getTable(); MDatabase mdatabase = mtable.getDatabase(); HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN, mdatabase.getName(), mtable.getTableName(), null, priv.getColumnName()); PrivilegeGrantInfo grantor = new 
PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); } return result; } @SuppressWarnings("unchecked") private List<MPartitionColumnPrivilege> listPrincipalAllPartitionColumnGrants( String principalName, PrincipalType principalType) { boolean success = false; List<MPartitionColumnPrivilege> mSecurityColumnList = null; try { openTransaction(); LOG.debug("Executing listPrincipalAllTableColumnGrants"); Query query = pm.newQuery(MPartitionColumnPrivilege.class, "principalName == t1 && principalType == t2"); query .declareParameters("java.lang.String t1, java.lang.String t2"); mSecurityColumnList = (List<MPartitionColumnPrivilege>) query.execute( principalName, principalType.toString()); LOG.debug("Done executing query for listPrincipalAllTableColumnGrants"); pm.retrieveAll(mSecurityColumnList); success = commitTransaction(); LOG.debug("Done retrieving all objects for listPrincipalAllTableColumnGrants"); } finally { if (!success) { rollbackTransaction(); } } return mSecurityColumnList; } @Override public boolean isPartitionMarkedForEvent(String dbName, String tblName, Map<String, String> partName, PartitionEventType evtType) throws UnknownTableException, MetaException, InvalidPartitionException, UnknownPartitionException { Collection<MPartitionEvent> partEvents; boolean success = false; LOG.debug("Begin Executing isPartitionMarkedForEvent"); try{ openTransaction(); Query query = pm.newQuery(MPartitionEvent.class, "dbName == t1 && tblName == t2 && partName == t3 && eventType == t4"); query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3, int t4"); Table tbl = getTable(dbName, tblName); // Make sure dbName and tblName are valid. 
if(null == tbl) { throw new UnknownTableException("Table: "+ tblName + " is not found."); } partEvents = (Collection<MPartitionEvent>) query.executeWithArray(dbName, tblName, getPartitionStr(tbl, partName), evtType.getValue()); pm.retrieveAll(partEvents); success = commitTransaction(); LOG.debug("Done executing isPartitionMarkedForEvent"); } finally{ if (!success) { rollbackTransaction(); } } return (partEvents != null && !partEvents.isEmpty()) ? true : false; } @Override public Table markPartitionForEvent(String dbName, String tblName, Map<String,String> partName, PartitionEventType evtType) throws MetaException, UnknownTableException, InvalidPartitionException, UnknownPartitionException { LOG.debug("Begin executing markPartitionForEvent"); boolean success = false; Table tbl = null; try{ openTransaction(); tbl = getTable(dbName, tblName); // Make sure dbName and tblName are valid. if(null == tbl) { throw new UnknownTableException("Table: "+ tblName + " is not found."); } pm.makePersistent(new MPartitionEvent(dbName,tblName,getPartitionStr(tbl, partName), evtType.getValue())); success = commitTransaction(); LOG.debug("Done executing markPartitionForEvent"); } finally { if(!success) { rollbackTransaction(); } } return tbl; } private String getPartitionStr(Table tbl, Map<String,String> partName) throws InvalidPartitionException{ if(tbl.getPartitionKeysSize() != partName.size()){ throw new InvalidPartitionException("Number of partition columns in table: "+ tbl.getPartitionKeysSize() + " doesn't match with number of supplied partition values: "+partName.size()); } final List<String> storedVals = new ArrayList<String>(tbl.getPartitionKeysSize()); for(FieldSchema partKey : tbl.getPartitionKeys()){ String partVal = partName.get(partKey.getName()); if(null == partVal) { throw new InvalidPartitionException("No value found for partition column: "+partKey.getName()); } storedVals.add(partVal); } return join(storedVals,','); } /** The following API * * - executeJDOQLSelect * * 
is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
 * */
/**
 * Runs an arbitrary JDOQL select inside a metastore transaction.
 * Returns the raw result collection on commit, or null if the
 * transaction did not commit (it is rolled back in that case).
 */
public Collection<?> executeJDOQLSelect(String query) {
  boolean committed = false;
  Collection<?> result = null;
  try {
    openTransaction();
    result = (Collection<?>) pm.newQuery(query).execute();
    committed = commitTransaction();
  } finally {
    if (!committed) {
      rollbackTransaction();
    }
  }
  return committed ? result : null;
}

/**
 * Runs an arbitrary JDOQL bulk update inside a metastore transaction.
 * Returns the number of records updated on commit, or -1 if the
 * transaction did not commit (it is rolled back in that case).
 */
public long executeJDOQLUpdate(String query) {
  boolean committed = false;
  long numUpdated = 0;
  try {
    openTransaction();
    numUpdated = (Long) pm.newQuery(query).execute();
    committed = commitTransaction();
  } finally {
    if (!committed) {
      rollbackTransaction();
    }
  }
  return committed ? numUpdated : -1;
}

/** The following API
 *
 *  - listFSRoots
 *
 * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
* */ public Set<String> listFSRoots() { boolean committed = false; Set<String> fsRoots = new HashSet<String>(); try { openTransaction(); Query query = pm.newQuery(MDatabase.class); List<MDatabase> mDBs = (List<MDatabase>) query.execute(); pm.retrieveAll(mDBs); for (MDatabase mDB:mDBs) { fsRoots.add(mDB.getLocationUri()); } committed = commitTransaction(); if (committed) { return fsRoots; } else { return null; } } finally { if (!committed) { rollbackTransaction(); } } } private boolean shouldUpdateURI(URI onDiskUri, URI inputUri) { String onDiskHost = onDiskUri.getHost(); String inputHost = inputUri.getHost(); int onDiskPort = onDiskUri.getPort(); int inputPort = inputUri.getPort(); String onDiskScheme = onDiskUri.getScheme(); String inputScheme = inputUri.getScheme(); //compare ports if (inputPort != -1) { if (inputPort != onDiskPort) { return false; } } //compare schemes if (inputScheme != null) { if (onDiskScheme == null) { return false; } if (!inputScheme.equalsIgnoreCase(onDiskScheme)) { return false; } } //compare hosts if (onDiskHost != null) { if (!inputHost.equalsIgnoreCase(onDiskHost)) { return false; } } else { return false; } return true; } public class UpdateMDatabaseURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; UpdateMDatabaseURIRetVal(List<String> badRecords, Map<String, String> updateLocations) { this.badRecords = badRecords; this.updateLocations = updateLocations; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } } /** The following APIs * * - updateMDatabaseURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. 
* */ public UpdateMDatabaseURIRetVal updateMDatabaseURI(URI oldLoc, URI newLoc, boolean dryRun) { boolean committed = false; Map<String, String> updateLocations = new HashMap<String, String>(); List<String> badRecords = new ArrayList<String>(); UpdateMDatabaseURIRetVal retVal = null; try { openTransaction(); Query query = pm.newQuery(MDatabase.class); List<MDatabase> mDBs = (List<MDatabase>) query.execute(); pm.retrieveAll(mDBs); for(MDatabase mDB:mDBs) { URI locationURI = null; String location = mDB.getLocationUri(); try { locationURI = new URI(location); } catch(URISyntaxException e) { badRecords.add(location); } catch (NullPointerException e) { badRecords.add(location); } if (locationURI == null) { badRecords.add(location); } else { if (shouldUpdateURI(locationURI, oldLoc)) { String dbLoc = mDB.getLocationUri().replaceAll(oldLoc.toString(), newLoc.toString()); updateLocations.put(locationURI.toString(), dbLoc); if (!dryRun) { mDB.setLocationUri(dbLoc); } } } } committed = commitTransaction(); if (committed) { retVal = new UpdateMDatabaseURIRetVal(badRecords, updateLocations); } return retVal; } finally { if (!committed) { rollbackTransaction(); } } } public class UpdatePropURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; UpdatePropURIRetVal(List<String> badRecords, Map<String, String> updateLocations) { this.badRecords = badRecords; this.updateLocations = updateLocations; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } } private void updatePropURIHelper(URI oldLoc, URI newLoc, String tblPropKey, boolean isDryRun, List<String> badRecords, Map<String, String> updateLocations, Map<String, String> parameters) { URI tablePropLocationURI = null; 
if (parameters.containsKey(tblPropKey)) { String tablePropLocation = parameters.get(tblPropKey); try { tablePropLocationURI = new URI(tablePropLocation); } catch (URISyntaxException e) { badRecords.add(tablePropLocation); } catch (NullPointerException e) { badRecords.add(tablePropLocation); } // if tablePropKey that was passed in lead to a valid URI resolution, update it if //parts of it match the old-NN-loc, else add to badRecords if (tablePropLocationURI == null) { badRecords.add(tablePropLocation); } else { if (shouldUpdateURI(tablePropLocationURI, oldLoc)) { String tblPropLoc = parameters.get(tblPropKey).replaceAll(oldLoc.toString(), newLoc .toString()); updateLocations.put(tablePropLocationURI.toString(), tblPropLoc); if (!isDryRun) { parameters.put(tblPropKey, tblPropLoc); } } } } } /** The following APIs * * - updateMStorageDescriptorTblPropURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. * */ public UpdatePropURIRetVal updateTblPropURI(URI oldLoc, URI newLoc, String tblPropKey, boolean isDryRun) { boolean committed = false; Map<String, String> updateLocations = new HashMap<>(); List<String> badRecords = new ArrayList<>(); UpdatePropURIRetVal retVal = null; try { openTransaction(); Query query = pm.newQuery(MTable.class); List<MTable> mTbls = (List<MTable>) query.execute(); pm.retrieveAll(mTbls); for (MTable mTbl : mTbls) { updatePropURIHelper(oldLoc, newLoc, tblPropKey, isDryRun, badRecords, updateLocations, mTbl.getParameters()); } committed = commitTransaction(); if (committed) { retVal = new UpdatePropURIRetVal(badRecords, updateLocations); } return retVal; } finally { if (!committed) { rollbackTransaction(); } } } /** The following APIs * * - updateMStorageDescriptorTblPropURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. 
* */ @Deprecated public UpdatePropURIRetVal updateMStorageDescriptorTblPropURI(URI oldLoc, URI newLoc, String tblPropKey, boolean isDryRun) { boolean committed = false; Map<String, String> updateLocations = new HashMap<String, String>(); List<String> badRecords = new ArrayList<String>(); UpdatePropURIRetVal retVal = null; try { openTransaction(); Query query = pm.newQuery(MStorageDescriptor.class); List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute(); pm.retrieveAll(mSDSs); for(MStorageDescriptor mSDS:mSDSs) { updatePropURIHelper(oldLoc, newLoc, tblPropKey, isDryRun, badRecords, updateLocations, mSDS.getParameters()); } committed = commitTransaction(); if (committed) { retVal = new UpdatePropURIRetVal(badRecords, updateLocations); } return retVal; } finally { if (!committed) { rollbackTransaction(); } } } public class UpdateMStorageDescriptorTblURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; UpdateMStorageDescriptorTblURIRetVal(List<String> badRecords, Map<String, String> updateLocations) { this.badRecords = badRecords; this.updateLocations = updateLocations; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } } /** The following APIs * * - updateMStorageDescriptorTblURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. 
* */ public UpdateMStorageDescriptorTblURIRetVal updateMStorageDescriptorTblURI(URI oldLoc, URI newLoc, boolean isDryRun) { boolean committed = false; Map<String, String> updateLocations = new HashMap<String, String>(); List<String> badRecords = new ArrayList<String>(); UpdateMStorageDescriptorTblURIRetVal retVal = null; try { openTransaction(); Query query = pm.newQuery(MStorageDescriptor.class); List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute(); pm.retrieveAll(mSDSs); for(MStorageDescriptor mSDS:mSDSs) { URI locationURI = null; String location = mSDS.getLocation(); try { locationURI = new URI(location); } catch (URISyntaxException e) { badRecords.add(location); } catch (NullPointerException e) { badRecords.add(location); } if (locationURI == null) { badRecords.add(location); } else { if (shouldUpdateURI(locationURI, oldLoc)) { String tblLoc = mSDS.getLocation().replaceAll(oldLoc.toString(), newLoc.toString()); updateLocations.put(locationURI.toString(), tblLoc); if (!isDryRun) { mSDS.setLocation(tblLoc); } } } } committed = commitTransaction(); if (committed) { retVal = new UpdateMStorageDescriptorTblURIRetVal(badRecords, updateLocations); } return retVal; } finally { if (!committed) { rollbackTransaction(); } } } public class UpdateSerdeURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; UpdateSerdeURIRetVal(List<String> badRecords, Map<String, String> updateLocations) { this.badRecords = badRecords; this.updateLocations = updateLocations; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } } /** The following APIs * * - updateSerdeURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. 
* */ public UpdateSerdeURIRetVal updateSerdeURI(URI oldLoc, URI newLoc, String serdeProp, boolean isDryRun) { boolean committed = false; Map<String, String> updateLocations = new HashMap<String, String>(); List<String> badRecords = new ArrayList<String>(); UpdateSerdeURIRetVal retVal = null; try { openTransaction(); Query query = pm.newQuery(MSerDeInfo.class); List<MSerDeInfo> mSerdes = (List<MSerDeInfo>) query.execute(); pm.retrieveAll(mSerdes); for(MSerDeInfo mSerde:mSerdes) { if (mSerde.getParameters().containsKey(serdeProp)) { String schemaLoc = mSerde.getParameters().get(serdeProp); URI schemaLocURI = null; try { schemaLocURI = new URI(schemaLoc); } catch (URISyntaxException e) { badRecords.add(schemaLoc); } catch (NullPointerException e) { badRecords.add(schemaLoc); } if (schemaLocURI == null) { badRecords.add(schemaLoc); } else { if (shouldUpdateURI(schemaLocURI, oldLoc)) { String newSchemaLoc = schemaLoc.replaceAll(oldLoc.toString(), newLoc.toString()); updateLocations.put(schemaLocURI.toString(), newSchemaLoc); if (!isDryRun) { mSerde.getParameters().put(serdeProp, newSchemaLoc); } } } } } committed = commitTransaction(); if (committed) { retVal = new UpdateSerdeURIRetVal(badRecords, updateLocations); } return retVal; } finally { if (!committed) { rollbackTransaction(); } } } private void writeMTableColumnStatistics(Table table, MTableColumnStatistics mStatsObj) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { String dbName = mStatsObj.getDbName(); String tableName = mStatsObj.getTableName(); String colName = mStatsObj.getColName(); LOG.info("Updating table level column statistics for db=" + dbName + " tableName=" + tableName + " colName=" + colName); validateTableCols(table, Lists.newArrayList(colName)); List<MTableColumnStatistics> oldStats = getMTableColumnStatistics(table, Lists.newArrayList(colName)); if (!oldStats.isEmpty()) { assert oldStats.size() == 1; StatObjectConverter.setFieldsIntoOldStats(mStatsObj, 
oldStats.get(0)); } else { pm.makePersistent(mStatsObj); } } private void writeMPartitionColumnStatistics(Table table, Partition partition, MPartitionColumnStatistics mStatsObj) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { String dbName = mStatsObj.getDbName(); String tableName = mStatsObj.getTableName(); String partName = mStatsObj.getPartitionName(); String colName = mStatsObj.getColName(); LOG.info("Updating partition level column statistics for db=" + dbName + " tableName=" + tableName + " partName=" + partName + " colName=" + colName); boolean foundCol = false; List<FieldSchema> colList = partition.getSd().getCols(); for (FieldSchema col : colList) { if (col.getName().equals(mStatsObj.getColName().trim())) { foundCol = true; break; } } if (!foundCol) { throw new NoSuchObjectException("Column " + colName + " for which stats gathering is requested doesn't exist."); } List<MPartitionColumnStatistics> oldStats = getMPartitionColumnStatistics( table, Lists.newArrayList(partName), Lists.newArrayList(colName)); if (!oldStats.isEmpty()) { assert oldStats.size() == 1; StatObjectConverter.setFieldsIntoOldStats(mStatsObj, oldStats.get(0)); } else { pm.makePersistent(mStatsObj); } } @Override public boolean updateTableColumnStatistics(ColumnStatistics colStats) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { boolean committed = false; openTransaction(); try { List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj(); ColumnStatisticsDesc statsDesc = colStats.getStatsDesc(); // DataNucleus objects get detached all over the place for no (real) reason. // So let's not use them anywhere unless absolutely necessary. Table table = ensureGetTable(statsDesc.getDbName(), statsDesc.getTableName()); for (ColumnStatisticsObj statsObj:statsObjs) { // We have to get mtable again because DataNucleus. 
MTableColumnStatistics mStatsObj = StatObjectConverter.convertToMTableColumnStatistics( ensureGetMTable(statsDesc.getDbName(), statsDesc.getTableName()), statsDesc, statsObj); writeMTableColumnStatistics(table, mStatsObj); } committed = commitTransaction(); return committed; } finally { if (!committed) { rollbackTransaction(); } } } @Override public boolean updatePartitionColumnStatistics(ColumnStatistics colStats, List<String> partVals) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { boolean committed = false; try { openTransaction(); List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj(); ColumnStatisticsDesc statsDesc = colStats.getStatsDesc(); Table table = ensureGetTable(statsDesc.getDbName(), statsDesc.getTableName()); Partition partition = convertToPart(getMPartition( statsDesc.getDbName(), statsDesc.getTableName(), partVals)); for (ColumnStatisticsObj statsObj:statsObjs) { // We have to get partition again because DataNucleus MPartition mPartition = getMPartition( statsDesc.getDbName(), statsDesc.getTableName(), partVals); if (partition == null) { throw new NoSuchObjectException("Partition for which stats is gathered doesn't exist."); } MPartitionColumnStatistics mStatsObj = StatObjectConverter.convertToMPartitionColumnStatistics(mPartition, statsDesc, statsObj); writeMPartitionColumnStatistics(table, partition, mStatsObj); } committed = commitTransaction(); return committed; } finally { if (!committed) { rollbackTransaction(); } } } private List<MTableColumnStatistics> getMTableColumnStatistics( Table table, List<String> colNames) throws MetaException { boolean committed = false; openTransaction(); try { List<MTableColumnStatistics> result = null; validateTableCols(table, colNames); Query query = pm.newQuery(MTableColumnStatistics.class); String filter = "tableName == t1 && dbName == t2 && ("; String paramStr = "java.lang.String t1, java.lang.String t2"; Object[] params = new Object[colNames.size() + 2]; 
params[0] = table.getTableName(); params[1] = table.getDbName(); for (int i = 0; i < colNames.size(); ++i) { filter += ((i == 0) ? "" : " || ") + "colName == c" + i; paramStr += ", java.lang.String c" + i; params[i + 2] = colNames.get(i); } filter += ")"; query.setFilter(filter); query.declareParameters(paramStr); result = (List<MTableColumnStatistics>) query.executeWithArray(params); pm.retrieveAll(result); if (result.size() > colNames.size()) { throw new MetaException( "Unexpected " + result.size() + " statistics for " + colNames.size() + " columns"); } committed = commitTransaction(); return result; } catch (Exception ex) { LOG.error("Error retrieving statistics via jdo", ex); if (ex instanceof MetaException) { throw (MetaException)ex; } throw new MetaException(ex.getMessage()); } finally { if (!committed) { rollbackTransaction(); return Lists.newArrayList(); } } } private void validateTableCols(Table table, List<String> colNames) throws MetaException { List<FieldSchema> colList = table.getSd().getCols(); for (String colName : colNames) { boolean foundCol = false; for (FieldSchema mCol : colList) { if (mCol.getName().equals(colName.trim())) { foundCol = true; break; } } if (!foundCol) { throw new MetaException("Column " + colName + " doesn't exist."); } } } @Override public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, List<String> colNames) throws MetaException, NoSuchObjectException { return getTableColumnStatisticsInternal(dbName, tableName, colNames, true, true); } protected ColumnStatistics getTableColumnStatisticsInternal( String dbName, String tableName, final List<String> colNames, boolean allowSql, boolean allowJdo) throws MetaException, NoSuchObjectException { return new GetStatHelper(HiveStringUtils.normalizeIdentifier(dbName), HiveStringUtils.normalizeIdentifier(tableName), allowSql, allowJdo) { @Override protected ColumnStatistics getSqlResult(GetHelper<ColumnStatistics> ctx) throws MetaException { return 
directSql.getTableStats(dbName, tblName, colNames); } @Override protected ColumnStatistics getJdoResult( GetHelper<ColumnStatistics> ctx) throws MetaException, NoSuchObjectException { List<MTableColumnStatistics> mStats = getMTableColumnStatistics(getTable(), colNames); if (mStats.isEmpty()) return null; // LastAnalyzed is stored per column, but thrift object has it per multiple columns. // Luckily, nobody actually uses it, so we will set to lowest value of all columns for now. ColumnStatisticsDesc desc = StatObjectConverter.getTableColumnStatisticsDesc(mStats.get(0)); List<ColumnStatisticsObj> statObjs = new ArrayList<ColumnStatisticsObj>(mStats.size()); for (MTableColumnStatistics mStat : mStats) { if (desc.getLastAnalyzed() > mStat.getLastAnalyzed()) { desc.setLastAnalyzed(mStat.getLastAnalyzed()); } statObjs.add(StatObjectConverter.getTableColumnStatisticsObj(mStat)); Deadline.checkTimeout(); } return new ColumnStatistics(desc, statObjs); } }.run(true); } @Override public List<ColumnStatistics> getPartitionColumnStatistics(String dbName, String tableName, List<String> partNames, List<String> colNames) throws MetaException, NoSuchObjectException { return getPartitionColumnStatisticsInternal( dbName, tableName, partNames, colNames, true, true); } protected List<ColumnStatistics> getPartitionColumnStatisticsInternal( String dbName, String tableName, final List<String> partNames, final List<String> colNames, boolean allowSql, boolean allowJdo) throws MetaException, NoSuchObjectException { return new GetListHelper<ColumnStatistics>(dbName, tableName, allowSql, allowJdo) { @Override protected List<ColumnStatistics> getSqlResult( GetHelper<List<ColumnStatistics>> ctx) throws MetaException { return directSql.getPartitionStats(dbName, tblName, partNames, colNames); } @Override protected List<ColumnStatistics> getJdoResult( GetHelper<List<ColumnStatistics>> ctx) throws MetaException, NoSuchObjectException { List<MPartitionColumnStatistics> mStats = 
getMPartitionColumnStatistics(getTable(), partNames, colNames); List<ColumnStatistics> result = new ArrayList<ColumnStatistics>( Math.min(mStats.size(), partNames.size())); String lastPartName = null; List<ColumnStatisticsObj> curList = null; ColumnStatisticsDesc csd = null; for (int i = 0; i <= mStats.size(); ++i) { boolean isLast = i == mStats.size(); MPartitionColumnStatistics mStatsObj = isLast ? null : mStats.get(i); String partName = isLast ? null : (String)mStatsObj.getPartitionName(); if (isLast || !partName.equals(lastPartName)) { if (i != 0) { result.add(new ColumnStatistics(csd, curList)); } if (isLast) { continue; } csd = StatObjectConverter.getPartitionColumnStatisticsDesc(mStatsObj); curList = new ArrayList<ColumnStatisticsObj>(colNames.size()); } curList.add(StatObjectConverter.getPartitionColumnStatisticsObj(mStatsObj)); lastPartName = partName; Deadline.checkTimeout(); } return result; } }.run(true); } @Override public AggrStats get_aggr_stats_for(String dbName, String tblName, final List<String> partNames, final List<String> colNames) throws MetaException, NoSuchObjectException { final boolean useDensityFunctionForNDVEstimation = HiveConf.getBoolVar(getConf(), HiveConf.ConfVars.HIVE_METASTORE_STATS_NDV_DENSITY_FUNCTION); return new GetHelper<AggrStats>(dbName, tblName, true, false) { @Override protected AggrStats getSqlResult(GetHelper<AggrStats> ctx) throws MetaException { return directSql.aggrColStatsForPartitions(dbName, tblName, partNames, colNames, useDensityFunctionForNDVEstimation); } @Override protected AggrStats getJdoResult(GetHelper<AggrStats> ctx) throws MetaException, NoSuchObjectException { // This is fast path for query optimizations, if we can find this info // quickly using // directSql, do it. No point in failing back to slow path here. 
throw new MetaException("Jdo path is not implemented for stats aggr."); } @Override protected String describeResult() { return null; } }.run(true); } private List<MPartitionColumnStatistics> getMPartitionColumnStatistics( Table table, List<String> partNames, List<String> colNames) throws NoSuchObjectException, MetaException { boolean committed = false; MPartitionColumnStatistics mStatsObj = null; try { openTransaction(); // We are not going to verify SD for each partition. Just verify for the table. validateTableCols(table, colNames); boolean foundCol = false; Query query = pm.newQuery(MPartitionColumnStatistics.class); String paramStr = "java.lang.String t1, java.lang.String t2"; String filter = "tableName == t1 && dbName == t2 && ("; Object[] params = new Object[colNames.size() + partNames.size() + 2]; int i = 0; params[i++] = table.getTableName(); params[i++] = table.getDbName(); int firstI = i; for (String s : partNames) { filter += ((i == firstI) ? "" : " || ") + "partitionName == p" + i; paramStr += ", java.lang.String p" + i; params[i++] = s; } filter += ") && ("; firstI = i; for (String s : colNames) { filter += ((i == firstI) ? 
"" : " || ") + "colName == c" + i; paramStr += ", java.lang.String c" + i; params[i++] = s; } filter += ")"; query.setFilter(filter); query.declareParameters(paramStr); query.setOrdering("partitionName ascending"); @SuppressWarnings("unchecked") List<MPartitionColumnStatistics> result = (List<MPartitionColumnStatistics>) query.executeWithArray(params); pm.retrieveAll(result); committed = commitTransaction(); return result; } catch (Exception ex) { LOG.error("Error retrieving statistics via jdo", ex); if (ex instanceof MetaException) { throw (MetaException)ex; } throw new MetaException(ex.getMessage()); } finally { if (!committed) { rollbackTransaction(); return Lists.newArrayList(); } } } private void dropPartitionColumnStatisticsNoTxn( String dbName, String tableName, List<String> partNames) throws MetaException { ObjectPair<Query, Object[]> queryWithParams = makeQueryByPartitionNames( dbName, tableName, partNames, MPartitionColumnStatistics.class, "tableName", "dbName", "partition.partitionName"); queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond()); } @Override public boolean deletePartitionColumnStatistics(String dbName, String tableName, String partName, List<String> partVals, String colName) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { boolean ret = false; if (dbName == null) { dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME; } if (tableName == null) { throw new InvalidInputException("Table name is null."); } try { openTransaction(); MTable mTable = getMTable(dbName, tableName); MPartitionColumnStatistics mStatsObj; List<MPartitionColumnStatistics> mStatsObjColl; if (mTable == null) { throw new NoSuchObjectException("Table " + tableName + " for which stats deletion is requested doesn't exist"); } MPartition mPartition = getMPartition(dbName, tableName, partVals); if (mPartition == null) { throw new NoSuchObjectException("Partition " + partName + " for which stats deletion is requested 
doesn't exist"); } Query query = pm.newQuery(MPartitionColumnStatistics.class); String filter; String parameters; if (colName != null) { filter = "partition.partitionName == t1 && dbName == t2 && tableName == t3 && " + "colName == t4"; parameters = "java.lang.String t1, java.lang.String t2, " + "java.lang.String t3, java.lang.String t4"; } else { filter = "partition.partitionName == t1 && dbName == t2 && tableName == t3"; parameters = "java.lang.String t1, java.lang.String t2, java.lang.String t3"; } query.setFilter(filter); query .declareParameters(parameters); if (colName != null) { query.setUnique(true); mStatsObj = (MPartitionColumnStatistics)query.executeWithArray(partName.trim(), HiveStringUtils.normalizeIdentifier(dbName), HiveStringUtils.normalizeIdentifier(tableName), HiveStringUtils.normalizeIdentifier(colName)); pm.retrieve(mStatsObj); if (mStatsObj != null) { pm.deletePersistent(mStatsObj); } else { throw new NoSuchObjectException("Column stats doesn't exist for db=" +dbName + " table=" + tableName + " partition=" + partName + " col=" + colName); } } else { mStatsObjColl= (List<MPartitionColumnStatistics>)query.execute(partName.trim(), HiveStringUtils.normalizeIdentifier(dbName), HiveStringUtils.normalizeIdentifier(tableName)); pm.retrieveAll(mStatsObjColl); if (mStatsObjColl != null) { pm.deletePersistentAll(mStatsObjColl); } else { throw new NoSuchObjectException("Column stats doesn't exist for db=" + dbName + " table=" + tableName + " partition" + partName); } } ret = commitTransaction(); } catch(NoSuchObjectException e) { rollbackTransaction(); throw e; } finally { if (!ret) { rollbackTransaction(); } } return ret; } @Override public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { boolean ret = false; if (dbName == null) { dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME; } if (tableName == null) { throw new 
InvalidInputException("Table name is null."); } try { openTransaction(); MTable mTable = getMTable(dbName, tableName); MTableColumnStatistics mStatsObj; List<MTableColumnStatistics> mStatsObjColl; if (mTable == null) { throw new NoSuchObjectException("Table " + tableName + " for which stats deletion is requested doesn't exist"); } Query query = pm.newQuery(MTableColumnStatistics.class); String filter; String parameters; if (colName != null) { filter = "table.tableName == t1 && dbName == t2 && colName == t3"; parameters = "java.lang.String t1, java.lang.String t2, java.lang.String t3"; } else { filter = "table.tableName == t1 && dbName == t2"; parameters = "java.lang.String t1, java.lang.String t2"; } query.setFilter(filter); query .declareParameters(parameters); if (colName != null) { query.setUnique(true); mStatsObj = (MTableColumnStatistics)query.execute( HiveStringUtils.normalizeIdentifier(tableName), HiveStringUtils.normalizeIdentifier(dbName), HiveStringUtils.normalizeIdentifier(colName)); pm.retrieve(mStatsObj); if (mStatsObj != null) { pm.deletePersistent(mStatsObj); } else { throw new NoSuchObjectException("Column stats doesn't exist for db=" +dbName + " table=" + tableName + " col=" + colName); } } else { mStatsObjColl= (List<MTableColumnStatistics>)query.execute( HiveStringUtils.normalizeIdentifier(tableName), HiveStringUtils.normalizeIdentifier(dbName)); pm.retrieveAll(mStatsObjColl); if (mStatsObjColl != null) { pm.deletePersistentAll(mStatsObjColl); } else { throw new NoSuchObjectException("Column stats doesn't exist for db=" + dbName + " table=" + tableName); } } ret = commitTransaction(); } catch(NoSuchObjectException e) { rollbackTransaction(); throw e; } finally { if (!ret) { rollbackTransaction(); } } return ret; } @Override public long cleanupEvents() { boolean commited = false; long delCnt; LOG.debug("Begin executing cleanupEvents"); Long expiryTime = HiveConf.getTimeVar(getConf(), ConfVars.METASTORE_EVENT_EXPIRY_DURATION, 
TimeUnit.MILLISECONDS);
// Boxed Longs: both values are bound as java.lang.Long JDOQL query parameters below.
Long curTime = System.currentTimeMillis();
try {
  openTransaction();
  // Bulk-delete every partition event older than the configured expiry window.
  Query query = pm.newQuery(MPartitionEvent.class,"curTime - eventTime > expiryTime");
  query.declareParameters("java.lang.Long curTime, java.lang.Long expiryTime");
  delCnt = query.deletePersistentAll(curTime, expiryTime);
  commited = commitTransaction();
} finally {
  if (!commited) {
    rollbackTransaction();
  }
  LOG.debug("Done executing cleanupEvents");
}
return delCnt;
}

// Looks up the delegation-token row by its identifier; returns null when absent.
// Must be called inside an open transaction by the caller.
private MDelegationToken getTokenFrom(String tokenId) {
  Query query = pm.newQuery(MDelegationToken.class, "tokenIdentifier == tokenId");
  query.declareParameters("java.lang.String tokenId");
  query.setUnique(true);
  return (MDelegationToken)query.execute(tokenId);
}

/**
 * Stores a delegation token under tokenId, only if no token with that id exists.
 * Returns true only when the transaction committed AND the token was newly added.
 * NOTE(review): the exists-check and insert are not atomic across concurrent
 * metastores — presumably duplicate adds are prevented by a unique constraint
 * or are harmless; confirm.
 */
@Override
public boolean addToken(String tokenId, String delegationToken) {
  LOG.debug("Begin executing addToken");
  boolean committed = false;
  MDelegationToken token;
  try{
    openTransaction();
    token = getTokenFrom(tokenId);
    if (token == null) {
      // add Token, only if it already doesn't exist
      pm.makePersistent(new MDelegationToken(tokenId, delegationToken));
    }
    committed = commitTransaction();
  } finally {
    if(!committed) {
      rollbackTransaction();
    }
  }
  LOG.debug("Done executing addToken with status : " + committed);
  return committed && (token == null);
}

/**
 * Deletes the delegation token stored under tokenId, if present.
 * Returns true only when the transaction committed AND a token was found.
 */
@Override
public boolean removeToken(String tokenId) {
  LOG.debug("Begin executing removeToken");
  boolean committed = false;
  MDelegationToken token;
  try{
    openTransaction();
    token = getTokenFrom(tokenId);
    if (null != token) {
      pm.deletePersistent(token);
    }
    committed = commitTransaction();
  } finally {
    if(!committed) {
      rollbackTransaction();
    }
  }
  LOG.debug("Done executing removeToken with status : " + committed);
  return committed && (token != null);
}

// Fetches the delegation token string stored under tokenId (null when absent);
// the finally/return tail of this method continues past this chunk.
@Override
public String getToken(String tokenId) {
  LOG.debug("Begin executing getToken");
  boolean committed = false;
  MDelegationToken token;
  try{
    openTransaction();
    token = getTokenFrom(tokenId);
    if (null != token) {
      // Ensure the field values are loaded before the transaction closes.
      pm.retrieve(token);
    }
    committed = commitTransaction();
  }
finally { if(!committed) { rollbackTransaction(); } } LOG.debug("Done executing getToken with status : " + committed); return (null == token) ? null : token.getTokenStr(); } @Override public List<String> getAllTokenIdentifiers() { LOG.debug("Begin executing getAllTokenIdentifiers"); boolean committed = false; List<MDelegationToken> tokens; try{ openTransaction(); Query query = pm.newQuery(MDelegationToken.class); tokens = (List<MDelegationToken>) query.execute(); pm.retrieveAll(tokens); committed = commitTransaction(); } finally { if(!committed) { rollbackTransaction(); } } LOG.debug("Done executing getAllTokenIdentifers with status : " + committed); List<String> tokenIdents = new ArrayList<String>(tokens.size()); for (MDelegationToken token : tokens) { tokenIdents.add(token.getTokenIdentifier()); } return tokenIdents; } @Override public int addMasterKey(String key) throws MetaException{ LOG.debug("Begin executing addMasterKey"); boolean committed = false; MMasterKey masterKey = new MMasterKey(key); try{ openTransaction(); pm.makePersistent(masterKey); committed = commitTransaction(); } finally { if(!committed) { rollbackTransaction(); } } LOG.debug("Done executing addMasterKey with status : " + committed); if (committed) { return ((IntIdentity)pm.getObjectId(masterKey)).getKey(); } else { throw new MetaException("Failed to add master key."); } } @Override public void updateMasterKey(Integer id, String key) throws NoSuchObjectException, MetaException { LOG.debug("Begin executing updateMasterKey"); boolean committed = false; MMasterKey masterKey; try{ openTransaction(); Query query = pm.newQuery(MMasterKey.class, "keyId == id"); query.declareParameters("java.lang.Integer id"); query.setUnique(true); masterKey = (MMasterKey)query.execute(id); if (null != masterKey) { masterKey.setMasterKey(key); } committed = commitTransaction(); } finally { if(!committed) { rollbackTransaction(); } } LOG.debug("Done executing updateMasterKey with status : " + committed); if (null == 
masterKey) { throw new NoSuchObjectException("No key found with keyId: " + id); } if (!committed) { throw new MetaException("Though key is found, failed to update it. " + id); } } @Override public boolean removeMasterKey(Integer id) { LOG.debug("Begin executing removeMasterKey"); boolean success = false; MMasterKey masterKey; try{ openTransaction(); Query query = pm.newQuery(MMasterKey.class, "keyId == id"); query.declareParameters("java.lang.Integer id"); query.setUnique(true); masterKey = (MMasterKey)query.execute(id); if (null != masterKey) { pm.deletePersistent(masterKey); } success = commitTransaction(); } finally { if(!success) { rollbackTransaction(); } } LOG.debug("Done executing removeMasterKey with status : " + success); return (null != masterKey) && success; } @Override public String[] getMasterKeys() { LOG.debug("Begin executing getMasterKeys"); boolean committed = false; List<MMasterKey> keys; try{ openTransaction(); Query query = pm.newQuery(MMasterKey.class); keys = (List<MMasterKey>) query.execute(); pm.retrieveAll(keys); committed = commitTransaction(); } finally { if(!committed) { rollbackTransaction(); } } LOG.debug("Done executing getMasterKeys with status : " + committed); String[] masterKeys = new String[keys.size()]; for (int i = 0; i < keys.size(); i++) { masterKeys[i] = keys.get(i).getMasterKey(); } return masterKeys; } // compare hive version and metastore version @Override public void verifySchema() throws MetaException { // If the schema version is already checked, then go ahead and use this metastore if (isSchemaVerified.get()) { return; } checkSchema(); } public static void setSchemaVerified(boolean val) { isSchemaVerified.set(val); } private synchronized void checkSchema() throws MetaException { // recheck if it got verified by another thread while we were waiting if (isSchemaVerified.get()) { return; } boolean strictValidation = HiveConf.getBoolVar(getConf(), HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION); // read the schema 
version stored in metastore db String schemaVer = getMetaStoreSchemaVersion(); if (schemaVer == null) { if (strictValidation) { throw new MetaException("Version information not found in metastore. "); } else { LOG.warn("Version information not found in metastore. " + HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString() + " is not enabled so recording the schema version " + MetaStoreSchemaInfo.getHiveSchemaVersion()); setMetaStoreSchemaVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(), "Set by MetaStore " + USER + "@" + HOSTNAME); } } else { // metastore schema version is different than Hive distribution needs if (schemaVer.equalsIgnoreCase(MetaStoreSchemaInfo.getHiveSchemaVersion())) { LOG.debug("Found expected HMS version of " + schemaVer); } else { if (strictValidation) { throw new MetaException("Hive Schema version " + MetaStoreSchemaInfo.getHiveSchemaVersion() + " does not match metastore's schema version " + schemaVer + " Metastore is not upgraded or corrupt"); } else { LOG.error("Version information found in metastore differs " + schemaVer + " from expected schema version " + MetaStoreSchemaInfo.getHiveSchemaVersion() + ". 
Schema verififcation is disabled " + HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION + " so setting version."); setMetaStoreSchemaVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(), "Set by MetaStore " + USER + "@" + HOSTNAME); } } } isSchemaVerified.set(true); return; } // load the schema version stored in metastore db @Override public String getMetaStoreSchemaVersion() throws MetaException { MVersionTable mSchemaVer; try { mSchemaVer = getMSchemaVersion(); } catch (NoSuchObjectException e) { return null; } return mSchemaVer.getSchemaVersion(); } @SuppressWarnings("unchecked") private MVersionTable getMSchemaVersion() throws NoSuchObjectException, MetaException { boolean committed = false; List<MVersionTable> mVerTables = new ArrayList<MVersionTable>(); try { openTransaction(); Query query = pm.newQuery(MVersionTable.class); try { mVerTables = (List<MVersionTable>)query.execute(); pm.retrieveAll(mVerTables); } catch (JDODataStoreException e) { if (e.getCause() instanceof MissingTableException) { throw new MetaException("Version table not found. 
" + "The metastore is not upgraded to " + MetaStoreSchemaInfo.getHiveSchemaVersion()); } else { throw e; } } committed = commitTransaction(); } finally { if (!committed) { rollbackTransaction(); } } if (mVerTables.isEmpty()) { throw new NoSuchObjectException("No matching version found"); } if (mVerTables.size() > 1) { String msg = "Metastore contains multiple versions (" + mVerTables.size() + ") "; for (MVersionTable version : mVerTables) { msg += "[ version = " + version.getSchemaVersion() + ", comment = " + version.getVersionComment() + " ] "; } throw new MetaException(msg.trim()); } return mVerTables.get(0); } @Override public void setMetaStoreSchemaVersion(String schemaVersion, String comment) throws MetaException { MVersionTable mSchemaVer; boolean commited = false; boolean recordVersion = HiveConf.getBoolVar(getConf(), HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION_RECORD_VERSION); if (!recordVersion) { LOG.warn("setMetaStoreSchemaVersion called but recording version is disabled: " + "version = " + schemaVersion + ", comment = " + comment); return; } try { mSchemaVer = getMSchemaVersion(); } catch (NoSuchObjectException e) { // if the version doesn't exist, then create it mSchemaVer = new MVersionTable(); } mSchemaVer.setSchemaVersion(schemaVersion); mSchemaVer.setVersionComment(comment); try { openTransaction(); pm.makePersistent(mSchemaVer); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } } @Override public boolean doesPartitionExist(String dbName, String tableName, List<String> partVals) throws MetaException { boolean success = false; try { openTransaction(); dbName = HiveStringUtils.normalizeIdentifier(dbName); tableName = HiveStringUtils.normalizeIdentifier(tableName); // TODO: this could also be passed from upper layer; or this method should filter the list. 
MTable mtbl = getMTable(dbName, tableName); if (mtbl == null) { success = commitTransaction(); return false; } Query query = pm.newQuery( "select partitionName from org.apache.hadoop.hive.metastore.model.MPartition " + "where table.tableName == t1 && table.database.name == t2 && partitionName == t3"); query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3"); query.setUnique(true); query.setResult("partitionName"); String name = Warehouse.makePartName( convertToFieldSchemas(mtbl.getPartitionKeys()), partVals); String result = (String)query.execute(tableName, dbName, name); success = commitTransaction(); return result != null; } finally { if (!success) { rollbackTransaction(); } } } private void debugLog(String message) { if (LOG.isDebugEnabled()) { LOG.debug(message + getCallStack()); } } private static final int stackLimit = 5; private String getCallStack() { StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); int thislimit = Math.min(stackLimit, stackTrace.length); StringBuilder sb = new StringBuilder(); sb.append(" at:"); for (int i = 4; i < thislimit; i++) { sb.append("\n\t"); sb.append(stackTrace[i].toString()); } return sb.toString(); } private Function convertToFunction(MFunction mfunc) { if (mfunc == null) { return null; } Function func = new Function(mfunc.getFunctionName(), mfunc.getDatabase().getName(), mfunc.getClassName(), mfunc.getOwnerName(), PrincipalType.valueOf(mfunc.getOwnerType()), mfunc.getCreateTime(), FunctionType.findByValue(mfunc.getFunctionType()), convertToResourceUriList(mfunc.getResourceUris())); return func; } private MFunction convertToMFunction(Function func) throws InvalidObjectException { if (func == null) { return null; } MDatabase mdb = null; try { mdb = getMDatabase(func.getDbName()); } catch (NoSuchObjectException e) { LOG.error(StringUtils.stringifyException(e)); throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist."); } MFunction mfunc = new 
MFunction(func.getFunctionName(), mdb, func.getClassName(), func.getOwnerName(), func.getOwnerType().name(), func.getCreateTime(), func.getFunctionType().getValue(), convertToMResourceUriList(func.getResourceUris())); return mfunc; } private List<ResourceUri> convertToResourceUriList(List<MResourceUri> mresourceUriList) { List<ResourceUri> resourceUriList = null; if (mresourceUriList != null) { resourceUriList = new ArrayList<ResourceUri>(mresourceUriList.size()); for (MResourceUri mres : mresourceUriList) { resourceUriList.add( new ResourceUri(ResourceType.findByValue(mres.getResourceType()), mres.getUri())); } } return resourceUriList; } private List<MResourceUri> convertToMResourceUriList(List<ResourceUri> resourceUriList) { List<MResourceUri> mresourceUriList = null; if (resourceUriList != null) { mresourceUriList = new ArrayList<MResourceUri>(resourceUriList.size()); for (ResourceUri res : resourceUriList) { mresourceUriList.add(new MResourceUri(res.getResourceType().getValue(), res.getUri())); } } return mresourceUriList; } @Override public void createFunction(Function func) throws InvalidObjectException, MetaException { boolean committed = false; try { openTransaction(); MFunction mfunc = convertToMFunction(func); pm.makePersistent(mfunc); committed = commitTransaction(); } finally { if (!committed) { rollbackTransaction(); } } } @Override public void alterFunction(String dbName, String funcName, Function newFunction) throws InvalidObjectException, MetaException { boolean success = false; try { openTransaction(); funcName = HiveStringUtils.normalizeIdentifier(funcName); dbName = HiveStringUtils.normalizeIdentifier(dbName); MFunction newf = convertToMFunction(newFunction); if (newf == null) { throw new InvalidObjectException("new function is invalid"); } MFunction oldf = getMFunction(dbName, funcName); if (oldf == null) { throw new MetaException("function " + funcName + " doesn't exist"); } // For now only alter name, owner, class name, type 
oldf.setFunctionName(HiveStringUtils.normalizeIdentifier(newf.getFunctionName())); oldf.setDatabase(newf.getDatabase()); oldf.setOwnerName(newf.getOwnerName()); oldf.setOwnerType(newf.getOwnerType()); oldf.setClassName(newf.getClassName()); oldf.setFunctionType(newf.getFunctionType()); // commit the changes success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } } @Override public void dropFunction(String dbName, String funcName) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException { boolean success = false; try { openTransaction(); MFunction mfunc = getMFunction(dbName, funcName); pm.retrieve(mfunc); if (mfunc != null) { // TODO: When function privileges are implemented, they should be deleted here. pm.deletePersistentAll(mfunc); } success = commitTransaction(); } finally { if (!success) { rollbackTransaction(); } } } private MFunction getMFunction(String db, String function) { MFunction mfunc = null; boolean commited = false; try { openTransaction(); db = HiveStringUtils.normalizeIdentifier(db); function = HiveStringUtils.normalizeIdentifier(function); Query query = pm.newQuery(MFunction.class, "functionName == function && database.name == db"); query.declareParameters("java.lang.String function, java.lang.String db"); query.setUnique(true); mfunc = (MFunction) query.execute(function, db); pm.retrieve(mfunc); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return mfunc; } @Override public Function getFunction(String dbName, String funcName) throws MetaException { boolean commited = false; Function func = null; try { openTransaction(); func = convertToFunction(getMFunction(dbName, funcName)); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return func; } @Override public List<String> getFunctions(String dbName, String pattern) throws MetaException { boolean commited = false; List<String> funcs = null; try { 
openTransaction(); dbName = HiveStringUtils.normalizeIdentifier(dbName); // Take the pattern and split it on the | to get all the composing // patterns String[] subpatterns = pattern.trim().split("\\|"); String query = "select functionName from org.apache.hadoop.hive.metastore.model.MFunction " + "where database.name == dbName && ("; boolean first = true; for (String subpattern : subpatterns) { subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*"); if (!first) { query = query + " || "; } query = query + " functionName.matches(\"" + subpattern + "\")"; first = false; } query = query + ")"; Query q = pm.newQuery(query); q.declareParameters("java.lang.String dbName"); q.setResult("functionName"); q.setOrdering("functionName ascending"); Collection names = (Collection) q.execute(dbName); funcs = new ArrayList<String>(); for (Iterator i = names.iterator(); i.hasNext();) { funcs.add((String) i.next()); } commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } return funcs; } @Override public NotificationEventResponse getNextNotification(NotificationEventRequest rqst) { boolean commited = false; try { openTransaction(); long lastEvent = rqst.getLastEvent(); Query query = pm.newQuery(MNotificationLog.class, "eventId > lastEvent"); query.declareParameters("java.lang.Long lastEvent"); query.setOrdering("eventId ascending"); Collection<MNotificationLog> events = (Collection)query.execute(lastEvent); commited = commitTransaction(); if (events == null) { return null; } Iterator<MNotificationLog> i = events.iterator(); NotificationEventResponse result = new NotificationEventResponse(); result.setEvents(new ArrayList<NotificationEvent>()); int maxEvents = rqst.getMaxEvents() > 0 ? 
rqst.getMaxEvents() : Integer.MAX_VALUE; int numEvents = 0; while (i.hasNext() && numEvents++ < maxEvents) { result.addToEvents(translateDbToThrift(i.next())); } return result; } finally { if (!commited) { rollbackTransaction(); return null; } } } @Override public void addNotificationEvent(NotificationEvent entry) { boolean commited = false; try { openTransaction(); Query query = pm.newQuery(MNotificationNextId.class); Collection<MNotificationNextId> ids = (Collection) query.execute(); MNotificationNextId id = null; boolean needToPersistId; if (ids == null || ids.size() == 0) { id = new MNotificationNextId(1L); needToPersistId = true; } else { id = ids.iterator().next(); needToPersistId = false; } entry.setEventId(id.getNextEventId()); id.incrementEventId(); if (needToPersistId) pm.makePersistent(id); pm.makePersistent(translateThriftToDb(entry)); commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } } @Override public void cleanNotificationEvents(int olderThan) { boolean commited = false; try { openTransaction(); long tmp = System.currentTimeMillis() / 1000 - olderThan; int tooOld = (tmp > Integer.MAX_VALUE) ? 
0 : (int)tmp; Query query = pm.newQuery(MNotificationLog.class, "eventTime < tooOld"); query.declareParameters("java.lang.Integer tooOld"); Collection<MNotificationLog> toBeRemoved = (Collection)query.execute(tooOld); if (toBeRemoved != null && toBeRemoved.size() > 0) { pm.deletePersistent(toBeRemoved); } commited = commitTransaction(); } finally { if (!commited) { rollbackTransaction(); } } } @Override public CurrentNotificationEventId getCurrentNotificationEventId() { boolean commited = false; try { openTransaction(); Query query = pm.newQuery(MNotificationNextId.class); Collection<MNotificationNextId> ids = (Collection)query.execute(); long id = 0; if (ids != null && ids.size() > 0) { id = ids.iterator().next().getNextEventId() - 1; } commited = commitTransaction(); return new CurrentNotificationEventId(id); } finally { if (!commited) { rollbackTransaction(); } } } private MNotificationLog translateThriftToDb(NotificationEvent entry) { MNotificationLog dbEntry = new MNotificationLog(); dbEntry.setEventId(entry.getEventId()); dbEntry.setEventTime(entry.getEventTime()); dbEntry.setEventType(entry.getEventType()); dbEntry.setDbName(entry.getDbName()); dbEntry.setTableName(entry.getTableName()); dbEntry.setMessage(entry.getMessage()); return dbEntry; } private NotificationEvent translateDbToThrift(MNotificationLog dbEvent) { NotificationEvent event = new NotificationEvent(); event.setEventId(dbEvent.getEventId()); event.setEventTime(dbEvent.getEventTime()); event.setEventType(dbEvent.getEventType()); event.setDbName(dbEvent.getDbName()); event.setTableName(dbEvent.getTableName()); event.setMessage((dbEvent.getMessage())); return event; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-wafv2/src/main/java/com/amazonaws/services/wafv2/model/transform/TextTransformationMarshaller.java
2268
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.wafv2.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.wafv2.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * TextTransformationMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class TextTransformationMarshaller { private static final MarshallingInfo<Integer> PRIORITY_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Priority").build(); private static final MarshallingInfo<String> TYPE_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("Type").build(); private static final TextTransformationMarshaller instance = new TextTransformationMarshaller(); public static TextTransformationMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. 
*/ public void marshall(TextTransformation textTransformation, ProtocolMarshaller protocolMarshaller) { if (textTransformation == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(textTransformation.getPriority(), PRIORITY_BINDING); protocolMarshaller.marshall(textTransformation.getType(), TYPE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
byktol/jcr-qb
src/main/java/com/byktol/jcr/qb/criteria/impl/CriteriaImpl.java
5316
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.byktol.jcr.qb.criteria.impl;

import java.util.List;

import com.byktol.jcr.qb.criteria.Criteria;
import com.byktol.jcr.qb.criteria.Criterion;
import com.byktol.jcr.qb.criteria.Order;
import com.byktol.jcr.qb.criteria.Prop;
import com.google.common.base.Strings;

/**
 * Abstract implementation of the {@link Criteria} interface. Defines common
 * methods to be used by child classes and convenience methods for building the
 * query string.
 *
 * @since 1.0
 * @author Victor Alvarez
 */
public abstract class CriteriaImpl implements Criteria, Prop {

  /** Restrictions used to filter the query results; supplied by the subclass. */
  private final List<Criterion> criterion;

  /** Sort specifications applied to the query results; supplied by the subclass. */
  private final List<Order> orders;

  /** Node name filter; empty string means "not set" (never null). */
  private String nodeName;

  /** Node type filter; empty string means "not set" (never null). */
  private String nodeType;

  /** Search-root path; empty string means "not set" (never null). */
  private String path;

  /** Maximum number of query results requested. */
  private long limit;

  /** Number of leading results to skip. */
  private long offset;

  /**
   * The constructor is only accessible to the child classes and is meant to set
   * the specific implementation of {@link List} for both criterion and orders.
   * I'm not assuming a specific implementation here, but leave that to whoever
   * constructs the child class. Both lists may or may not be empty.
   *
   * @param criterionList
   *          The {@link List} implementation for filtering the results.
   * @param orderList
   *          The {@link List} implementation for orders.
   */
  protected CriteriaImpl(
    final List<Criterion> criterionList,
    final List<Order> orderList)
  {
    this.criterion = criterionList;
    this.orders = orderList;
    // Fix: also initialize nodeName. Previously only path and nodeType were
    // set to "", so getNodeName() could return null before setNodeName() was
    // called — inconsistent with the Strings.nullToEmpty contract enforced by
    // every setter and with the sibling accessors.
    this.path = this.nodeType = this.nodeName = "";
  }

  /**
   * Adds a {@link Criterion} to the list used to refine the query. This method
   * may be called multiple times, and the restrictions would then be added to
   * the list. The child class is responsible for dealing with this.
   *
   * @param criterion
   *          The restriction being added to the list
   * @return The current {@link Criteria} to enable chaining
   */
  @Override
  public Criteria add(final Criterion criterion)
  {
    getCriterion().add(criterion);
    return this;
  }

  /**
   * Adds an {@link Order} to the list used to sort the results. This method may
   * be called multiple times, which adds complexity to the ordering clause so
   * pay careful attention to what you do.
   *
   * @param order
   *          The order being added to the list
   * @return The current {@link Criteria} to enable chaining
   */
  @Override
  public Criteria addOrder(final Order order)
  {
    getOrders().add(order);
    return this;
  }

  /**
   * Adds a path to the query which is used to narrow the search hierarchy in
   * the repository. This path should start with a slash (/) and not have a
   * trailing slash, e.g. /content/some/path
   *
   * @param path
   *          The path to start the query on; null is stored as ""
   * @return The current {@link Criteria} to enable chaining
   */
  @Override
  public Criteria setPath(final String path)
  {
    this.path = Strings.nullToEmpty(path);
    return this;
  }

  @Override
  public String getPath()
  {
    return path;
  }

  /**
   * @return the limit set for the query results
   */
  @Override
  public long getLimit()
  {
    return limit;
  }

  /**
   * @param limit
   *          the maximum number of query results
   * @return the current {@link Criteria} to enable chaining
   */
  @Override
  public Criteria setLimit(final long limit)
  {
    this.limit = limit;
    return this;
  }

  /**
   * @return the offset
   */
  @Override
  public long getOffset()
  {
    return offset;
  }

  /**
   * @param offset
   *          the number of results to skip
   * @return the current {@link Criteria} to enable chaining
   */
  @Override
  public Criteria setOffset(final long offset)
  {
    this.offset = offset;
    return this;
  }

  @Override
  public List<Criterion> getCriterion()
  {
    return criterion;
  }

  @Override
  public List<Order> getOrders()
  {
    return orders;
  }

  @Override
  public String getNodeType()
  {
    return nodeType;
  }

  /**
   * Sets the node type being queried, i.e. you may want to query only the nodes
   * whose node type is cq:PageContent or nt:base. Because there are several
   * types, I haven't defined a list or hard-coded one.
   *
   * @param nodeType
   *          the node type used to narrow the results of possible nodes;
   *          null is stored as ""
   * @return the current {@link Criteria} to enable chaining
   */
  @Override
  public Criteria setNodeType(final String nodeType)
  {
    this.nodeType = Strings.nullToEmpty(nodeType);
    return this;
  }

  /**
   * @return the nodeName (never null; "" when unset)
   */
  public String getNodeName()
  {
    return nodeName;
  }

  /**
   * @param nodeName
   *          The nodeName to set; null is stored as ""
   * @return the current {@link Criteria} to enable chaining
   */
  public Criteria setNodeName(final String nodeName)
  {
    this.nodeName = Strings.nullToEmpty(nodeName);
    return this;
  }
}
apache-2.0
tractionsoftware/gwt-traction
src-demo/com/tractionsoftware/gwt/demo/opacity/client/OpacityDemo.java
2606
/* * Copyright 2010 Traction Software, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.tractionsoftware.gwt.demo.opacity.client; import com.google.gwt.core.client.EntryPoint; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.Element; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.Label; import com.google.gwt.user.client.ui.Panel; import com.google.gwt.user.client.ui.RootPanel; import com.google.gwt.user.client.ui.TextBox; import com.tractionsoftware.gwt.user.client.animation.OpacityAnimation; public class OpacityDemo implements EntryPoint { private TextBox startOpacity; private TextBox endOpacity; private TextBox duration; @Override public void onModuleLoad() { Panel controls = RootPanel.get("controls"); startOpacity = createTextBox("1.0"); endOpacity = createTextBox("0.1"); duration = createTextBox("5000"); addTextBox(controls, "Start Opacity", startOpacity); addTextBox(controls, "End Opacity", endOpacity); addTextBox(controls, "Duration", duration); Button start = new Button("Start"); start.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { OpacityAnimation animation = new OpacityAnimation(new Element[] { Document.get().getElementById("box1"), Document.get().getElementById("box2"), Document.get().getElementById("box3") }, 
Float.parseFloat(startOpacity.getText()), Float.parseFloat(endOpacity.getText())); animation.run(Integer.parseInt(duration.getText())); } }); controls.add(start); } private static final TextBox createTextBox(String text) { TextBox ret = new TextBox(); ret.setVisibleLength(10); ret.setText(text); return ret; } private static final void addTextBox(Panel panel, String label, TextBox box) { panel.add(new Label(label)); panel.add(box); } }
apache-2.0
UCLA-BD2K/aztec-text-analysis-tools
src/main/java/edu/ucla/cs/scai/aztec/summarization/RankedString.java
1493
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package edu.ucla.cs.scai.aztec.summarization;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

/**
 * A string paired with a numeric rank.
 *
 * <p>The natural ordering sorts instances by <em>descending</em> rank, so the
 * highest-ranked string comes first in a sorted collection.
 *
 * <p>NOTE(review): the ordering is inconsistent with equals — equals/hashCode
 * are not overridden here, so they fall back to identity while compareTo only
 * looks at rank. Be careful using this class in sorted sets/maps, where
 * distinct strings with equal rank compare as 0.
 *
 * @author Giuseppe M. Mazzeo <mazzeo@cs.ucla.edu>
 */
public class RankedString implements Externalizable, Comparable<RankedString> {

    // The ranked text (package-private, also mutated by readExternal).
    String string;

    // The score assigned to the text.
    double rank;

    /**
     * No-arg constructor, required by the {@link Externalizable} contract.
     */
    public RankedString() {
    }

    /**
     * @param string the text being ranked
     * @param rank   the score assigned to the text
     */
    public RankedString(String string, double rank) {
        this.string = string;
        this.rank = rank;
    }

    public String getString() {
        return string;
    }

    public void setString(String string) {
        this.string = string;
    }

    public double getRank() {
        return rank;
    }

    public void setRank(double rank) {
        this.rank = rank;
    }

    /**
     * Orders by descending rank: the operands are deliberately reversed with
     * respect to the usual ascending comparison.
     */
    @Override
    public int compareTo(RankedString o) {
        return Double.compare(o.rank, this.rank);
    }

    /** Writes the string first, then the rank; must mirror readExternal. */
    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(this.string);
        out.writeDouble(this.rank);
    }

    /** Reads the fields back in the exact order written by writeExternal. */
    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        this.string = (String) in.readObject();
        this.rank = in.readDouble();
    }

    @Override
    public String toString() {
        return string + " " + rank;
    }
}
apache-2.0
treejames/AcFun-Area63
app/src/tv/acfun/a63/CommentsActivity.java
29918
/* * Copyright (C) 2013 YROM.NET * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tv.acfun.a63; import android.annotation.TargetApi; import android.app.AlertDialog; import android.app.ProgressDialog; import android.content.ClipData; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.res.Configuration; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.os.ResultReceiver; import android.support.v4.view.MenuItemCompat; import android.support.v4.view.WindowCompat; import android.support.v7.app.ActionBar; import android.text.ClipboardManager; import android.text.Editable; import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.style.ImageSpan; import android.util.Log; import android.util.SparseArray; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.View.OnClickListener; import android.view.View.OnTouchListener; import android.view.ViewGroup; import android.view.animation.Animation; import android.view.animation.Animation.AnimationListener; import android.view.animation.AnimationUtils; import android.view.inputmethod.InputMethodManager; import android.widget.AdapterView; import 
android.widget.AdapterView.OnItemClickListener; import android.widget.AdapterView.OnItemLongClickListener; import android.widget.BaseAdapter; import android.widget.EditText; import android.widget.GridView; import android.widget.ImageButton; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.ProgressBar; import android.widget.RelativeLayout; import android.widget.RelativeLayout.LayoutParams; import android.widget.TextView; import android.widget.Toast; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import com.android.volley.Cache; import com.android.volley.NetworkResponse; import com.android.volley.ParseError; import com.android.volley.Request; import com.android.volley.Response; import com.android.volley.Response.ErrorListener; import com.android.volley.Response.Listener; import com.android.volley.VolleyError; import com.android.volley.toolbox.HttpHeaderParser; import com.handmark.pulltorefresh.library.PullToRefreshBase; import com.handmark.pulltorefresh.library.PullToRefreshBase.OnLastItemVisibleListener; import com.handmark.pulltorefresh.library.PullToRefreshBase.OnRefreshListener; import com.handmark.pulltorefresh.library.PullToRefreshListView; import com.umeng.analytics.MobclickAgent; import org.apache.commons.httpclient.Cookie; import org.apache.commons.httpclient.HttpException; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Locale; import tv.acfun.a63.adapter.CommentsAdapter; import tv.acfun.a63.api.ArticleApi; import tv.acfun.a63.api.entity.Comment; import tv.acfun.a63.api.entity.Comments; import tv.acfun.a63.api.entity.User; import tv.acfun.a63.base.BaseActivity; import tv.acfun.a63.util.ActionBarUtil; import tv.acfun.a63.util.ArrayUtil; import tv.acfun.a63.util.BaseAnimationListener; import tv.acfun.a63.util.Connectivity; import tv.acfun.a63.util.CustomUARequest; import tv.acfun.a63.util.MemberUtils; import 
tv.acfun.a63.util.TextViewUtils;
import tv.acfun.a63.view.EmotionView;

/**
 * Comment screen for one article: shows the paged comment list for "ac" + aid
 * (loaded via Volley through {@link CommentsRequest}) and lets the signed-in
 * user post a new comment, optionally quoting an existing floor.
 * <p>
 * The activity is its own click / item-click / Volley listener. The bottom
 * input bar and the overlay action bar are hidden/shown together as the list
 * is scrolled by touch.
 *
 * @author Yrom
 */
@SuppressWarnings("deprecation")
public class CommentsActivity extends BaseActivity implements OnClickListener,
        Listener<Comments>, ErrorListener, OnItemClickListener, OnItemLongClickListener {

    private static final String TAG = "Comments";
    // Article id this comment list belongs to; passed in via the launch Intent.
    private int aid;
    private InputMethodManager mKeyboard;
    private ListView mList;
    private ProgressBar mLoadingBar;
    private TextView mTimeOutText;
    private View mFootview;
    private CommentsAdapter mAdapter;
    // Next page to request; 1-based.
    private int pageIndex = 1;
    private boolean hasNextPage;
    private ImageButton mBtnSend;
    private EditText mCommentText;
    private View mBtnEmotion;
    private GridView mEmotionGrid;
    // Whether the action bar + comment bar pair is currently visible.
    private boolean isBarShowing = true;
    private PullToRefreshListView mPtr;

    /** Convenience launcher: opens the comment list for the given article id. */
    public static void start(Context context, int aid) {
        Intent intent = new Intent(context, CommentsActivity.class);
        intent.putExtra("aid", aid);
        context.startActivity(intent);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Overlay action bar so it can slide away over the list content.
        supportRequestWindowFeature(WindowCompat.FEATURE_ACTION_BAR_OVERLAY);
        super.onCreate(savedInstanceState);
        aid = getIntent().getIntExtra("aid", 0);
        // No article id -> nothing to show (note: no finish() here, the
        // activity stays up with no content view).
        if (aid == 0)
            return;
        setContentView(R.layout.activity_comments);
        MobclickAgent.onEvent(this, "view_comment");
        ActionBar ab = getSupportActionBar();
        ab.setBackgroundDrawable(getResources().getDrawable(R.drawable.ab_bg_trans));
        mKeyboard = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
        ActionBarUtil.setXiaomiFilterDisplayOptions(ab, false);
        ab.setTitle("ac" + aid + " / 评论");
        initCommentsBar();
        initList();
        requestData(1, true);
    }

    /** Wires up the pull-to-refresh list, its footer, and the scroll/touch listeners. */
    private void initList() {
        mPtr = (PullToRefreshListView) findViewById(R.id.list);
        mPtr.setOnRefreshListener(new OnRefreshListener<ListView>() {
            @Override
            public void onRefresh(PullToRefreshBase<ListView> refreshView) {
                // Pull-down: reload from page 1, invalidating the cache.
                if (!isloading) {
                    pageIndex = 1;
                    requestData(pageIndex, true);
                }
            }
        });
        mPtr.setOnLastItemVisibleListener(new OnLastItemVisibleListener() {
            @Override
            public void onLastItemVisible() {
                // Infinite scroll: fetch the next page, or mark the end in the footer.
                if (hasNextPage) {
                    if (!isloading) {
                        requestData(++pageIndex, false);
                    }
                } else {
                    mFootview.findViewById(R.id.list_footview_progress).setVisibility(View.GONE);
                    ((TextView) mFootview.findViewById(R.id.list_footview_text)).setText(R.string.no_more);
                }
            }
        });
        mList = mPtr.getRefreshableView();
        mLoadingBar = (ProgressBar) findViewById(R.id.time_progress);
        mTimeOutText = (TextView) findViewById(R.id.time_out_text);
        mTimeOutText.setOnClickListener(this);
        mList.setHeaderDividersEnabled(false);
        mFootview = LayoutInflater.from(this).inflate(R.layout.list_footerview, mList, false);
        mList.setVisibility(View.INVISIBLE);
        mList.addFooterView(mFootview);
        mList.setFooterDividersEnabled(false);
        mList.setOnItemClickListener(this);
        mList.setOnItemLongClickListener(this);
        mList.setOnTouchListener(new OnTouchListener() {
            private int mMotionY;

            // Show/hide the bars based on vertical drag direction; never
            // consumes the event so the list still scrolls.
            public boolean onTouch(View v, MotionEvent event) {
                if (isInputShow)
                    return false;
                int y = (int) event.getY();
                switch (event.getAction()) {
                case MotionEvent.ACTION_DOWN:
                    mMotionY = y;
                    break;
                case MotionEvent.ACTION_MOVE:
                    int delta = y - mMotionY;
                    // Ignore small jitters below the 100px threshold.
                    if (Math.abs(delta) < 100)
                        break;
                    if (delta > 0) {
                        showBar();
                    } else {
                        hideBar();
                    }
                    mMotionY = y;
                    break;
                }
                return false;
            }
        });
        mAdapter = new CommentsAdapter(this, data, commentIdList);
        mList.setAdapter(mAdapter);
    }

    // Tracks soft-keyboard visibility as reported by mIMResultRecevier.
    private boolean isInputShow;
    private View mCommentBar;
    // Receives show/hide results from InputMethodManager calls.
    // (Name keeps the original "Recevier" spelling used throughout this class.)
    private ResultReceiver mIMResultRecevier = new ResultReceiver(null) {
        @Override
        protected void onReceiveResult(int resultCode, Bundle resultData) {
            isInputShow = (resultCode == InputMethodManager.RESULT_SHOWN
                    || resultCode == InputMethodManager.RESULT_UNCHANGED_SHOWN);
        }
    };

    /** Sets up the bottom comment-input bar, send button and emotion (emoticon) grid. */
    private void initCommentsBar() {
        mCommentBar = findViewById(R.id.comments_bar);
        // With a native action bar in portrait, lift the comment bar above it
        // (the overlay action bar would otherwise cover the input).
        if (ActionBarUtil.hasSB() && getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) {
            RelativeLayout.LayoutParams params = (LayoutParams) mCommentBar.getLayoutParams();
            params.bottomMargin = getResources().getDimensionPixelSize(R.dimen.abc_action_bar_default_height);
            params.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
            mCommentBar.setLayoutParams(params);
        }
        mBtnSend = (ImageButton) findViewById(R.id.comments_send_btn);
        mCommentText = (EditText) findViewById(R.id.comments_edit);
        mCommentText.setOnFocusChangeListener(new View.OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                // Keep the soft keyboard in sync with edit-text focus.
                if (hasFocus) {
                    mKeyboard.showSoftInput(v, InputMethodManager.SHOW_FORCED, mIMResultRecevier);
                } else {
                    mKeyboard.hideSoftInputFromWindow(v.getWindowToken(), 0, mIMResultRecevier);
                }
            }
        });
        mBtnEmotion = findViewById(R.id.comments_emotion_btn);
        mEmotionGrid = (GridView) findViewById(R.id.emotions);
        mBtnSend.setOnClickListener(this);
        mBtnEmotion.setOnClickListener(this);
        mEmotionGrid.setAdapter(mEmotionAdapter);
        mEmotionGrid.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Insert the emotion code at the cursor and overlay it with a
                // half-size inline image of the rendered emoticon.
                int index = mCommentText.getSelectionEnd();
                Editable text = mCommentText.getText();
                String emotion = parent.getItemAtPosition(position).toString();
                text.insert(index, emotion);
                EmotionView v = (EmotionView) parent.getAdapter().getView(position, null, null);
                Drawable drawable = TextViewUtils.convertViewToDrawable(v);
                drawable.setBounds(0, 0, drawable.getIntrinsicWidth() / 2, drawable.getIntrinsicHeight() / 2);
                text.setSpan(new ImageSpan(drawable), index, index + emotion.length(),
                        Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
            }
        });
    }

    // Re-anchor the comment bar when rotating: landscape sits flush at the
    // bottom, portrait keeps the action-bar-height margin (see initCommentsBar).
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        if (ActionBarUtil.hasSB()) {
            RelativeLayout.LayoutParams params = (LayoutParams) mCommentBar.getLayoutParams();
            params.bottomMargin = (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) ? 0
                    : getResources().getDimensionPixelSize(R.dimen.abc_action_bar_default_height);
            params.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
            mCommentBar.setLayoutParams(params);
        }
    }

    // Adapter for the emoticon picker: 94 items split into two sets, "ac"
    // (positions 0-53 -> ac01..ac54) and "ais" (positions 54+ -> ais01..).
    ListAdapter mEmotionAdapter = new BaseAdapter() {
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                convertView = new EmotionView(getApplicationContext());
            }
            ((EmotionView) convertView).setEmotionId(position + 1);
            return convertView;
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        // Returns the markup code inserted into the comment text, e.g. "[emot=ac,01/]".
        @Override
        public String getItem(int position) {
            String cat = position >= 54 ? "ais" : "ac";
            int id = position >= 54 ? position - 53 : position + 1;
            return String.format(Locale.US, "[emot=%s,%02d/]", cat, id);
        }

        @Override
        public int getCount() {
            return 94;
        }
    };

    /**
     * Queues a page request. When {@code requestNewData} is true the cached
     * entry is invalidated first so the server is hit (refresh / first load).
     */
    private void requestData(int page, boolean requestNewData) {
        isloading = true;
        Request<?> request = new CommentsRequest(getApplicationContext(), aid, page, this, this);
        request.setTag(TAG);
        request.setShouldCache(true);
        if (requestNewData) {
            mTimeOutText.setVisibility(View.GONE);
            // Only show the big spinner when there is nothing on screen yet.
            if (mAdapter == null || mAdapter.isEmpty())
                mLoadingBar.setVisibility(View.VISIBLE);
            AcApp.getGloableQueue().getCache().invalidate(request.getCacheKey(), true);
        }
        AcApp.addRequest(request);
    }

    // Slide-out finished: park the bar off-screen (translation only works on API 11+).
    AnimationListener mHideListener = new BaseAnimationListener() {
        @TargetApi(Build.VERSION_CODES.HONEYCOMB)
        public void onAnimationEnd(Animation animation) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
                mCommentBar.setTranslationY(mCommentBar.getHeight());
            mCommentBar.setVisibility(View.GONE);
        }
    };

    // Slide-in starting: reset translation and make the bar visible.
    AnimationListener mShowListener = new BaseAnimationListener() {
        @TargetApi(Build.VERSION_CODES.HONEYCOMB)
        public void onAnimationStart(Animation animation) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
                mCommentBar.setTranslationY(0);
            mCommentBar.setVisibility(View.VISIBLE);
        }
    };

    // Currently running show/hide animation, cancelled before starting a new one.
    Animation mAnim;

    /** Hides the action bar and slides the comment bar out. No-op if already hidden. */
    void hideBar() {
        if (!isBarShowing)
            return;
        isBarShowing = false;
        getSupportActionBar().hide();
        if (mAnim != null)
            mAnim.cancel();
        Animation anim = AnimationUtils.loadAnimation(this, R.anim.slide_out);
        anim.setAnimationListener(mHideListener);
        mAnim = anim;
        mCommentBar.startAnimation(mAnim);
    }

    /** Shows the action bar and slides the comment bar in. No-op if already showing. */
    void showBar() {
        if (isBarShowing)
            return;
        isBarShowing = true;
        getSupportActionBar().show();
        if (mAnim != null)
            mAnim.cancel();
        Animation anim = AnimationUtils.loadAnimation(this, R.anim.slide_in);
        anim.setAnimationListener(mShowListener);
        mAnim = anim;
        mCommentBar.startAnimation(mAnim);
    }

    /**
     * Volley request for one page of comments. The JSON payload carries the
     * page metadata under "data" plus a cid-keyed map "commentContentArr"
     * that is parsed into a SparseArray by comment id.
     */
    static class CommentsRequest extends CustomUARequest<Comments> {
        public CommentsRequest(Context context, int aid, int page, Listener<Comments> listener,
                ErrorListener errListener) {
            super(ArticleApi.getCommentUrl(context, aid, page), Comments.class, listener, errListener);
        }

        @Override
        protected Response<Comments> parseNetworkResponse(NetworkResponse response) {
            try {
                String json = new String(response.data, HttpHeaderParser.parseCharset(response.headers));
                JSONObject parseObject = JSON.parseObject(json);
                Comments comments = parseObject.getObject("data", Comments.class);
                JSONObject commentContentArr = parseObject.getJSONObject("data").getJSONObject("commentContentArr");
                comments.commentArr = parseContentAttr(commentContentArr);
                return Response.success(comments, cache(response));
            } catch (Exception e) {
                Log.e(TAG, "parse article error", e);
                return Response.error(new ParseError(e));
            }
        }

        // Converts the {cid -> comment JSON} map into a SparseArray keyed by cid.
        private SparseArray<Comment> parseContentAttr(JSONObject commentContentArr) {
            SparseArray<Comment> attr = new SparseArray<Comment>();
            for (Iterator<String> iterator = commentContentArr.keySet().iterator(); iterator.hasNext();) {
                String key = iterator.next();
                JSONObject content = commentContentArr.getJSONObject(key);
                Comment comment = JSON.toJavaObject(content, Comment.class);
                attr.put(comment.cid, comment);
            }
            return attr;
        }

        // Cache entry with a 60-unit TTL (presumably seconds — see Connectivity.newCache).
        private Cache.Entry cache(NetworkResponse response) {
            return Connectivity.newCache(response, 60);
        }
    }

    // Comments keyed by cid, plus the display order of cids. Shared with mAdapter.
    SparseArray<Comment> data = new SparseArray<>();
    List<Integer> commentIdList = new ArrayList<>();
    private boolean isloading;
    // Set when a page load failed; tapping the footer retries that page.
    private boolean isreload;
    private Quote mQuoteSpan;
    private ImageSpan mQuoteImage;
    private User mUser;
    private AlertDialog sizeChooser;

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
        case R.id.time_out_text:
            // Error placeholder tapped: retry from page 1.
            pageIndex = 1;
            requestData(pageIndex, true);
            break;
        case R.id.comments_send_btn:
            mKeyboard.hideSoftInputFromWindow(mCommentText.getWindowToken(), 0, mIMResultRecevier);
            postComment();
            break;
        case R.id.comments_emotion_btn:
            // Swap keyboard <-> emoticon grid; the small delay lets the
            // keyboard start hiding before the grid appears.
            if (isInputShow) {
                mKeyboard.hideSoftInputFromWindow(mCommentText.getWindowToken(), 0, mIMResultRecevier);
                if (mEmotionGrid.getVisibility() != View.VISIBLE)
                    mEmotionGrid.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            mEmotionGrid.setVisibility(View.VISIBLE);
                        }
                    }, 20);
            } else {
                mEmotionGrid.setVisibility(mEmotionGrid.getVisibility() == View.VISIBLE ? View.GONE : View.VISIBLE);
            }
            break;
        }
    }

    /** Validates the draft and kicks off an async post, resolving the quoted comment if any. */
    private void postComment() {
        if (!validate()) {
            return;
        }
        mEmotionGrid.setVisibility(View.GONE);
        MobclickAgent.onEvent(this, "post_comment");
        int count = getQuoteCount();
        String comment = getComment();
        Comment quote = data == null ? null : data.get(findCid(count));
        new CommentPostTask(comment, quote).execute();
    }

    /** Posts one comment in the background, retrying up to 3 times on network errors. */
    class CommentPostTask extends AsyncTask<Void, Void, Boolean> {
        protected void onPreExecute() {
            mBtnSend.setEnabled(false);
            dialog = ProgressDialog.show(CommentsActivity.this, null, getString(R.string.posting_comment), true,
                    false);
        }

        String comment;
        Comment quote;
        ProgressDialog dialog;

        public CommentPostTask(String comment, Comment quote) {
            this.comment = comment;
            this.quote = quote;
        }

        @Override
        protected Boolean doInBackground(Void... params) {
            Cookie[] cookies = JSON.parseObject(mUser.cookies, Cookie[].class);
            // Best-effort retry loop; network failures are logged and retried.
            for (int i = 0; i < 3; i++)
                try {
                    if (MemberUtils.postComments(comment, quote, aid,
                            ArticleApi.getDomainRoot(getApplicationContext()), cookies))
                        return true;
                } catch (HttpException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            return false;
        }

        @Override
        protected void onPostExecute(Boolean result) {
            dialog.dismiss();
            mBtnSend.setEnabled(true);
            // Draft is cleared regardless of outcome.
            mCommentText.setText("");
            if (result) {
                Toast.makeText(getApplicationContext(), getString(R.string.comment_success), Toast.LENGTH_SHORT)
                        .show();
                pageIndex = 1;
                requestData(pageIndex, true);
            } else {
                Toast.makeText(getApplicationContext(), getString(R.string.comment_failed), Toast.LENGTH_SHORT)
                        .show();
            }
        }
    }

    /** Maps a floor number back to its comment id; 0 if no comment has that floor. */
    int findCid(int floorCount) {
        for (int i = 0; i < commentIdList.size(); i++) {
            int key = commentIdList.get(i);
            Comment c = data.get(key);
            if (c.count == floorCount)
                return c.cid;
        }
        return 0;
    }

    @Override
    public void onErrorResponse(VolleyError error) {
        Log.w(TAG, Log.getStackTraceString(error));
        if (pageIndex > 1) {
            // Paging failure: turn the footer into a tap-to-retry affordance.
            isreload = true;
            mFootview.findViewById(R.id.list_footview_progress).setVisibility(View.GONE);
            TextView textview = (TextView) mFootview.findViewById(R.id.list_footview_text);
            textview.setText(R.string.reloading);
        } else {
            // First-page failure: show the full-screen error text.
            mLoadingBar.setVisibility(View.GONE);
            mTimeOutText.setVisibility(View.VISIBLE);
            mList.setVisibility(View.GONE);
        }
    }

    @Override
    public void onResponse(Comments response) {
        isloading = false;
        mPtr.onRefreshComplete();
        if (response.totalCount == 0) {
            // Empty thread: reuse the error placeholder as a "no comments yet" view.
            mLoadingBar.setVisibility(View.GONE);
            mTimeOutText.setVisibility(View.VISIBLE);
            mList.setVisibility(View.GONE);
            Drawable drawable = getResources().getDrawable(R.drawable.ac_16);
            drawable.setBounds(0, 0, drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight());
            mTimeOutText.setCompoundDrawables(drawable, null, null, null);
            mTimeOutText.setText(R.string.no_comment_yet);
            return;
        }
        if (response.page == 1) {
            // Fresh load/refresh: drop any previously shown pages.
            if (mAdapter != null)
                mAdapter.notifyDataSetInvalidated();
            data.clear();
            commentIdList.clear();
            mLoadingBar.setVisibility(View.GONE);
            mList.setVisibility(View.VISIBLE);
        }
        ArrayUtil.putAll(response.commentArr, data);
        commentIdList.addAll(ArrayUtil.asList(response.commentList));
        hasNextPage = response.nextPage > response.page;
        if (data != null && data.size() > 0) {
            mAdapter.setData(data, commentIdList);
            mAdapter.notifyDataSetChanged();
            isreload = false;
        }
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        int count = mAdapter.getCount();
        if (position > count) {
            // Footer tapped: retry the failed page load if we are in the error state.
            if (isreload) {
                mFootview.findViewById(R.id.list_footview_progress).setVisibility(View.VISIBLE);
                TextView textview = (TextView) mFootview.findViewById(R.id.list_footview_text);
                textview.setText(R.string.loading);
                requestData(pageIndex, false);
            }
            return;
        }
        showBar(); // show input bar when selected comment
        Object o = parent.getItemAtPosition(position);
        if (o == null || !(o instanceof Comment))
            return;
        Comment c = (Comment) o;
        int quoteCount = getQuoteCount();
        removeQuote(mCommentText.getText());
        // Tapping the already-quoted comment cancels the quote.
        if (quoteCount == c.count)
            return;
        String pre = "引用:#" + c.count;
        mQuoteSpan = new Quote(c.count);
        // Chip-style token rendering, see
        // http://www.kpbird.com/2013/02/android-chips-edittext-token-edittext.html
        SpannableStringBuilder sb = SpannableStringBuilder.valueOf(mCommentText.getText());
        TextView tv = TextViewUtils.createBubbleTextView(this, pre);
        BitmapDrawable bd = (BitmapDrawable) TextViewUtils.convertViewToDrawable(tv);
        bd.setBounds(0, 0, bd.getIntrinsicWidth(), bd.getIntrinsicHeight());
        sb.insert(0, pre);
        mQuoteImage = new ImageSpan(bd);
        // Both the visual chip and the marker Quote span cover the "引用:#N" prefix.
        sb.setSpan(mQuoteImage, 0, pre.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
        sb.setSpan(mQuoteSpan, 0, pre.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
        sb.append("");
        mCommentText.setText(sb);
        mCommentText.setSelection(mCommentText.getText().length());
    }

    /**
     * Checks that a user is signed in and that the comment body (excluding the
     * quote chip) is more than 5 characters. Shows a toast on each failure.
     */
    boolean validate() {
        mUser = AcApp.getUser();
        if (mUser == null) {
            Toast.makeText(this, getString(R.string.sign_in_first), Toast.LENGTH_SHORT).show();
            startActivity(SigninActivity.createIntent(this));
            return false;
        }
        Editable text = mCommentText.getText();
        int len = text.length() - getQuoteSpanLength(text);
        if (len == 0) {
            Toast.makeText(this, getString(R.string.no_comment), Toast.LENGTH_SHORT).show();
            return false;
        }
        if (len <= 5) {
            Toast.makeText(this, getString(R.string.comment_not_enough), Toast.LENGTH_SHORT).show();
            return false;
        }
        return true;
    }

    /** Length of the quote-chip prefix inside {@code text}, or 0 when there is no quote. */
    int getQuoteSpanLength(Editable text) {
        Quote quote = TextViewUtils.getLast(text, Quote.class);
        int start = text.getSpanStart(quote);
        int end = text.getSpanEnd(quote);
        if (start >= 0) {
            return end - start;
        }
        return 0;
    }

    /** Deletes the quote-chip prefix from {@code text}, if present. */
    void removeQuote(Editable text) {
        Quote quote = TextViewUtils.getLast(text, Quote.class);
        int start = text.getSpanStart(quote);
        int end = text.getSpanEnd(quote);
        // Log.d(TAG, String.format("start=%d, end=%d", start, end));
        if (start >= 0) {
            // Log.d(TAG, text.subSequence(start, end).toString());
            text.delete(start, end);
        }
    }

    /** Returns the draft text with the quote-chip span cut out (works on a copy). */
    String getComment() {
        Editable text = SpannableStringBuilder.valueOf(mCommentText.getText());
        Quote quote = TextViewUtils.getLast(text, Quote.class);
        int start = text.getSpanStart(quote);
        int end = text.getSpanEnd(quote);
        if (start < 0)
            return text.toString();
        else if (start == 0) {
            return text.subSequence(end, text.length()).toString();
        } else
            return text.subSequence(0, start).toString() + text.subSequence(end, text.length()).toString();
    }

    /**
     * Floor number of the currently quoted comment. Call before
     * {@code removeQuote()}.
     *
     * @return the quoted floor count, or -1 if nothing is quoted
     */
    int getQuoteCount() {
        Editable text = mCommentText.getText();
        Quote quote = TextViewUtils.getLast(text, Quote.class);
        int start = text.getSpanStart(quote);
        if (start >= 0) {
            return quote.floosCount;
        }
        return -1;
    }

    /** Marker span recording which floor is being quoted in the draft. */
    class Quote {
        int floosCount;

        public Quote(int count) {
            this.floosCount = count;
        }
    }

    @Override
    public void onBackPressed() {
        // Back dismisses, in order: the emoticon grid, then the keyboard, then the screen.
        if (mEmotionGrid.isShown())
            mEmotionGrid.setVisibility(View.GONE);
        else if (isInputShow)
            mKeyboard.hideSoftInputFromWindow(mEmotionGrid.getWindowToken(), 0, mIMResultRecevier);
        else
            super.onBackPressed();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case android.R.id.button1:
            // Lazily build the comment-font-size chooser; the selection is
            // persisted under "text_size" and the list redrawn on change.
            if (sizeChooser == null) {
                final int checked = AcApp.getConfig().getInt("text_size", 0);
                sizeChooser = new AlertDialog.Builder(this)
                        .setCancelable(true)
                        .setTitle(R.string.comment_font_size)
                        .setSingleChoiceItems(R.array.title_sizes, checked,
                                new DialogInterface.OnClickListener() {
                                    int lastSelected = checked;

                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        if (lastSelected != which) {
                                            AcApp.putInt("text_size", which);
                                            if (mAdapter != null)
                                                mAdapter.notifyDataSetChanged();
                                            dialog.dismiss();
                                            lastSelected = which;
                                        }
                                    }
                                }).create();
            }
            sizeChooser.show();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuItem item = menu.add(0, android.R.id.button1, 0, R.string.font_size).setIcon(R.drawable.ic_text_size);
        MenuItemCompat.setShowAsAction(item, MenuItemCompat.SHOW_AS_ACTION_IF_ROOM);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (isInputShow) {
            mKeyboard.hideSoftInputFromWindow(mCommentText.getWindowToken(), 0);
        }
        // Cancel any in-flight page requests tagged by this screen.
        AcApp.cancelAllRequest(TAG);
        if (mAdapter != null) {
            mAdapter.setData(null, null);
            mAdapter = null;
        }
        mIMResultRecevier = null;
    }

    // Long-press copies the comment body to the clipboard (HTML clip on API 16+).
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
        Object o = parent.getItemAtPosition(position);
        if (o == null || !(o instanceof Comment))
            return false;
        Comment c = (Comment) o;
        ClipboardManager ma = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            ClipData text = ClipData.newHtmlText(c.userName, c.content, c.content);
            ((android.content.ClipboardManager) ma).setPrimaryClip(text);
        } else {
            ma.setText(c.content);
        }
        // NOTE: duration literal 0 == Toast.LENGTH_SHORT.
        Toast.makeText(this, "#" + c.count + "的内容已复制", 0).show();
        return true;
    }
}
apache-2.0
sdole/aws-sdk-java
aws-java-sdk-autoscaling/src/main/java/com/amazonaws/services/autoscaling/model/EnabledMetric.java
11146
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.autoscaling.model;

import java.io.Serializable;

/**
 * <p>
 * Describes an enabled metric.
 * </p>
 * <p>
 * {@code metric} is one of: {@code GroupMinSize}, {@code GroupMaxSize},
 * {@code GroupDesiredCapacity}, {@code GroupInServiceInstances},
 * {@code GroupPendingInstances}, {@code GroupStandbyInstances},
 * {@code GroupTerminatingInstances} or {@code GroupTotalInstances}.
 * {@code granularity} has a single valid value, {@code 1Minute}.
 * </p>
 */
public class EnabledMetric implements Serializable, Cloneable {

    /** Name of the enabled metric; see the class javadoc for valid values. */
    private String metric;

    /** Granularity of the metric; the only valid value is {@code 1Minute}. */
    private String granularity;

    /**
     * Sets the metric name.
     *
     * @param metric one of the group metrics listed in the class javadoc
     */
    public void setMetric(String metric) {
        this.metric = metric;
    }

    /**
     * Returns the metric name.
     *
     * @return the metric name, or {@code null} if unset
     */
    public String getMetric() {
        return metric;
    }

    /**
     * Fluent variant of {@link #setMetric(String)}.
     *
     * @param metric one of the group metrics listed in the class javadoc
     * @return this object, so that method calls can be chained together
     */
    public EnabledMetric withMetric(String metric) {
        setMetric(metric);
        return this;
    }

    /**
     * Sets the granularity of the metric.
     *
     * @param granularity the granularity; the only valid value is {@code 1Minute}
     */
    public void setGranularity(String granularity) {
        this.granularity = granularity;
    }

    /**
     * Returns the granularity of the metric.
     *
     * @return the granularity, or {@code null} if unset
     */
    public String getGranularity() {
        return granularity;
    }

    /**
     * Fluent variant of {@link #setGranularity(String)}.
     *
     * @param granularity the granularity; the only valid value is {@code 1Minute}
     * @return this object, so that method calls can be chained together
     */
    public EnabledMetric withGranularity(String granularity) {
        setGranularity(granularity);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getMetric() != null) {
            out.append("Metric: ").append(getMetric()).append(",");
        }
        if (getGranularity() != null) {
            out.append("Granularity: ").append(getGranularity());
        }
        return out.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof EnabledMetric)) {
            return false;
        }
        EnabledMetric that = (EnabledMetric) obj;
        return eq(getMetric(), that.getMetric())
                && eq(getGranularity(), that.getGranularity());
    }

    /** Null-safe string equality used by {@link #equals(Object)}. */
    private static boolean eq(String a, String b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getMetric() == null ? 0 : getMetric().hashCode());
        result = prime * result + (getGranularity() == null ? 0 : getGranularity().hashCode());
        return result;
    }

    @Override
    public EnabledMetric clone() {
        try {
            return (EnabledMetric) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
apache-2.0
ytxhao/ytxplayer
YtxPlayer/app/src/main/java/com/ytx/ican/media/player/render/GraphicGLSurfaceView.java
15017
package com.ytx.ican.media.player.render; import android.app.ActivityManager; import android.content.Context; import android.content.pm.ConfigurationInfo; import android.graphics.PixelFormat; import android.opengl.GLSurfaceView; import android.support.annotation.NonNull; import android.util.AttributeSet; import android.util.Log; import android.view.SurfaceHolder; import android.view.animation.AccelerateDecelerateInterpolator; import android.view.animation.Interpolator; import com.ytx.ican.media.player.pragma.YtxLog; import javax.microedition.khronos.egl.EGL10; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.egl.EGLContext; import javax.microedition.khronos.egl.EGLDisplay; /** * Created by Administrator on 2016/9/10. */ public class GraphicGLSurfaceView extends GLSurfaceView { public static final String TAG = "GraphicGLSurfaceView"; public GraphicRenderer renderer; int mWidth; int mHeight; Picture firstPicture; Interpolator mInterpolator = new AccelerateDecelerateInterpolator(); volatile boolean mIsResume = false; volatile boolean isInitial = false; private ISurfaceCallback mSurfaceCallback; public ISurfaceCallback getSurfaceCallback() { return mSurfaceCallback; } public void setSurfaceCallback(ISurfaceCallback mSurfaceCallback) { this.mSurfaceCallback = mSurfaceCallback; } public GraphicGLSurfaceView(Context context) { this(context,null); } public GraphicGLSurfaceView(Context context, AttributeSet attrs) { super(context, attrs); initView(context); } private void initView(Context context){ if(!supportsOpenGLES2(context)){ throw new RuntimeException("not support gles 2.0"); } setEGLContextClientVersion(2); setEGLContextFactory(new ContextFactory()); setEGLConfigChooser(new CustomChooseConfig.ComponentSizeChooser(8, 8, 8, 8, 0, 0)); getHolder().setFormat(PixelFormat.RGBA_8888); getHolder().addCallback(this); renderer = new GraphicRenderer(); setRenderer(renderer); setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); } @Override 
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); YtxLog.d(TAG,"#### #### onMeasure getHeight=" + getHeight() +" getWidth="+getWidth()); } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { super.onLayout(changed, left, top, right, bottom); YtxLog.d(TAG,"#### #### onLayout getHeight=" + getHeight() +" getWidth="+getWidth()); } private boolean supportsOpenGLES2(final Context context) { final ActivityManager activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo(); return configurationInfo.reqGlEsVersion >= 0x20000; } public GraphicRenderer getRenderer(){ return renderer; } public interface OnScreenWindowChangedListener { void onScreenWindowChanged(boolean isFinger, int width, int height, int x1, int y1, int x2, int y2); } private OnScreenWindowChangedListener onScreenWindowChangedListener = null; public void setOnScreenWindowChangedListener(OnScreenWindowChangedListener listener){ onScreenWindowChangedListener = listener; } public void queue(Runnable r) { renderer.queue.add(r); requestRender(); } @Override public void onResume() { super.onResume(); mIsResume = true; YtxLog.d(TAG,"onResume isInitial="+isInitial); } @Override public void onPause() { super.onPause(); mIsResume = false; } protected void initial() { YtxLog.d(TAG, "initial"); } protected void release() { YtxLog.d(TAG, "release"); renderer.release(); if (firstPicture != null) { firstPicture.clear(); } } public void drawFrame() { } public void setPicture(Picture picture) { renderer.setPicture(picture); mWidth = picture.width(); mHeight = picture.height(); } public void updateYuv(byte[] ydata, byte[] udata, byte[] vdata){ requestRender(); } private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser { public ConfigChooser(int r, int g, int b, int a, int 
depth, int stencil) { mRedSize = r; mGreenSize = g; mBlueSize = b; mAlphaSize = a; mDepthSize = depth; mStencilSize = stencil; } /* This EGL config specification is used to specify 2.0 rendering. * We use a minimum size of 4 bits for red/green/blue, but will * perform actual matching in chooseConfig() below. */ private static int EGL_OPENGL_ES2_BIT = 4; private static int[] s_configAttribs2 = { EGL10.EGL_RED_SIZE, 4, EGL10.EGL_GREEN_SIZE, 4, EGL10.EGL_BLUE_SIZE, 4, EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL10.EGL_NONE }; public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) { /* Get the number of minimally matching EGL configurations */ int[] num_config = new int[1]; egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config); int numConfigs = num_config[0]; if (numConfigs <= 0) { throw new IllegalArgumentException("No configs match configSpec"); } /* Allocate then read the array of minimally matching EGL configs */ EGLConfig[] configs = new EGLConfig[numConfigs]; egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config); // // if (DEBUG) { // printConfigs(egl, display, configs); // } /* Now return the "best" one */ return chooseConfig(egl, display, configs); } public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) { for(EGLConfig config : configs) { int d = findConfigAttrib(egl, display, config, EGL10.EGL_DEPTH_SIZE, 0); int s = findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0); // We need at least mDepthSize and mStencilSize bits if (d < mDepthSize || s < mStencilSize) continue; // We want an *exact* match for red/green/blue/alpha int r = findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0); int g = findConfigAttrib(egl, display, config, EGL10.EGL_GREEN_SIZE, 0); int b = findConfigAttrib(egl, display, config, EGL10.EGL_BLUE_SIZE, 0); int a = findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0); if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == 
mAlphaSize) return config; } return null; } private int findConfigAttrib(EGL10 egl, EGLDisplay display, EGLConfig config, int attribute, int defaultValue) { if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) { return mValue[0]; } return defaultValue; } private void printConfigs(EGL10 egl, EGLDisplay display, EGLConfig[] configs) { int numConfigs = configs.length; Log.w(TAG, String.format("%d configurations", numConfigs)); for (int i = 0; i < numConfigs; i++) { Log.w(TAG, String.format("Configuration %d:\n", i)); printConfig(egl, display, configs[i]); } } private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config) { int[] attributes = { EGL10.EGL_BUFFER_SIZE, EGL10.EGL_ALPHA_SIZE, EGL10.EGL_BLUE_SIZE, EGL10.EGL_GREEN_SIZE, EGL10.EGL_RED_SIZE, EGL10.EGL_DEPTH_SIZE, EGL10.EGL_STENCIL_SIZE, EGL10.EGL_CONFIG_CAVEAT, EGL10.EGL_CONFIG_ID, EGL10.EGL_LEVEL, EGL10.EGL_MAX_PBUFFER_HEIGHT, EGL10.EGL_MAX_PBUFFER_PIXELS, EGL10.EGL_MAX_PBUFFER_WIDTH, EGL10.EGL_NATIVE_RENDERABLE, EGL10.EGL_NATIVE_VISUAL_ID, EGL10.EGL_NATIVE_VISUAL_TYPE, 0x3030, // EGL10.EGL_PRESERVED_RESOURCES, EGL10.EGL_SAMPLES, EGL10.EGL_SAMPLE_BUFFERS, EGL10.EGL_SURFACE_TYPE, EGL10.EGL_TRANSPARENT_TYPE, EGL10.EGL_TRANSPARENT_RED_VALUE, EGL10.EGL_TRANSPARENT_GREEN_VALUE, EGL10.EGL_TRANSPARENT_BLUE_VALUE, 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB, 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA, 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL, 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL, EGL10.EGL_LUMINANCE_SIZE, EGL10.EGL_ALPHA_MASK_SIZE, EGL10.EGL_COLOR_BUFFER_TYPE, EGL10.EGL_RENDERABLE_TYPE, 0x3042 // EGL10.EGL_CONFORMANT }; String[] names = { "EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE", "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE", "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT", "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT", "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH", "EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID", "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES", 
"EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE", "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE", "EGL_TRANSPARENT_GREEN_VALUE", "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB", "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL", "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE", "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE", "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT" }; int[] value = new int[1]; for (int i = 0; i < attributes.length; i++) { int attribute = attributes[i]; String name = names[i]; if ( egl.eglGetConfigAttrib(display, config, attribute, value)) { Log.w(TAG, String.format(" %s: %d\n", name, value[0])); } else { // Log.w(TAG, String.format(" %s: failed\n", name)); while (egl.eglGetError() != EGL10.EGL_SUCCESS); } } } // Subclasses can adjust these values: protected int mRedSize; protected int mGreenSize; protected int mBlueSize; protected int mAlphaSize; protected int mDepthSize; protected int mStencilSize; private int[] mValue = new int[1]; } @Override public void surfaceCreated(SurfaceHolder holder) { super.surfaceCreated(holder); YtxLog.d(TAG,"#### #### surfaceCreated getHeight=" + getHeight() +" getWidth="+getWidth()); if(mSurfaceCallback != null){ mSurfaceCallback.onSurfaceCreated(holder); } } @Override public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { super.surfaceChanged(holder, format, w, h); YtxLog.d(TAG,"#### #### surfaceChanged getHeight=" + getHeight() +" getWidth="+getWidth()); if(mSurfaceCallback != null){ mSurfaceCallback.onSurfaceChanged(holder, format, w, h); } } @Override public void surfaceDestroyed(SurfaceHolder holder) { super.surfaceDestroyed(holder); YtxLog.d(TAG,"#### #### surfaceDestroyed getHeight=" + getHeight() +" getWidth="+getWidth()); if(mSurfaceCallback != null){ mSurfaceCallback.onSurfaceDestroyed(holder); } } public interface ISurfaceCallback { /** * @param holder */ void onSurfaceCreated(@NonNull SurfaceHolder holder); /** * @param holder * @param format could be 0 * @param 
width * @param height */ void onSurfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height); void onSurfaceDestroyed(@NonNull SurfaceHolder holder); } private static void checkEglError(String prompt, EGL10 egl) { int error; while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) { Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error)); } } private static class ContextFactory implements GLSurfaceView.EGLContextFactory { private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098; public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) { Log.w(TAG, "creating OpenGL ES 2.0 context"); checkEglError("Before eglCreateContext", egl); int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list); checkEglError("After eglCreateContext", egl); return context; } public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) { egl.eglDestroyContext(display, context); } } }
apache-2.0
tita/tita
tita-entity/src/main/java/at/ac/tuwien/ifs/tita/entity/interfaces/ITimedBase.java
1464
/**
 * Copyright 2009 TiTA Project, Vienna University of Technology
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package at.ac.tuwien.ifs.tita.entity.interfaces;

import java.util.Date;

/**
 * Contract for timer-like objects that measure the time spent on a
 * task or issue.
 *
 * @author herbert
 */
public interface ITimedBase {

    /**
     * Starts the timer, beginning to measure the effort spent on the
     * task/issue.
     */
    void start();

    /**
     * Stops the timer's measurement.
     */
    void stop();

    /**
     * Tells whether the task has been stopped or is still running.
     *
     * @return true if stopped
     */
    Boolean isStopped();

    /**
     * Supplies the measured duration of the task.
     *
     * @return Long duration
     */
    Long getDuration();

    /**
     * Supplies the moment the task was started.
     *
     * @return Date startTime
     */
    Date getStartTime();

    /**
     * Supplies the moment the task was finished.
     *
     * @return Date endTime
     */
    Date getEndTime();
}
apache-2.0
nextreports/nextreports-server
src/ro/nextreports/server/web/common/table/SortableDataAdapter.java
3262
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ro.nextreports.server.web.common.table; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.wicket.extensions.markup.html.repeater.util.SortParam; import org.apache.wicket.extensions.markup.html.repeater.util.SortableDataProvider; import org.apache.wicket.markup.repeater.data.IDataProvider; import org.apache.wicket.model.IModel; import org.springframework.beans.support.MutableSortDefinition; import org.springframework.beans.support.PropertyComparator; import org.springframework.beans.support.SortDefinition; /** * @author Decebal Suiu */ public class SortableDataAdapter<T> extends SortableDataProvider<T, String> { private static final long serialVersionUID = 1L; private IDataProvider<T> provider; private Map<String, Comparator<T>> comparators; @SuppressWarnings("unchecked") public SortableDataAdapter(IDataProvider<T> provider) { this(provider, Collections.EMPTY_MAP); } public SortableDataAdapter(IDataProvider<T> provider, Map<String, Comparator<T>> comparators) { this.provider = provider; this.comparators = comparators; } @Override public Iterator<T> iterator(long 
first, long count) { long size = provider.size(); List<T> resources = new ArrayList<T>((int) size); Iterator<? extends T> iter = provider.iterator(0, size); while (iter.hasNext()) { resources.add(iter.next()); } if (comparators != null) { SortParam<String> sortParam = getSort(); if (sortParam != null) { String sortProperty = sortParam.getProperty(); if (sortProperty != null) { Comparator<T> comparator = comparators.get(sortProperty); if (comparator != null) { Collections.sort(resources, comparator); if (getSort().isAscending() == false) { Collections.reverse(resources); } } else { SortDefinition sortDefinition = new MutableSortDefinition(sortProperty, true, getSort().isAscending()); PropertyComparator.sort(resources, sortDefinition); } } } } return Collections.unmodifiableList(resources.subList((int) first, (int) (first + count))).iterator(); } @SuppressWarnings("unchecked") @Override public IModel<T> model(Object object) { return provider.model((T) object); } @Override public long size() { return provider.size(); } @Override public void detach() { provider.detach(); } }
apache-2.0
lmjacksoniii/hazelcast
hazelcast/src/main/java/com/hazelcast/map/impl/query/QueryEventFilter.java
2805
/* * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map.impl.query; import com.hazelcast.map.impl.EntryEventFilter; import com.hazelcast.map.impl.MapDataSerializerHook; import com.hazelcast.nio.ObjectDataInput; import com.hazelcast.nio.ObjectDataOutput; import com.hazelcast.nio.serialization.Data; import com.hazelcast.query.Predicate; import com.hazelcast.query.impl.QueryableEntry; import java.io.IOException; import java.util.Map; public class QueryEventFilter extends EntryEventFilter { private Predicate predicate; public QueryEventFilter() { } public QueryEventFilter(boolean includeValue, Data key, Predicate predicate) { super(includeValue, key); this.predicate = predicate; } public Object getPredicate() { return predicate; } @Override public boolean eval(Object arg) { QueryableEntry entry = (QueryableEntry) arg; Data keyData = entry.getKeyData(); return (key == null || key.equals(keyData)) && predicate.apply((Map.Entry) arg); } @Override public void writeData(ObjectDataOutput out) throws IOException { super.writeData(out); out.writeObject(predicate); } @Override public void readData(ObjectDataInput in) throws IOException { super.readData(in); predicate = in.readObject(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } QueryEventFilter that = (QueryEventFilter) o; if (!super.equals(o)) { return false; 
} if (!predicate.equals(that.predicate)) { return false; } return true; } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + predicate.hashCode(); return result; } @Override public String toString() { return "QueryEventFilter{" + "predicate=" + predicate + '}'; } @Override public int getId() { return MapDataSerializerHook.QUERY_EVENT_FILTER; } }
apache-2.0
google/fest
third_party/fest-swing/src/test/java/org/fest/swing/fixture/AbstractContainerFixture_tree_Test.java
3879
/* * Created on Jun 7, 2009 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright @2009-2016 the FEST authors. */ package org.fest.swing.fixture; import static org.fest.assertions.Assertions.assertThat; import static org.fest.swing.edt.GuiActionRunner.execute; import static org.fest.swing.test.core.NeverMatchingComponentMatcher.neverMatches; import javax.annotation.Nonnull; import javax.swing.JTree; import javax.swing.tree.DefaultMutableTreeNode; import org.fest.swing.core.GenericTypeMatcher; import org.fest.swing.edt.GuiQuery; import org.fest.swing.exception.ComponentLookupException; import org.fest.swing.test.core.RobotBasedTestCase; import org.fest.swing.test.swing.TestWindow; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; /** * Tests lookups of {@code JTree}s in {@link AbstractContainerFixture}. 
* * @author Alex Ruiz */ public class AbstractContainerFixture_tree_Test extends RobotBasedTestCase { @Rule public final ExpectedException thrown = ExpectedException.none(); private FakeContainerFixture fixture; private MyWindow window; @Override protected final void onSetUp() { window = MyWindow.createNew(getClass()); fixture = new FakeContainerFixture(robot, window); } @Test public void should_find_visible_JTree_by_name() { robot.showWindow(window); JTreeFixture tree = fixture.tree("expandMeTree"); assertThat(tree.target()).isSameAs(window.tree); } @Test public void should_fail_if_visible_JTree_not_found_by_name() { thrown.expect(ComponentLookupException.class); thrown.expectMessage("Unable to find component using matcher"); thrown.expectMessage("name='myTree', type=javax.swing.JTree, requireShowing=true"); fixture.tree("myTree"); } @Test public void should_find_visible_JTree_by_type() { robot.showWindow(window); JTreeFixture tree = fixture.tree(); assertThat(tree.target()).isSameAs(window.tree); } @Test public void should_fail_if_visible_JTree_not_found_by_type() { thrown.expect(ComponentLookupException.class); thrown.expectMessage("Unable to find component using matcher"); thrown.expectMessage("type=javax.swing.JTree, requireShowing=true"); fixture.tree(); } @Test public void should_find_visible_JTree_by_Matcher() { robot.showWindow(window); JTreeFixture tree = fixture.tree(new GenericTypeMatcher<JTree>(JTree.class) { @Override protected boolean isMatching(@Nonnull JTree t) { return "expandMeTree".equals(t.getName()); } }); assertThat(tree.target()).isSameAs(window.tree); } @Test public void should_fail_if_visible_JTree_not_found_by_Matcher() { thrown.expect(ComponentLookupException.class); thrown.expectMessage("Unable to find component using matcher"); fixture.tree(neverMatches(JTree.class)); } private static class MyWindow extends TestWindow { final JTree tree = new JTree(new DefaultMutableTreeNode("root")); static MyWindow createNew(final Class<?> testClass) 
{ return execute(new GuiQuery<MyWindow>() { @Override protected MyWindow executeInEDT() { return new MyWindow(testClass); } }); } private MyWindow(Class<?> testClass) { super(testClass); tree.setName("expandMeTree"); addComponents(tree); } } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-lexruntimev2/src/main/java/com/amazonaws/services/lexruntimev2/model/transform/ConflictExceptionUnmarshaller.java
2793
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.lexruntimev2.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.lexruntimev2.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import com.fasterxml.jackson.core.JsonToken; import static com.fasterxml.jackson.core.JsonToken.*; /** * ConflictException JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ConflictExceptionUnmarshaller extends EnhancedJsonErrorUnmarshaller { private ConflictExceptionUnmarshaller() { super(com.amazonaws.services.lexruntimev2.model.ConflictException.class, "ConflictException"); } @Override public com.amazonaws.services.lexruntimev2.model.ConflictException unmarshallFromContext(JsonUnmarshallerContext context) throws Exception { com.amazonaws.services.lexruntimev2.model.ConflictException conflictException = new com.amazonaws.services.lexruntimev2.model.ConflictException(null); int originalDepth = context.getCurrentDepth(); String currentParentElement = context.getCurrentParentElement(); int targetDepth = originalDepth + 1; JsonToken token = context.getCurrentToken(); if (token == null) token = context.nextToken(); if (token == VALUE_NULL) { return null; } while (true) { if (token == null) break; if (token == FIELD_NAME || token == START_OBJECT) { } else if (token == END_ARRAY || token == END_OBJECT) { if 
(context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) { if (context.getCurrentDepth() <= originalDepth) break; } } token = context.nextToken(); } return conflictException; } private static ConflictExceptionUnmarshaller instance; public static ConflictExceptionUnmarshaller getInstance() { if (instance == null) instance = new ConflictExceptionUnmarshaller(); return instance; } }
apache-2.0
leiyong0326/phone
phone-common/src/main/java/com/ly/base/common/util/MyBatisUtil.java
4191
package com.ly.base.common.util; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; import org.apache.commons.lang3.StringUtils; import org.springframework.util.ReflectionUtils.FieldCallback; import org.springframework.util.ReflectionUtils.FieldFilter; import com.alibaba.fastjson.JSON; import com.ly.base.common.model.Model; import com.ly.base.common.model.WechatPayModel; /** * MyBatis工具类 * @author LeiYong * */ public class MyBatisUtil { private static final String splitCharacter = ","; // private static final String defaultFilterRegex = ".*([d|D]ate|[t|T]ime).*"; private static final String EQUALS = "="; private static final FieldFilter defaultFilter = new FieldFilter() { @Override public boolean matches(Field field) { // if(field.getName().matches(defaultFilterRegex)){ // return false; // } if(Boolean.class.isAssignableFrom(field.getType())){ return false; } if(Date.class.isAssignableFrom(field.getType())){ return false; } return true; } }; /** * 解析String为Model,切割符默认为"," * @param texts 如"pk,=,111","age,>,15" * @return */ public static List<Model> parseBase(String... 
texts){ List<Model> list = new ArrayList<>(); if (texts!=null&&texts.length>0) { for (String text : texts) { if (StringUtils.isNotBlank(text)) { String[] arr = StringUtil.indexOf(text, splitCharacter, 0); String[] arr2 = StringUtil.indexOf(arr[1], splitCharacter, 0); list.add(new Model(arr[0],arr2[0], arr2[1])); } } } return list; } /** * 解析对象为Model * 默认过滤date和boolean类型字段 * @param obj * @param parseSuper 是否解析父类 * @return */ public static List<Model> parseByObject(Object obj,boolean parseSuper){ return parseByObject(obj, parseSuper, defaultFilter); } /** * 解析对象为Model * 自定义filter * @param obj * @param parseSuper 是否解析父类 * @return */ public static List<Model> parseByObject(Object obj,boolean parseSuper,FieldFilter ff){ List<Model> list = new ArrayList<>(); if (obj==null) { return list; } //解析Field FieldCallback fc = new FieldCallback() { @Override public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException { if (ff != null && !ff.matches(field)) { return; } Model m = parseByField(obj, field); if (m!=null) { list.add(m); } } }; if (parseSuper) { ReflectionUtil.doWithFields(obj.getClass(),fc); }else{ ReflectionUtil.doWithLocalFields(obj.getClass(),fc); } return list; } public static void main(String[] args) { WechatPayModel model = new WechatPayModel(); model.setAppId("123"); model.setAppKey("456"); model.setCommodityName("thank"); System.out.println(JSON.toJSONString(parseByObject(model, true))); } /** * 将指定对象的field转为Model * @param obj * @param f * @return */ public static Model parseByField(Object obj, Field f) { if (f == null || obj == null) { return null; } if (FieldUtil.isFinalOrStatic(f)) { return null; } Object value = ReflectionUtil.getFieldAndSetAccessible(f, obj); if (value==null||value.toString().isEmpty()) { return null; } return new Model(FieldUtil.fieldNameToDbName(f.getName()), EQUALS, value); } /** * 解析String为Model,默认使用,分割 * 例如:"name","in","张三,李四,王五" * @param column 列 * @param operator 操作符 * @param values 值集 * @return */ public 
static Model parseList(String column,String operator,String values){ return parseList(column, operator, values, splitCharacter); } /** * 解析String为Model * 例如:"name","in","张三#李四#王五","#" * @param column 列 * @param operator 操作符 * @param values 值集 * @param split 切割符,支持正则 * @return */ public static Model parseList(String column,String operator,String values,String split){ if (StringUtils.isNotBlank(values)) { List<String> list = Arrays.asList(values.split(split)); return new Model(column,operator,list); } return null; } }
apache-2.0
aspnet/AspNetCore
src/SignalR/clients/java/signalr/core/src/main/java/com/microsoft/signalr/InvocationMessage.java
1445
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. package com.microsoft.signalr; import java.util.Collection; import java.util.Map; public class InvocationMessage extends HubMessage { int type = HubMessageType.INVOCATION.value; private Map<String, String> headers; private final String invocationId; private final String target; private final Object[] arguments; private Collection<String> streamIds; public InvocationMessage(Map<String, String> headers, String invocationId, String target, Object[] args, Collection<String> streamIds) { if (headers != null && !headers.isEmpty()) { this.headers = headers; } this.invocationId = invocationId; this.target = target; this.arguments = args; if (streamIds != null && !streamIds.isEmpty()) { this.streamIds = streamIds; } } public Map<String, String> getHeaders() { return headers; } public String getInvocationId() { return invocationId; } public String getTarget() { return target; } public Object[] getArguments() { return arguments; } public Collection<String> getStreamIds() { return streamIds; } @Override public HubMessageType getMessageType() { return HubMessageType.INVOCATION; } }
apache-2.0
paulopwm/Joomla-Day-Brasil-2011
APP/src/com/JoomlaDay/Internet.java
8186
package com.JoomlaDay;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.sql.Date;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.http.util.ByteArrayBuffer;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.content.Context;
import android.net.ConnectivityManager;
import android.util.Log;

/**
 * Network helper for the app: checks connectivity, downloads remote content
 * with a file-cache fallback, and maps the JSON responses into the lists the
 * UI consumes (tweets, news, schedule).
 *
 * Each list entry carries an "internet" flag ("true"/"false" as a String in
 * {@link #JSON}, Boolean in the typed getters) so callers can tell a real
 * payload from an offline placeholder.
 */
public class Internet {

    /**
     * Returns true when an active network is available and connected.
     *
     * @param context Android context used to reach the ConnectivityManager
     */
    public boolean checkInternetConnection(Context context) {
        ConnectivityManager conMgr =
                (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        // ARE WE CONNECTED TO THE NET
        if (conMgr.getActiveNetworkInfo() != null
                && conMgr.getActiveNetworkInfo().isAvailable()
                && conMgr.getActiveNetworkInfo().isConnected()) {
            return true;
        } else {
            Log.v("INTERNET", "Internet Connection Not Present");
            return false;
        }
    }

    /**
     * Fetches {@code site}; on success the body is cached to disk, otherwise
     * the cached copy is served. Returns the sentinel string "seminternet"
     * ("no internet") when neither the network nor the cache can supply data.
     *
     * @param context Android context (used for cache location + connectivity)
     * @param site    URL to download
     * @return response body, cached body, or the "seminternet" sentinel
     */
    public String getInternet(Context context, String site) {
        // The data that is retrieved
        String result = null;
        FileCache cache = new FileCache(context);
        File cacheFile = cache.getFile(site);
        String fileName = cacheFile.getAbsolutePath();
        if (this.checkInternetConnection(context)) {
            if (!cache.verifyCache(fileName)) {
                try {
                    // This assumes that you have a URL from which the response will come
                    URL url = new URL(site);
                    // Open a connection to the URL and obtain a buffered input stream
                    URLConnection connection = url.openConnection();
                    InputStream inputStream = connection.getInputStream();
                    BufferedInputStream bufferedInput = new BufferedInputStream(inputStream, 8);
                    // Read the response into a byte array, one byte at a time
                    ByteArrayBuffer byteArray = new ByteArrayBuffer(50);
                    int current = 0;
                    while ((current = bufferedInput.read()) != -1) {
                        byteArray.append((byte) current);
                    }
                    // Construct a String object from the byte array containing the response
                    // NOTE(review): uses the platform default charset — presumably the
                    // server sends UTF-8; confirm against the backend.
                    result = new String(byteArray.toByteArray());
                    try {
                        if (cache.saveFile(result)) {
                            Log.i("INTERNET", "Cache salvo!");
                        } else {
                            Log.i("INTERNET", "Erro no salvar o cache!");
                        }
                    } catch (Exception e) {
                        result = "seminternet";
                        Log.e("INTERNET", e.toString());
                    }
                } catch (Exception e) {
                    result = "seminternet";
                    Log.e("INTERNET", e.toString());
                }
            } else {
                // Cache is still fresh: serve it without touching the network.
                try {
                    result = cache.readFile(fileName);
                } catch (Exception e) {
                    result = "seminternet";
                    Log.e("INTERNET", e.toString());
                }
            }
        } else {
            // Offline: the cache is the only option.
            try {
                result = cache.readFile(fileName);
                if (result.length() == 0) {
                    result = "seminternet";
                }
            } catch (Exception e) {
                result = "seminternet";
                Log.e("INTERNET", e.toString());
            }
        }
        Log.i("INTERNET", result);
        return (result);
    }

    /**
     * Downloads a JSON array from {@code url} and flattens every element into
     * a String->String map. Each map gets an extra "internet" key: "true" for
     * real data, "false" for the single offline placeholder entry.
     *
     * @return never empty; contains at least the offline placeholder
     */
    public ArrayList<Map<String, String>> JSON(String url, Context context) {
        ArrayList<Map<String, String>> lista = new ArrayList<Map<String, String>>();
        JSONObject myJSON = new JSONObject();
        JSONArray names = null;
        String restWebServerResponse = getInternet(context, url);
        // BUGFIX: the sentinel must be compared with equals(); the original used
        // reference comparison (!=), which fails whenever the sentinel value is
        // read back from the disk cache as a distinct String instance.
        if (!"seminternet".equals(restWebServerResponse)) {
            try {
                JSONArray myJSONArray = new JSONArray(restWebServerResponse);
                for (int i = 0; i < myJSONArray.length(); i++) {
                    Map<String, String> data = new HashMap<String, String>();
                    myJSON = new JSONObject(myJSONArray.getString(i));
                    names = myJSON.names();
                    for (int j = 0; j < names.length(); j++) {
                        Log.i("JSON Items", names.getString(j) + ", " + myJSON.getString(names.getString(j)));
                        data.put(names.getString(j), myJSON.getString(names.getString(j)));
                    }
                    data.put("internet", "true");
                    lista.add(data);
                }
                Log.i("JSON Lista", lista.toString());
            } catch (JSONException e) {
                Log.e("JSON", e.getMessage());
            }
        } else {
            Map<String, String> data = new HashMap<String, String>();
            data.put("internet", "false");
            lista.add(data);
        }
        if (lista.size() > 0) {
            return lista;
        } else {
            // JSON parsing failed and produced nothing: fall back to the
            // offline placeholder so callers never see an empty list.
            Map<String, String> data = new HashMap<String, String>();
            data.put("internet", "false");
            lista.add(data);
            return lista;
        }
    }

    /**
     * Fetches the tweet feed and converts each raw JSON entry into a typed
     * map (text/author/image/datetime) via {@link Tweet}.
     */
    public List<Map<String, Object>> getTwitter(String url, Context context) {
        ArrayList<Map<String, String>> json = this.JSON(url, context);
        List<Map<String, Object>> tweets = new ArrayList<Map<String, Object>>();
        Map<String, Object> data;
        if (json.get(0).get("internet").equals("true")) {
            for (int i = 0; i < json.size(); i++) {
                try {
                    data = new HashMap<String, Object>();
                    Log.i("JSON Lista Recebida", i + " -> " + json.get(i).toString());
                    Tweet tweet = new Tweet(json.get(i).get("texto"),
                            json.get(i).get("autor"),
                            json.get(i).get("imagem"),
                            json.get(i).get("date"));
                    data.put("internet", true);
                    data.put("text", tweet.getText());
                    data.put("author", tweet.getAuthor());
                    data.put("image", tweet.getImage());
                    data.put("datetime", tweet.getDate());
                    tweets.add(data);
                } catch (Exception e) {
                    // BUGFIX: e.getStackTrace() stringifies to an array identity hash;
                    // pass the throwable so Log prints the actual stack trace.
                    Log.e("JSON - Tweets", String.valueOf(e.getMessage()), e);
                }
            }
        } else {
            data = new HashMap<String, Object>();
            data.put("internet", false);
            tweets.add(data);
        }
        return tweets;
    }

    /**
     * Fetches the news feed and converts each raw JSON entry into a typed
     * map (title/author/description/image/datetime/link) via {@link Noticias}.
     */
    public List<Map<String, Object>> getNoticias(String url, Context context) {
        ArrayList<Map<String, String>> json = this.JSON(url, context);
        List<Map<String, Object>> noticias = new ArrayList<Map<String, Object>>();
        Map<String, Object> data;
        if (json.get(0).get("internet").equals("true")) {
            for (int i = 0; i < json.size(); i++) {
                try {
                    data = new HashMap<String, Object>();
                    Log.i("JSON Lista Recebida", i + " -> " + json.get(i).toString());
                    Noticias noticia = new Noticias(json.get(i).get("title"),
                            json.get(i).get("description"),
                            json.get(i).get("author"),
                            json.get(i).get("link"),
                            json.get(i).get("image"),
                            json.get(i).get("datetime"));
                    data.put("internet", true);
                    data.put("title", noticia.getTitle());
                    data.put("author", noticia.getAuthor());
                    data.put("description", noticia.getDescription());
                    data.put("image", noticia.getImage());
                    data.put("datetime", noticia.getDate());
                    data.put("link", noticia.getLink());
                    noticias.add(data);
                } catch (Exception e) {
                    // BUGFIX: log the throwable itself instead of the useless
                    // e.getStackTrace() array toString().
                    Log.e("JSON - Noticias", String.valueOf(e.getMessage()), e);
                }
            }
        } else {
            data = new HashMap<String, Object>();
            data.put("internet", false);
            noticias.add(data);
        }
        return noticias;
    }

    /**
     * Fetches the event schedule and converts each raw JSON entry into a
     * typed map (title/local/description/datetime) via {@link Evento}.
     */
    public List<Map<String, Object>> getProgramacao(String url, Context context) {
        ArrayList<Map<String, String>> json = this.JSON(url, context);
        List<Map<String, Object>> programacao = new ArrayList<Map<String, Object>>();
        Map<String, Object> data;
        if (json.get(0).get("internet").equals("true")) {
            for (int i = 0; i < json.size(); i++) {
                try {
                    data = new HashMap<String, Object>();
                    Log.i("JSON Lista Recebida", i + " -> " + json.get(i).toString());
                    Evento evento = new Evento(json.get(i).get("title"),
                            json.get(i).get("text"),
                            json.get(i).get("local"),
                            json.get(i).get("datetime"));
                    data.put("internet", true);
                    data.put("title", evento.getTitle());
                    data.put("local", evento.getLocal());
                    data.put("description", evento.getDescription());
                    data.put("datetime", evento.getDate());
                    programacao.add(data);
                } catch (Exception e) {
                    // BUGFIX: log the throwable itself instead of the useless
                    // e.getStackTrace() array toString().
                    Log.e("JSON - Programação", String.valueOf(e.getMessage()), e);
                }
            }
        } else {
            data = new HashMap<String, Object>();
            data.put("internet", false);
            programacao.add(data);
        }
        return programacao;
    }
}
apache-2.0
jjj117/airavata
modules/test-suite/multi-tenanted-airavata/src/main/java/org/apache/airavata/testsuite/multitenantedairavata/ApplicationRegister.java
26525
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.airavata.testsuite.multitenantedairavata;

import org.apache.airavata.api.Airavata;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.model.security.AuthzToken;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.testsuite.multitenantedairavata.utils.FrameworkUtils;
import org.apache.airavata.testsuite.multitenantedairavata.utils.TestFrameworkConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Registers test application modules, interfaces and deployments (Amber,
 * Ultrascan, Echo, LAMMPS) with an Airavata server, once per gateway,
 * skipping the gateways listed in the framework properties.
 *
 * <p>The ids returned by the server are collected in
 * {@code applicationInterfaceListPerGateway} / {@code applicationDeployementListPerGateway}
 * (interface-or-deployment id -&gt; gateway id) so callers can inspect or
 * clean up what was registered.</p>
 */
public class ApplicationRegister {
    private Airavata.Client airavata;
    private List<Gateway> allGateways;
    // Maps registered interface id -> owning gateway id.
    private Map<String, String> applicationInterfaceListPerGateway;
    // Maps registered deployment id -> owning gateway id.
    // NOTE(review): "Deployement" is a long-standing spelling kept for API compatibility.
    private Map<String, String> applicationDeployementListPerGateway;
    private final static Logger logger = LoggerFactory.getLogger(ApplicationRegister.class);
    // Compute resource ids resolved by name in addApplications(); any of these
    // stays null if the corresponding resource is not registered on the server.
    private String stampedeResourceId;
    private String trestlesResourceId;
    private String br2ResourceId;
    private String gordenResourceId;
    private String alamoResourceId;
    private List<String> gatewaysToAvoid;
    private AuthzToken authzToken;

    /**
     * Builds the register and eagerly fetches all gateways from the server.
     *
     * @param airavata connected Airavata Thrift client
     * @param props    framework properties (supplies the gateway skip list)
     * @throws Exception if the gateway list cannot be fetched
     */
    public ApplicationRegister(Airavata.Client airavata, TestFrameworkProps props) throws Exception {
        this.airavata = airavata;
        allGateways = getAllGateways(airavata);
        applicationInterfaceListPerGateway = new HashMap<String, String>();
        applicationDeployementListPerGateway = new HashMap<String, String>();
        FrameworkUtils frameworkUtils = FrameworkUtils.getInstance();
        gatewaysToAvoid = frameworkUtils.getGatewayListToAvoid(props.getSkippedGateways());
        authzToken = new AuthzToken("emptyToken");
    }

    /**
     * Fetches all gateways known to the server.
     * NOTE(review): wraps and rethrows as a raw Exception; the original cause
     * is preserved, but a more specific exception type would be preferable.
     */
    public List<Gateway> getAllGateways(Airavata.Client client) throws Exception {
        try {
            return client.getAllGateways(authzToken);
        } catch (Exception e) {
            logger.error("Error while getting all the gateways", e);
            throw new Exception("Error while getting all the gateways", e);
        }
    }

    /**
     * Resolves the known compute resource ids by display name, then registers
     * the enabled applications. Currently only Ultrascan is registered; the
     * other registrations are intentionally commented out.
     */
    public void addApplications() throws Exception {
        Map<String, String> allComputeResourceNames = airavata.getAllComputeResourceNames(authzToken);
        System.out.println("All compute resources :" + allComputeResourceNames.size());
        for (String resourceId : allComputeResourceNames.keySet()) {
            String resourceName = allComputeResourceNames.get(resourceId);
            if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.STAMPEDE_RESOURCE_NAME)) {
                stampedeResourceId = resourceId;
            } else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.TRESTLES_RESOURCE_NAME)) {
                trestlesResourceId = resourceId;
            } else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.BR2_RESOURCE_NAME)) {
                br2ResourceId = resourceId;
            } else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.GORDEN_RESOURCE_NAME)) {
                gordenResourceId = resourceId;
            } else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.ALAMO_RESOURCE_NAME)) {
                alamoResourceId = resourceId;
            }
        }
        addUltrascanApplication();
        // addAmberApplication();
        // addEchoApplication();
        // addLAMMPSApplication();
    }

    /**
     * Registers the Amber module, interface and three deployments
     * (Stampede, Trestles, BigRed2) for every non-skipped gateway.
     */
    protected void addAmberApplication() throws Exception {
        for (Gateway gateway : allGateways) {
            // NOTE(review): "ovoidGateway" is presumably a typo for "avoidGateway".
            boolean isgatewayValid = true;
            for (String ovoidGateway : gatewaysToAvoid) {
                if (gateway.getGatewayId().equals(ovoidGateway)) {
                    isgatewayValid = false;
                    break;
                }
            }
            if (isgatewayValid) {
                // add amber module
                String amberModuleId = airavata.registerApplicationModule(authzToken, gateway.getGatewayId(),
                        createApplicationModule(TestFrameworkConstants.AppcatalogConstants.AMBER_APP_NAME, "12.0",
                                TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION));
                System.out.println("Amber Module Id " + amberModuleId);
                // add amber interface
                String amberInterfaceId = registerAmberInterface(gateway, amberModuleId);
                applicationInterfaceListPerGateway.put(amberInterfaceId, gateway.getGatewayId());
                // add amber deployment
                List<String> moduleLoadCMDs = new ArrayList<String>();
                moduleLoadCMDs.add("module load amber");
                ApplicationDeploymentDescription amberStampedeDeployment = createApplicationDeployment(amberModuleId,
                        stampedeResourceId,
                        "/opt/apps/intel13/mvapich2_1_9/amber/12.0/bin/sander.MPI -O",
                        ApplicationParallelismType.MPI,
                        TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION, moduleLoadCMDs, null, null);
                String amberStampedeAppDeployId = airavata.registerApplicationDeployment(authzToken,
                        gateway.getGatewayId(), amberStampedeDeployment);
                String amberTrestlesAppDeployId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(amberModuleId, trestlesResourceId,
                                "/opt/amber/bin/sander.MPI -O", ApplicationParallelismType.MPI,
                                TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION, moduleLoadCMDs, null, null));
                // BigRed2 needs a compiler-environment swap before loading amber.
                List<String> amberModuleLoadCMDsBr2 = new ArrayList<String>();
                amberModuleLoadCMDsBr2.add("module load amber/gnu/mpi/12");
                amberModuleLoadCMDsBr2.add("module swap PrgEnv-cray PrgEnv-gnu");
                String amberBr2AppDeployId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(amberModuleId, br2ResourceId,
                                "/N/soft/cle4/amber/gnu/mpi/12/amber12/bin/sander.MPI -O",
                                ApplicationParallelismType.MPI,
                                TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION,
                                amberModuleLoadCMDsBr2, null, null));
                applicationDeployementListPerGateway.put(amberStampedeAppDeployId, gateway.getGatewayId());
                applicationDeployementListPerGateway.put(amberTrestlesAppDeployId, gateway.getGatewayId());
                applicationDeployementListPerGateway.put(amberBr2AppDeployId, gateway.getGatewayId());
            }
        }
    }

    /**
     * Registers the Ultrascan module, interface and four deployments
     * (Stampede, Trestles, Gordon, Alamo) for every non-skipped gateway.
     */
    protected void addUltrascanApplication() throws Exception {
        for (Gateway gateway : allGateways) {
            boolean isgatewayValid = true;
            for (String ovoidGateway : gatewaysToAvoid) {
                if (gateway.getGatewayId().equals(ovoidGateway)) {
                    isgatewayValid = false;
                    break;
                }
            }
            if (isgatewayValid) {
                // add ultrascan module (comments below were copy-pasted from the Amber variant)
                String ultrascanModuleId = airavata.registerApplicationModule(authzToken, gateway.getGatewayId(),
                        createApplicationModule(TestFrameworkConstants.AppcatalogConstants.ULTRASCAN, "1.0",
                                TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION));
                System.out.println("Ultrascan module Id " + ultrascanModuleId);
                // add ultrascan interface
                String ultrascanInterfaceId = registerUltrascanInterface(gateway, ultrascanModuleId);
                applicationInterfaceListPerGateway.put(ultrascanInterfaceId, gateway.getGatewayId());
                // add ultrascan deployments
                ApplicationDeploymentDescription ultrascanStampedeDeployment = createApplicationDeployment(ultrascanModuleId,
                        stampedeResourceId, "/home1/01623/us3/bin/us_mpi_analysis", ApplicationParallelismType.MPI,
                        TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION, null, null, null);
                String ultrascanStampedeAppDeployId = airavata.registerApplicationDeployment(authzToken,
                        gateway.getGatewayId(), ultrascanStampedeDeployment);
                String ultrascanTrestlesAppDeployId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(ultrascanModuleId, trestlesResourceId,
                                "/home/us3/trestles/bin/us_mpi_analysis", ApplicationParallelismType.MPI,
                                TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION, null, null, null));
                String ultrascanGordenAppDepId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(ultrascanModuleId, gordenResourceId,
                                "/home/us3/gordon/bin/us_mpi_analysis", ApplicationParallelismType.MPI,
                                TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION, null, null, null));
                // Alamo additionally needs its toolchain modules loaded, and runs OPENMP.
                List<String> alamoModules = new ArrayList<>();
                alamoModules.add("module load intel/2015/64");
                alamoModules.add("module load openmpi/intel/1.8.4");
                alamoModules.add("module load qt4/4.8.6");
                alamoModules.add("module load ultrascan3/3.3");
                String ultrascanAlamoAppId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(ultrascanModuleId, alamoResourceId,
                                "/home/us3/bin/us_mpi_analysis", ApplicationParallelismType.OPENMP,
                                TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION,
                                alamoModules, null, null));
                applicationDeployementListPerGateway.put(ultrascanStampedeAppDeployId, gateway.getGatewayId());
                applicationDeployementListPerGateway.put(ultrascanTrestlesAppDeployId, gateway.getGatewayId());
                applicationDeployementListPerGateway.put(ultrascanGordenAppDepId, gateway.getGatewayId());
                applicationDeployementListPerGateway.put(ultrascanAlamoAppId, gateway.getGatewayId());
            }
        }
    }

    /**
     * Registers the Ultrascan application interface (one tar input plus
     * walltime/mgroupcount options; a tar output plus STDOUT/STDERR)
     * and returns the new interface id.
     */
    private String registerUltrascanInterface(Gateway gateway, String ultrascanModuleId) throws org.apache.thrift.TException {
        List<String> appModules = new ArrayList<String>();
        appModules.add(ultrascanModuleId);
        InputDataObjectType input1 = createAppInput("input", null,
                DataType.URI, null, 1, true, true, false, "Input tar file", null);
        InputDataObjectType input2 = createAppInput("mgroupcount", "-mgroupcount=1",
                DataType.STRING, null, 3, true, true, false, "mgroupcount", null);
        InputDataObjectType input3 = createAppInput("walltime", "-walltime=60",
                DataType.STRING, null, 2, true, true, false, "walltime", null);
        List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
        applicationInputs.add(input1);
        applicationInputs.add(input2);
        applicationInputs.add(input3);
        OutputDataObjectType output1 = createAppOutput("ultrascanOutput", "analysis-results.tar",
                DataType.URI, true, false, null);
        output1.setLocation("output");
        OutputDataObjectType output2 = createAppOutput("STDOUT", null, DataType.STDOUT, true, false, null);
        OutputDataObjectType output3 = createAppOutput("STDERR", null, DataType.STDERR, true, false, null);
        List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
        applicationOutputs.add(output1);
        applicationOutputs.add(output2);
        applicationOutputs.add(output3);
        String ultrascanAppId = airavata.registerApplicationInterface(authzToken, gateway.getGatewayId(),
                createApplicationInterfaceDescription(TestFrameworkConstants.AppcatalogConstants.ULTRASCAN,
                        TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION,
                        appModules, applicationInputs, applicationOutputs));
        System.out.println("Ultrascan Application Interface Id " + ultrascanAppId);
        return ultrascanAppId;
    }

    /**
     * Registers the Amber application interface (three URI inputs; four URI
     * outputs plus STDOUT/STDERR) and returns the new interface id.
     */
    private String registerAmberInterface(Gateway gateway, String amberModuleId) throws org.apache.thrift.TException {
        List<String> appModules = new ArrayList<String>();
        appModules.add(amberModuleId);
        InputDataObjectType input1 = createAppInput("heatRst", null,
                DataType.URI, "-c", 1, true, true, false,
                "Heating up the system equilibration stage - 02_Heat.rst", null);
        InputDataObjectType input2 = createAppInput("prodIn", null,
                DataType.URI, "-i ", 2, true, true, false,
                "Constant pressure and temperature for production stage - 03_Prod.in", null);
        InputDataObjectType input3 = createAppInput("prmtop", null,
                DataType.URI, "-p", 3, true, true, false,
                "Parameter and Topology coordinates - prmtop", null);
        List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
        applicationInputs.add(input1);
        applicationInputs.add(input2);
        applicationInputs.add(input3);
        OutputDataObjectType output1 = createAppOutput("AMBER_Execution_Summary", "03_Prod.info",
                DataType.URI, true, true, "-inf");
        OutputDataObjectType output2 = createAppOutput("AMBER_Execution_log", "03_Prod.out",
                DataType.URI, true, true, "-o");
        OutputDataObjectType output3 = createAppOutput("AMBER_Trajectory_file", "03_Prod.mdcrd",
                DataType.URI, true, true, "-x");
        OutputDataObjectType output4 = createAppOutput("AMBER_Restart_file", "03_Prod.rst",
                DataType.URI, true, true, " -r");
        OutputDataObjectType output5 = createAppOutput("STDOUT", null, DataType.STDOUT, true, false, null);
        OutputDataObjectType output6 = createAppOutput("STDERR", null, DataType.STDERR, true, false, null);
        List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
        applicationOutputs.add(output1);
        applicationOutputs.add(output2);
        applicationOutputs.add(output3);
        applicationOutputs.add(output4);
        applicationOutputs.add(output5);
        applicationOutputs.add(output6);
        String amberInterfaceId = airavata.registerApplicationInterface(authzToken, gateway.getGatewayId(),
                createApplicationInterfaceDescription(TestFrameworkConstants.AppcatalogConstants.AMBER_APP_NAME,
                        TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION,
                        appModules, applicationInputs, applicationOutputs));
        System.out.println("Amber Application Interface Id " + amberInterfaceId);
        return amberInterfaceId;
    }

    /**
     * Registers the Echo application interface (one string input;
     * STDOUT/STDERR outputs) and returns the new interface id.
     */
    private String registerEchoInterface(Gateway gateway, String moduleId) throws org.apache.thrift.TException {
        List<String> appModules = new ArrayList<String>();
        appModules.add(moduleId);
        InputDataObjectType input1 = createAppInput("input_to_Echo", null,
                DataType.STRING, null, 1, true, true, false, "Sample input to Echo", null);
        List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
        applicationInputs.add(input1);
        OutputDataObjectType output1 = createAppOutput("STDOUT", null, DataType.STDOUT, true, false, null);
        OutputDataObjectType output2 = createAppOutput("STDERR", null, DataType.STDERR, true, false, null);
        List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
        applicationOutputs.add(output1);
        applicationOutputs.add(output2);
        String echoInterfaceId = airavata.registerApplicationInterface(authzToken, gateway.getGatewayId(),
                createApplicationInterfaceDescription(TestFrameworkConstants.AppcatalogConstants.ECHO_NAME,
                        TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION,
                        appModules, applicationInputs, applicationOutputs));
        System.out.println("Echo Application Interface Id " + echoInterfaceId);
        return echoInterfaceId;
    }

    /**
     * Registers the Echo module, interface and three wrapper-script
     * deployments (Stampede, Trestles, BigRed2) for every non-skipped gateway.
     */
    protected void addEchoApplication() throws Exception {
        for (Gateway gateway : allGateways) {
            boolean isgatewayValid = true;
            for (String ovoidGateway : gatewaysToAvoid) {
                if (gateway.getGatewayId().equals(ovoidGateway)) {
                    isgatewayValid = false;
                    break;
                }
            }
            if (isgatewayValid) {
                // add echo module
                String echoModuleId = airavata.registerApplicationModule(authzToken, gateway.getGatewayId(),
                        createApplicationModule(TestFrameworkConstants.AppcatalogConstants.ECHO_NAME, "1.0",
                                TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION));
                System.out.println("Echo Module Id " + echoModuleId);
                // add echo interface
                String echoInterfaceId = registerEchoInterface(gateway, echoModuleId);
                applicationInterfaceListPerGateway.put(echoInterfaceId, gateway.getGatewayId());
                // add echo deployments
                String echoStampedeAppDeployId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(echoModuleId, stampedeResourceId,
                                "/home1/01437/ogce/production/app_wrappers/echo_wrapper.sh",
                                ApplicationParallelismType.SERIAL,
                                TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION, null, null, null));
                String echoTrestlesAppDeployId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(echoModuleId, trestlesResourceId,
                                "/home/ogce/production/app_wrappers/echo_wrapper.sh",
                                ApplicationParallelismType.SERIAL,
                                TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION, null, null, null));
                String echoBr2AppDeployId = airavata.registerApplicationDeployment(authzToken, gateway.getGatewayId(),
                        createApplicationDeployment(echoModuleId, br2ResourceId,
                                "/N/u/cgateway/BigRed2/production/app_wrappers/echo_wrapper.sh",
                                ApplicationParallelismType.SERIAL,
                                TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION, null, null, null));
                applicationDeployementListPerGateway.put(echoStampedeAppDeployId, gateway.getGatewayId());
                applicationDeployementListPerGateway.put(echoTrestlesAppDeployId, gateway.getGatewayId());
                applicationDeployementListPerGateway.put(echoBr2AppDeployId, gateway.getGatewayId());
            }
        }
    }

    /** Placeholder: LAMMPS registration is not implemented yet. */
    protected void addLAMMPSApplication() throws Exception {
        // add LAMPPS module
        // add LAMPSS interface
        // add LAMPSS deployment
    }

    /**
     * Builds (but does not register) a deployment description for the given
     * module/resource pair. Any of the command lists may be null.
     */
    protected ApplicationDeploymentDescription createApplicationDeployment(String appModuleId,
                                                                           String computeResourceId,
                                                                           String executablePath,
                                                                           ApplicationParallelismType parallelism,
                                                                           String appDeploymentDescription,
                                                                           List<String> moduleLoadCmds,
                                                                           List<String> preJobCmds,
                                                                           List<String> postJobCmds) {
        ApplicationDeploymentDescription deployment = new ApplicationDeploymentDescription();
        deployment.setAppDeploymentDescription(appDeploymentDescription);
        deployment.setAppModuleId(appModuleId);
        deployment.setComputeHostId(computeResourceId);
        deployment.setExecutablePath(executablePath);
        deployment.setParallelism(parallelism);
        deployment.setModuleLoadCmds(moduleLoadCmds);
        deployment.setPreJobCommands(preJobCmds);
        deployment.setPostJobCommands(postJobCmds);
        return deployment;
    }

    /** Builds (but does not register) an application module description. */
    protected ApplicationModule createApplicationModule(String appModuleName,
                                                        String appModuleVersion,
                                                        String appModuleDescription) {
        ApplicationModule module = new ApplicationModule();
        module.setAppModuleDescription(appModuleDescription);
        module.setAppModuleName(appModuleName);
        module.setAppModuleVersion(appModuleVersion);
        return module;
    }

    /**
     * Builds an application input descriptor. Null string parameters are
     * simply left unset on the Thrift object.
     */
    protected InputDataObjectType createAppInput(String inputName, String value, DataType type,
                                                 String applicationArgument, int order,
                                                 boolean isRequired, boolean requiredToCMD,
                                                 boolean stdIn, String description, String metadata) {
        InputDataObjectType input = new InputDataObjectType();
        if (inputName != null) input.setName(inputName);
        if (value != null) input.setValue(value);
        if (type != null) input.setType(type);
        if (applicationArgument != null) input.setApplicationArgument(applicationArgument);
        input.setInputOrder(order);
        input.setIsRequired(isRequired);
        input.setRequiredToAddedToCommandLine(requiredToCMD);
        if (description != null) input.setUserFriendlyDescription(description);
        input.setStandardInput(stdIn);
        if (metadata != null) input.setMetaData(metadata);
        return input;
    }

    /**
     * Builds an application output descriptor. Null string parameters are
     * simply left unset on the Thrift object.
     */
    protected OutputDataObjectType createAppOutput(String inputName, String value, DataType type,
                                                   boolean isRequired, boolean requiredToCMD,
                                                   String argument) {
        OutputDataObjectType outputDataObjectType = new OutputDataObjectType();
        if (inputName != null) outputDataObjectType.setName(inputName);
        if (value != null) outputDataObjectType.setValue(value);
        if (type != null) outputDataObjectType.setType(type);
        outputDataObjectType.setIsRequired(isRequired);
        outputDataObjectType.setRequiredToAddedToCommandLine(requiredToCMD);
        outputDataObjectType.setApplicationArgument(argument);
        return outputDataObjectType;
    }

    /**
     * Builds an application interface description tying together a name,
     * description, modules and I/O descriptors. Null parameters (except the
     * name) are left unset.
     */
    protected ApplicationInterfaceDescription createApplicationInterfaceDescription(String applicationName,
                                                                                    String applicationDescription,
                                                                                    List<String> applicationModules,
                                                                                    List<InputDataObjectType> applicationInputs,
                                                                                    List<OutputDataObjectType> applicationOutputs) {
        ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
        applicationInterfaceDescription.setApplicationName(applicationName);
        if (applicationDescription != null) applicationInterfaceDescription.setApplicationDescription(applicationDescription);
        if (applicationModules != null) applicationInterfaceDescription.setApplicationModules(applicationModules);
        if (applicationInputs != null) applicationInterfaceDescription.setApplicationInputs(applicationInputs);
        if (applicationOutputs != null) applicationInterfaceDescription.setApplicationOutputs(applicationOutputs);
        return applicationInterfaceDescription;
    }

    /** Returns the interface-id -> gateway-id map accumulated so far. */
    public Map<String, String> getApplicationInterfaceListPerGateway() {
        return applicationInterfaceListPerGateway;
    }

    public void setApplicationInterfaceListPerGateway(Map<String, String> applicationInterfaceListPerGateway) {
        this.applicationInterfaceListPerGateway = applicationInterfaceListPerGateway;
    }

    /** Returns the deployment-id -> gateway-id map accumulated so far. */
    public Map<String, String> getApplicationDeployementListPerGateway() {
        return applicationDeployementListPerGateway;
    }

    public void setApplicationDeployementListPerGateway(Map<String, String> applicationDeployementListPerGateway) {
        this.applicationDeployementListPerGateway = applicationDeployementListPerGateway;
    }
}
apache-2.0
hello/android-buruberi
buruberi-core/src/main/java/is/hello/buruberi/bluetooth/stacks/android/ScannedPeripheral.java
2221
/* * Copyright 2015 Hello Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package is.hello.buruberi.bluetooth.stacks.android; import android.bluetooth.BluetoothDevice; import android.support.annotation.NonNull; import is.hello.buruberi.bluetooth.stacks.util.AdvertisingData; /*package*/ class ScannedPeripheral { /*package*/ final BluetoothDevice device; /*package*/ final AdvertisingData advertisingData; /*package*/ int rssi; /*package*/ ScannedPeripheral(@NonNull BluetoothDevice device, @NonNull AdvertisingData advertisingData, int rssi) { this.device = device; this.advertisingData = advertisingData; this.rssi = rssi; } /*package*/ NativeGattPeripheral createPeripheral(@NonNull NativeBluetoothStack stack) { return new NativeGattPeripheral(stack, device, rssi, advertisingData); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final ScannedPeripheral that = (ScannedPeripheral) o; return (rssi == that.rssi && advertisingData.equals(that.advertisingData) && device.equals(that.device)); } @Override public int hashCode() { int result = device.hashCode(); result = 31 * result + advertisingData.hashCode(); return result; } @Override public String toString() { return "ScanResult{" + "device=" + device + ", advertisingData=" + advertisingData + ", rssi=" + rssi + '}'; } }
apache-2.0
asakusafw/asakusafw
testing-project/asakusa-test-data-provider/src/main/java/com/asakusafw/testdriver/excel/Util.java
6127
/**
 * Copyright 2011-2021 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.testdriver.excel;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URL;
import java.text.MessageFormat;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.SpreadsheetVersion;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Common utilities for this package.
 * @since 0.2.0
 * @version 0.5.3
 */
final class Util {

    static final Logger LOG = LoggerFactory.getLogger(Util.class);

    // URI fragment is either ":<sheet-index>" (group 1) or "<sheet-name>" (group 2).
    private static final Pattern FRAGMENT = Pattern.compile(":(\\d+)|([^:].*)"); //$NON-NLS-1$

    private static final String HSSF_EXTENSION = ".xls"; //$NON-NLS-1$

    private static final String XSSF_EXTENSION = ".xlsx"; //$NON-NLS-1$

    private static final String FRAGMENT_FIRST_SHEET = ":0"; //$NON-NLS-1$

    /**
     * Extracts the sheet addressed by the given URI
     * ({@code path/to/book.xls[x]#<sheet-name or :index>}).
     * Returns {@code null} when the path is not an Excel workbook or the
     * fragment is unsupported; defaults to the first sheet when no fragment
     * is present.
     *
     * @param source the workbook URI
     * @return the addressed sheet, or {@code null} if this URI is not handled here
     * @throws IOException if the workbook cannot be opened or the sheet is missing
     */
    static Sheet extract(URI source) throws IOException {
        assert source != null;
        String path = source.getSchemeSpecificPart();
        if (isHssf(path) == false && isXssf(path) == false) {
            LOG.debug("Not an Excel workbook: {}", source); //$NON-NLS-1$
            return null;
        }
        String fragment = source.getFragment();
        if (fragment == null) {
            // the first sheet
            fragment = FRAGMENT_FIRST_SHEET;
            LOG.debug("Fragment is not set, using first sheet: {}", source); //$NON-NLS-1$
        }
        Matcher matcher = FRAGMENT.matcher(fragment);
        if (matcher.matches() == false) {
            LOG.info(MessageFormat.format(
                    Messages.getString("Util.infoUnsupportedUriFragment"), //$NON-NLS-1$
                    source));
            return null;
        }
        LOG.debug("Processing Excel workbook: {}", source); //$NON-NLS-1$
        URL url = source.toURL();
        // NOTE(review): the workbook is intentionally not closed here because the
        // returned Sheet stays attached to it; callers own its lifetime.
        Workbook book;
        try (InputStream in = new BufferedInputStream(url.openStream())) {
            book = openWorkbookFor(path, in);
        } catch (IOException e) {
            // BUGFIX: keep the original exception as the cause; the previous
            // version rethrew without it, hiding the actual failure reason.
            throw new IOException(MessageFormat.format(
                    Messages.getString("Util.errorFailedToOpenWorkbook"), //$NON-NLS-1$
                    source), e);
        }
        if (matcher.group(1) != null) {
            // ":<index>" form
            int sheetNumber = Integer.parseInt(matcher.group(1));
            LOG.debug("Opening sheet by index : {}", sheetNumber); //$NON-NLS-1$
            try {
                Sheet sheet = book.getSheetAt(sheetNumber);
                assert sheet != null;
                return sheet;
            } catch (RuntimeException e) {
                throw new IOException(MessageFormat.format(
                        Messages.getString("Util.errorMissingSheetByIndex"), //$NON-NLS-1$
                        source,
                        sheetNumber), e);
            }
        } else {
            // "<sheet-name>" form
            String sheetName = matcher.group(2);
            LOG.debug("Opening sheet by name : {}", sheetName); //$NON-NLS-1$
            assert sheetName != null;
            Sheet sheet = book.getSheet(sheetName);
            if (sheet == null) {
                throw new IOException(MessageFormat.format(
                        Messages.getString("Util.errorMissingSheetByName"), //$NON-NLS-1$
                        source,
                        sheetName));
            }
            return sheet;
        }
    }

    /**
     * Opens a workbook from the stream, choosing the POI implementation by
     * the path's file extension (defaults to the legacy .xls format).
     */
    static Workbook openWorkbookFor(String path, InputStream input) throws IOException {
        if (isHssf(path)) {
            return new HSSFWorkbook(input);
        } else if (isXssf(path)) {
            return new XSSFWorkbook(input);
        } else {
            return new HSSFWorkbook(input);
        }
    }

    /**
     * Creates an empty workbook of the implementation matching the path's
     * file extension (defaults to the legacy .xls format).
     */
    static Workbook createEmptyWorkbookFor(String path) {
        if (isHssf(path)) {
            return new HSSFWorkbook();
        } else if (isXssf(path)) {
            return new XSSFWorkbook();
        } else {
            return new HSSFWorkbook();
        }
    }

    /**
     * Returns the spreadsheet version (cell/row limits) matching the path's
     * file extension (defaults to Excel 97 limits).
     */
    static SpreadsheetVersion getSpreadsheetVersionFor(String path) {
        if (isHssf(path)) {
            return SpreadsheetVersion.EXCEL97;
        } else if (isXssf(path)) {
            return SpreadsheetVersion.EXCEL2007;
        } else {
            return SpreadsheetVersion.EXCEL97;
        }
    }

    /** Returns {@code true} when the path names an OOXML (.xlsx) workbook. */
    static boolean isXssf(String path) {
        if (path == null) {
            return false;
        }
        return path.endsWith(XSSF_EXTENSION);
    }

    /** Returns {@code true} when the path names a legacy (.xls) workbook. */
    static boolean isHssf(String path) {
        if (path == null) {
            return false;
        }
        return path.endsWith(HSSF_EXTENSION);
    }

    /**
     * Builds a display text of the form {@code "title [symbol]"}, collapsing
     * to just the symbol when the two are equal (case-insensitively).
     */
    static String buildText(String symbol, String title) {
        assert symbol != null;
        assert title != null;
        if (symbol.equalsIgnoreCase(title)) {
            return symbol;
        } else {
            return MessageFormat.format(
                    "{0} [{1}]", //$NON-NLS-1$
                    title,
                    symbol);
        }
    }

    // Matches the "title [symbol]" display form produced by buildText().
    private static final Pattern TEXT = Pattern.compile(".*\\[(.*?)\\]"); //$NON-NLS-1$

    /**
     * Recovers the symbol from a display text built by
     * {@link #buildText(String, String)}; returns the text unchanged when it
     * does not carry a bracketed symbol.
     */
    static String extractSymbol(String text) {
        assert text != null;
        Matcher matcher = TEXT.matcher(text);
        if (matcher.matches()) {
            return matcher.group(1);
        } else {
            return text;
        }
    }

    private Util() {
        return;
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-guardduty/src/main/java/com/amazonaws/services/guardduty/model/transform/SecurityContextJsonUnmarshaller.java
2745
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.guardduty.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.guardduty.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import com.fasterxml.jackson.core.JsonToken; import static com.fasterxml.jackson.core.JsonToken.*; /** * SecurityContext JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class SecurityContextJsonUnmarshaller implements Unmarshaller<SecurityContext, JsonUnmarshallerContext> { public SecurityContext unmarshall(JsonUnmarshallerContext context) throws Exception { SecurityContext securityContext = new SecurityContext(); int originalDepth = context.getCurrentDepth(); String currentParentElement = context.getCurrentParentElement(); int targetDepth = originalDepth + 1; JsonToken token = context.getCurrentToken(); if (token == null) token = context.nextToken(); if (token == VALUE_NULL) { return null; } while (true) { if (token == null) break; if (token == FIELD_NAME || token == START_OBJECT) { if (context.testExpression("privileged", targetDepth)) { context.nextToken(); securityContext.setPrivileged(context.getUnmarshaller(Boolean.class).unmarshall(context)); } } else if (token == END_ARRAY || token == END_OBJECT) { if (context.getLastParsedParentElement() == null || 
context.getLastParsedParentElement().equals(currentParentElement)) { if (context.getCurrentDepth() <= originalDepth) break; } } token = context.nextToken(); } return securityContext; } private static SecurityContextJsonUnmarshaller instance; public static SecurityContextJsonUnmarshaller getInstance() { if (instance == null) instance = new SecurityContextJsonUnmarshaller(); return instance; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-mediastore/src/main/java/com/amazonaws/services/mediastore/model/TagResourceRequest.java
10278
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mediastore.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for tagging a MediaStore container.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/TagResource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TagResourceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The Amazon Resource Name (ARN) for the container.
     * </p>
     */
    private String resource;
    /**
     * <p>
     * An array of key:value pairs that you want to add to the container. Only the tags that you want to add or update
     * need to be specified; existing tags with other keys are kept. For example, if a container has the tags
     * customer:CompanyA and priority:High and you call TagResource with priority:Medium and type:Contract, the
     * container ends up with customer:CompanyA, priority:Medium, and type:Contract.
     * </p>
     */
    private java.util.List<Tag> tags;

    /**
     * <p>
     * The Amazon Resource Name (ARN) for the container.
     * </p>
     *
     * @param resource
     *        The Amazon Resource Name (ARN) for the container.
     */
    public void setResource(String resource) {
        this.resource = resource;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) for the container.
     * </p>
     *
     * @return The Amazon Resource Name (ARN) for the container.
     */
    public String getResource() {
        return resource;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) for the container.
     * </p>
     *
     * @param resource
     *        The Amazon Resource Name (ARN) for the container.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withResource(String resource) {
        setResource(resource);
        return this;
    }

    /**
     * <p>
     * An array of key:value pairs that you want to add to the container. Only the tags that you want to add or update
     * need to be specified; existing tags with other keys are kept.
     * </p>
     *
     * @return The tags to add or update on the container.
     */
    public java.util.List<Tag> getTags() {
        return tags;
    }

    /**
     * <p>
     * An array of key:value pairs that you want to add to the container. Only the tags that you want to add or update
     * need to be specified; existing tags with other keys are kept.
     * </p>
     *
     * @param tags
     *        The tags to add or update on the container.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        // defensive copy so later mutation of the caller's collection has no effect
        this.tags = (tags == null) ? null : new java.util.ArrayList<Tag>(tags);
    }

    /**
     * <p>
     * An array of key:value pairs that you want to add to the container. Only the tags that you want to add or update
     * need to be specified; existing tags with other keys are kept.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        The tags to add or update on the container.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withTags(Tag... tags) {
        if (this.tags == null) {
            this.tags = new java.util.ArrayList<Tag>(tags.length);
        }
        java.util.Collections.addAll(this.tags, tags);
        return this;
    }

    /**
     * <p>
     * An array of key:value pairs that you want to add to the container. Only the tags that you want to add or update
     * need to be specified; existing tags with other keys are kept.
     * </p>
     *
     * @param tags
     *        The tags to add or update on the container.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("{");
        if (getResource() != null) {
            buffer.append("Resource: ").append(getResource()).append(",");
        }
        if (getTags() != null) {
            buffer.append("Tags: ").append(getTags());
        }
        return buffer.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TagResourceRequest)) {
            return false;
        }
        TagResourceRequest that = (TagResourceRequest) obj;
        return java.util.Objects.equals(this.getResource(), that.getResource())
                && java.util.Objects.equals(this.getTags(), that.getTags());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + java.util.Objects.hashCode(getResource());
        result = prime * result + java.util.Objects.hashCode(getTags());
        return result;
    }

    @Override
    public TagResourceRequest clone() {
        return (TagResourceRequest) super.clone();
    }

}
apache-2.0
sibvisions/DropboxStorage
src/com/sibvisions/apps/persist/DropboxStorage.java
31150
/* * Copyright 2014 SIB Visions GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * * History * * 11.11.2014 - [JR] - creation */ package com.sibvisions.apps.persist; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Proxy; import java.net.Proxy.Type; import java.util.List; import java.util.Locale; import java.util.UUID; import javax.rad.io.IFileHandle; import javax.rad.io.RemoteFileHandle; import javax.rad.model.ModelException; import javax.rad.model.RowDefinition; import javax.rad.model.SortDefinition; import javax.rad.model.condition.Equals; import javax.rad.model.condition.ICondition; import javax.rad.model.condition.OperatorCondition; import javax.rad.model.datatype.BinaryDataType; import javax.rad.model.datatype.StringDataType; import javax.rad.persist.ColumnMetaData; import javax.rad.persist.DataSourceException; import javax.rad.persist.MetaData; import javax.rad.persist.MetaData.Feature; import com.dropbox.core.DbxClient; import com.dropbox.core.DbxEntry; import com.dropbox.core.DbxHost; import com.dropbox.core.DbxRequestConfig; import com.dropbox.core.DbxWriteMode; import com.dropbox.core.http.StandardHttpRequestor; import com.sibvisions.rad.model.DataBookCSVExporter; import com.sibvisions.rad.model.mem.DataRow; import com.sibvisions.rad.model.mem.MemDataBook; import com.sibvisions.rad.persist.AbstractCachedStorage; 
import com.sibvisions.util.ArrayUtil; import com.sibvisions.util.ObjectCache; import com.sibvisions.util.ProxyUtil; import com.sibvisions.util.type.CommonUtil; import com.sibvisions.util.type.FileUtil; import com.sibvisions.util.type.StringUtil; /** * The <code>DropboxStorage</code> is a cached storage for files which were store in a dropbox. * * @author René Jahn */ public class DropboxStorage extends AbstractCachedStorage { //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Class members //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ /** the filter value if null. */ private static final String NULL = "/"; /** the file type enumeration. */ public enum FileType { /** file and directory. */ All, /** only file. */ File, /** only directory. */ Folder } /** the dropbox client. */ private DbxClient client; /** the metadata. */ private MetaData metadata; /** the row definition. */ private RowDefinition rowdef; /** the export databook. */ private MemDataBook mdbExport; /** the access token. */ private String sAccessToken; /** the initial root path. */ private String sRootPath; /** the proxy host. */ private String sProxyHost; /** the fetch filetype. */ private FileType fileType = FileType.File; /** the proxy port. */ private int iProxyPort; /** whether this storage is open. */ private boolean bOpen; /** whether fetch should act recursive. */ private boolean bRecursive = false; //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Initialization //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ /** * Creates a new instance of <code>DropboxStorage</code>. 
*/ public DropboxStorage() { } //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Interface implementation //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ /** * {@inheritDoc} */ public MetaData getMetaData() throws DataSourceException { if (!isOpen()) { throw new DataSourceException("Drobpox storage isn't open!"); } return metadata; } /** * {@inheritDoc} */ public int getEstimatedRowCount(ICondition pFilter) throws DataSourceException { return 0; } /** * Closes this storage. * * @throws Throwable if closing failed */ public void close() throws Throwable { if (bOpen) { metadata = null; } } //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Overwritten methods //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ /** * {@inheritDoc} */ @Override protected List<Object[]> executeFetch(ICondition pFilter, SortDefinition pSort, int pFromRow, int pMinimumRowCount) throws DataSourceException { if (!isOpen()) { throw new DataSourceException("DropboxStorage isn't open!"); } String sFolder = null; boolean bDeepSearch = bRecursive; if (pFilter == null) { //NO Filter -> search all sFolder = sRootPath; } else { String sFilter = (String)getEqualsValue(pFilter, "FOLDER"); if (sFilter != null && sFilter != NULL) { sFolder = sFilter; } else { sFilter = (String)getEqualsValue(pFilter, "PARENT_FOLDER"); if (sFilter != null) { sFolder = sFilter; } } if (!bDeepSearch) { //no folder search -> must be a complex filter -> deep search bDeepSearch = sFolder == null; } } try { List<Object[]> liRecords = new ArrayUtil<Object[]>(); if (sFolder == NULL) { DbxEntry entry = client.getMetadata("/"); DataRow row = new DataRow(rowdef); if (pFilter.isFulfilled(row)) { liRecords.add(createRecord(entry)); } } else { searchRecords(sFolder, bDeepSearch, liRecords, pFilter); } liRecords.add(null); return liRecords; } catch (Exception e) { throw new DataSourceException("Can't read folder!", e); } } /** * {@inheritDoc} */ @Override 
protected Object[] executeRefetchRow(Object[] pDataRow) throws DataSourceException
	{
		// Row column layout (inferred from usage in this class — confirm against the
		// metadata definition): [0]=absolute path, [1]=parent folder, [2]=folder,
		// [3]=name, [4]=file type (FileType name), [5]=content file handle.
		if (!isOpen())
		{
			throw new DataSourceException("DropboxStorage isn't open!");
		}

		removeFileHandle(pDataRow);

		//the only thing we can do, is to create a new file handle (maybe something has changed)
		if (FileType.File.toString().equals(pDataRow[4]))
		{
			return new Object[] {pDataRow[0], pDataRow[1], pDataRow[2], pDataRow[3], pDataRow[4],
					             createFileHandle((String)pDataRow[0], (String)pDataRow[3])};
		}
		else
		{
			// folders have no content handle (last column stays null)
			return new Object[] {pDataRow[0], pDataRow[1], pDataRow[2], getFolderDisplayName((String)pDataRow[3]), pDataRow[4], null};
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Inserts either a new file (when a file type of "File" is set or content is
	 * present without a type) or a new folder. Uses the same positional column
	 * layout as {@code executeRefetchRow}.
	 */
	@Override
	protected Object[] executeInsert(Object[] pDataRow) throws DataSourceException
	{
		if (!isOpen())
		{
			throw new DataSourceException("DropboxStorage isn't open!");
		}

		if ((pDataRow[4] == null && pDataRow[5] != null)
			|| FileType.File.toString().equals(pDataRow[4]))
		{
			if (pDataRow[3] == null)
			{
				throw new DataSourceException("Can't save file because file name is undefined!");
			}

			String sPath = buildPath(pDataRow);

			Object data = pDataRow[5];

			DbxEntry.File file;

			try
			{
				file = save(sPath, data);

				String sFolder = getFolder(file.path);

				return new Object[] {file.path, getParentFolder(sFolder), sFolder, file.name, FileType.File.toString(),
						             createFileHandle(file.path, file.name)};
			}
			catch (Exception ex)
			{
				// re-throw our own exception type unchanged; wrap everything else
				if (ex instanceof DataSourceException)
				{
					throw (DataSourceException)ex;
				}

				throw new DataSourceException("Couldn't create file '" + sPath + "'!", ex);
			}
		}
		else
		{
			if (pDataRow[2] == null)
			{
				throw new DataSourceException("Can't create folder because path is undefined!");
			}

			String sFolder = getFolderDisplayName((String)pDataRow[2]);

			try
			{
				DbxEntry.Folder folder = client.createFolder(sFolder);

				return new Object[] {folder.path, getParentFolder(folder.path), folder.path, getFolderDisplayName(folder.name),
						             FileType.Folder.toString(), null};
			}
			catch (Exception ex)
			{
				throw new DataSourceException("Couldn't create folder '" + sFolder + "'!", ex);
			}
		}
	}
@Override protected Object[] executeUpdate(Object[] pOldDataRow, Object[] pNewDataRow) throws DataSourceException { if (!isOpen()) { throw new DataSourceException("DropboxStorage isn't open!"); } if (!CommonUtil.equals(pOldDataRow[4], pNewDataRow[4])) { throw new DataSourceException("Can't change file type '" + pOldDataRow[4] + "' to '" + pNewDataRow[4] + "'!"); } if (!CommonUtil.equals(pOldDataRow[4], pNewDataRow[4])) { throw new DataSourceException("Can't change file type '" + pOldDataRow[4] + "' to '" + pNewDataRow[4] + "'!"); } String sOldPath = buildPath(pOldDataRow); String sNewPath = buildPath(pNewDataRow); Object[] oResult; if (!CommonUtil.equals(sOldPath, sNewPath)) { try { //file exists? if (client.getMetadata(sNewPath) != null) { //try to delete client.delete(sNewPath); } DbxEntry entry = client.move(sOldPath, sNewPath); if (entry != null) { removeFileHandle(pOldDataRow); removeFileHandle(pNewDataRow); oResult = createRecord(entry); } else { throw new DataSourceException("Can't move file '" + sOldPath + "' to '" + sNewPath + "'!"); } } catch (Exception ex) { if (ex instanceof DataSourceException) { throw (DataSourceException)ex; } throw new DataSourceException("Can't move file '" + sOldPath + "' to '" + sNewPath + "'!", ex); } } else { oResult = pNewDataRow; } if (!CommonUtil.equals(pOldDataRow[5], pNewDataRow[5])) { try { DbxEntry.File file = save(sNewPath, pNewDataRow[5]); removeFileHandle(oResult); oResult[5] = createFileHandle(file); } catch (Exception ex) { throw new DataSourceException("Can't change content of file '" + sNewPath + "'!", ex); } } return oResult; } /** * {@inheritDoc} */ @Override protected void executeDelete(Object[] pDeleteDataRow) throws DataSourceException { if (!isOpen()) { throw new DataSourceException("DropboxStorage isn't open!"); } try { client.delete((String)pDeleteDataRow[0]); removeFileHandle(pDeleteDataRow); } catch (Exception ex) { throw new DataSourceException("Couldn't delete file '" + pDeleteDataRow[0] + "'!", ex); } 
} @Override public void writeCSV(OutputStream pStream, String[] pColumnNames, String[] pLabels, ICondition pFilter, SortDefinition pSort, String pSeparator) throws Exception { if (!isOpen()) { throw new DataSourceException("DropboxStorage isn't open!"); } if (mdbExport == null) { mdbExport = new MemDataBook(rowdef); mdbExport.setName("export"); mdbExport.open(); } else { mdbExport.close(); mdbExport.open(); } for (Object[] record : fetch(pFilter, null, 0, -1)) { if (record != null) { mdbExport.insert(false); mdbExport.setValues(null, record); } } DataBookCSVExporter.writeCSV(mdbExport, pStream, pColumnNames, pLabels, pFilter, pSort, pSeparator); } //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // User-defined methods //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ /** * Opens this storage. * * @throws DataSourceException if opening failed */ public void open() throws DataSourceException { if (!bOpen) { String sHost = sProxyHost; int iPort = iProxyPort; try { Proxy proxy = ProxyUtil.getSystemProxy("https://" + DbxHost.Default.api); if (proxy != null) { InetSocketAddress addr = (InetSocketAddress)proxy.address(); sHost = addr.getHostName(); iPort = addr.getPort(); } } catch (Exception e) { debug(e); } if (StringUtil.isEmpty(sHost)) { sHost = System.getProperty("http.proxyHost"); String sPort = System.getProperty("http.proxyPort"); if (sHost == null) { sHost = System.getProperty("https.proxyHost"); sPort = System.getProperty("https.proxyPort"); } try { iPort = Integer.parseInt(sPort); } catch (Exception e) { iPort = 0; } } DbxRequestConfig config; if (!StringUtil.isEmpty(sHost) && iPort > 0) { config = new DbxRequestConfig("JVx", Locale.getDefault().toString(), new StandardHttpRequestor(new Proxy(Type.HTTP, new InetSocketAddress(sHost, iPort)))); } else { config = new DbxRequestConfig("JVx", Locale.getDefault().toString()); } client = new DbxClient(config, sAccessToken); MetaData md = new MetaData(); ColumnMetaData cmd = new 
ColumnMetaData("PATH", StringDataType.TYPE_IDENTIFIER); cmd.setNullable(false); md.addColumnMetaData(cmd); cmd = new ColumnMetaData("PARENT_FOLDER", StringDataType.TYPE_IDENTIFIER); cmd.setNullable(false); md.addColumnMetaData(cmd); cmd = new ColumnMetaData("FOLDER", StringDataType.TYPE_IDENTIFIER); cmd.setNullable(false); md.addColumnMetaData(cmd); cmd = new ColumnMetaData("NAME", StringDataType.TYPE_IDENTIFIER); cmd.setNullable(false); md.addColumnMetaData(cmd); cmd = new ColumnMetaData("TYPE", StringDataType.TYPE_IDENTIFIER); cmd.setNullable(false); cmd.setAllowedValues(new Object[] {FileType.File.toString(), FileType.Folder.toString()}); md.addColumnMetaData(cmd); cmd = new ColumnMetaData("CONTENT", BinaryDataType.TYPE_IDENTIFIER); cmd.setFetchLargeObjectsLazy(true); md.addColumnMetaData(cmd); md.removeFeature(Feature.Sort); md.setPrimaryKeyColumnNames(new String[] {"PATH"}); metadata = md; rowdef = new RowDefinition(); try { for (int i = 0, cnt = metadata.getColumnMetaDataCount(); i < cnt; i++) { rowdef.addColumnDefinition(ColumnMetaData.createColumnDefinition(metadata.getColumnMetaData(i))); } } catch (ModelException me) { throw new DataSourceException("Can't create row definition!", me); } bOpen = true; } } /** * Gets whether this storage is open. * * @return <code>true</code> if this storage was opened, <code>false</code> otherwise */ public boolean isOpen() { return bOpen; } /** * Search dropbox records. * * @param pFolder the folder to search * @param pDeep <code>true</code> to search in sub folders as well * @param pRecords the found records * @param pFilter the filter condition * @throws Exception if iterating folders fails */ private void searchRecords(String pFolder, boolean pDeep, List<Object[]> pRecords, ICondition pFilter) throws Exception { DataRow row = new DataRow(rowdef); searchRecords(pFolder, pDeep, pRecords, pFilter, row); } /** * Search dropbox records. 
* * @param pFolder the folder to search * @param pDeep <code>true</code> to search in sub folders as well * @param pRecords the found records * @param pFilter the filter condition * @param pSearch the temporary search row (for checking the filter condition) * @throws Exception if iterating folders fails */ private void searchRecords(String pFolder, boolean pDeep, List<Object[]> pRecords, ICondition pFilter, DataRow pSearch) throws Exception { DbxEntry.WithChildren listing = client.getMetadataWithChildren(getFolderDisplayName(pFolder)); for (DbxEntry entry : listing.children) { if (fileType == FileType.All || (fileType == FileType.File && entry.isFile()) || (fileType == FileType.Folder && entry.isFolder())) { Object[] oRecord = createRecord(entry); if (pFilter != null) { pSearch.setValues(null, oRecord); if (pFilter.isFulfilled(pSearch)) { pRecords.add(oRecord); } } else { pRecords.add(oRecord); } } if (entry.isFolder() && pDeep) { searchRecords(entry.path, pDeep, pRecords, pFilter); } } } /** * Creates a record for the given entry. * * @param pEntry the remote entry * @return the record */ private Object[] createRecord(DbxEntry pEntry) { boolean bFile = pEntry.isFile(); String sDirectory = bFile ? getFolder(pEntry.path) : pEntry.path; return new Object[] {pEntry.path, getParentFolder(sDirectory), sDirectory, bFile ? pEntry.name : getFolderDisplayName(pEntry.name), bFile ? FileType.File.toString() : FileType.Folder.toString(), bFile ? createFileHandle(pEntry.asFile()) : null}; } /** * Creates a cached file handle for lazy loading. * * @param pFile the file * @return the {@link RemoteFileHandle} */ private RemoteFileHandle createFileHandle(DbxEntry.File pFile) { DropboxFileHandle handle = new DropboxFileHandle(client, pFile); String sUUID = UUID.randomUUID().toString(); ObjectCache.put(sUUID, handle); return new RemoteFileHandle(pFile.name, sUUID); } /** * Creates a cached file handle for lazy loading. 
* * @param pPath the file path * @param pName the file name * @return the {@link RemoteFileHandle} */ private RemoteFileHandle createFileHandle(String pPath, String pName) { DropboxFileHandle handle = new DropboxFileHandle(client, pPath); String sUUID = UUID.randomUUID().toString(); ObjectCache.put(sUUID, handle); return new RemoteFileHandle(pName, sUUID); } /** * Removes a file handle from the cache. * * @param pRecord the record information. The [5] element should be an instance of {@link RemoteFileHandle} in * order to remove the object from the cache. */ private void removeFileHandle(Object[] pRecord) { if (pRecord[4] instanceof RemoteFileHandle) { Object oKey = ((RemoteFileHandle)pRecord[5]).getObjectCacheKey(); if (oKey != null) { ObjectCache.remove(oKey); } } } /** * Gets the folder name from the given path. * * @param pPath the path * @return the directory name */ private String getFolder(String pPath) { String sPath = FileUtil.getDirectory(pPath); if (sPath == null) { sPath = "/"; } else if (!sPath.startsWith("/")) { sPath = "/" + sPath; } return sPath; } /** * Gets the display name of the given folder. * * @param pName the folder name (root node is empty) * @return the display name of the given folder (/ for root folder) */ private String getFolderDisplayName(String pName) { if (StringUtil.isEmpty(pName)) { return "/"; } return pName; } /** * Gets the parent folder name for the given directory. * * @param pPath the directory * @return the parent directory */ private String getParentFolder(String pPath) { if ("/".equals(pPath)) { return null; } return getFolder(pPath); } /** * Creates a path with given record. 
* * @param pDataRow the record * @return the path */ private String buildPath(Object[] pDataRow) { String sPath = getFolderDisplayName((String)pDataRow[2]); if (pDataRow[5] != null || FileType.File.toString().equals(pDataRow[4])) { String sName = (String)pDataRow[3]; if (!StringUtil.isEmpty(sName)) { if (sPath.endsWith("/")) { sPath += sName; } else { sPath += "/" + sName; } } } else { if ("/".equals(pDataRow[3])) { if (!((String)pDataRow[2]).endsWith("/" + pDataRow[3])) { sPath = sPath.substring(0, sPath.lastIndexOf('/')) + pDataRow[3]; } } } return sPath; } /** * Saves a file. * * @param pPath the path * @param pContent the new content * @return the saved file * @throws Exception if saving failed */ private DbxEntry.File save(String pPath, Object pContent) throws Exception { if (pContent == null) { return client.uploadFile(pPath, DbxWriteMode.force(), 0, new ByteArrayInputStream(new byte[0])); } else if (pContent instanceof byte[]) { return client.uploadFile(pPath, DbxWriteMode.force(), ((byte[])pContent).length, new ByteArrayInputStream((byte[])pContent)); } else if (pContent instanceof IFileHandle) { return client.uploadFile(pPath, DbxWriteMode.force(), ((IFileHandle)pContent).getLength(), ((IFileHandle)pContent).getInputStream()); } else if (pContent instanceof File) { FileInputStream fis = new FileInputStream((File)pContent); try { return client.uploadFile(pPath, DbxWriteMode.force(), ((File)pContent).length(), fis); } finally { CommonUtil.close(fis); } } else if (pContent instanceof InputStream) { return client.uploadFile(pPath, DbxWriteMode.force(), -1, (InputStream)pContent); } else { throw new DataSourceException("Unsupportet content type: " + pContent.getClass().getName()); } } /** * Sets the access token. * * @param pAccessToken the access token */ public void setAccessToken(String pAccessToken) { sAccessToken = pAccessToken; } /** * Gets the access token. 
* * @return the access token */ public String getAccessToken() { return sAccessToken; } /** * Sets the file type. The file type will be used for listing dropbox records. * * @param pType the {@link FileType} */ public void setFileType(FileType pType) { fileType = pType; } /** * Gets the file type. * * @return the {@link FileType} * @see #setFileType(FileType) */ public FileType getFileType() { return fileType; } /** * Sets the root folder. * * @param pPath the (absolute) folder name/path */ public void setRootFolder(String pPath) { sRootPath = pPath; } /** * Gets the root folder. * * @return the folder name/path */ public String getRootFolder() { return sRootPath; } /** * Gets a value from the given filter. * * @param pFilter the filter * @param pColumn the column name * @return the value or <code>null</code> if the column was not found */ private Object getEqualsValue(ICondition pFilter, String pColumn) { if (pFilter instanceof OperatorCondition) { for (ICondition cond : ((OperatorCondition)pFilter).getConditions()) { if (cond instanceof Equals) { if (pColumn.equals(((Equals)cond).getColumnName())) { Object oValue = ((Equals)cond).getValue(); return oValue == null ? NULL : oValue; } } } } else if (pFilter instanceof Equals) { if (pColumn.equals(((Equals)pFilter).getColumnName())) { Object oValue = ((Equals)pFilter).getValue(); return oValue == null ? NULL : oValue; } } return null; } /** * Sets whether fetching should act recursive. The search results will be recursive if no specific path was * configured via fetch condition. * * @param pRecursive <code>true</code> to search directories recursive, <code>false</code> to search * only the given directory */ public void setRecursive(boolean pRecursive) { bRecursive = pRecursive; } /** * Gets whether fetching should act recursive. * * @return <code>true</code> if directories will be iterated recursively, <code>false</code> otherwise */ public boolean isRecursive() { return bRecursive; } } // DropboxStorage
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-core/src/test/java/com/amazonaws/retry/v2/FixedDelayBackoffStrategyTest.java
1266
/*
 * Copyright 2011-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.retry.v2;

import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Tests for {@link FixedDelayBackoffStrategy}: the constructor rejects
 * non-positive delays and a valid strategy always returns its fixed delay.
 */
public class FixedDelayBackoffStrategyTest {

    /** The fixed delay used by the happy-path test. */
    private static final long FIXED_DELAY = 100;

    @Test(expected = IllegalArgumentException.class)
    public void negativeBackoff_ThrowsException() {
        // a negative delay is rejected at construction time
        new FixedDelayBackoffStrategy(-1);
    }

    @Test(expected = IllegalArgumentException.class)
    public void zeroBackoff_ThrowsException() {
        // zero is not a valid fixed delay either
        new FixedDelayBackoffStrategy(0);
    }

    @Test
    public void positiveBackoff_ReturnsFixedBackoffOnDelay() {
        // the computed delay is exactly the configured constant, regardless of context
        FixedDelayBackoffStrategy strategy = new FixedDelayBackoffStrategy((int) FIXED_DELAY);
        assertEquals(FIXED_DELAY, strategy.computeDelayBeforeNextRetry(RetryPolicyContexts.EMPTY));
    }
}
apache-2.0
jdgwartney/vsphere-ws
java/JAXWS/samples/com/vmware/vim25/RemoveDatastoreRequestType.java
2313
package com.vmware.vim25;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for RemoveDatastoreRequestType complex type.
 *
 * <p>Request wrapper for the vSphere {@code RemoveDatastore} operation: it carries
 * the managed object the call is invoked on ({@code _this}) and the datastore to
 * remove. Appears to be JAXB-generated from the vSphere WSDL — do not edit by hand.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="RemoveDatastoreRequestType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="_this" type="{urn:vim25}ManagedObjectReference"/>
 *         &lt;element name="datastore" type="{urn:vim25}ManagedObjectReference"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "RemoveDatastoreRequestType", propOrder = {
    "_this",
    "datastore"
})
public class RemoveDatastoreRequestType {

    // the managed object on which RemoveDatastore is invoked (required by the schema)
    @XmlElement(required = true)
    protected ManagedObjectReference _this;
    // the datastore to be removed (required by the schema)
    @XmlElement(required = true)
    protected ManagedObjectReference datastore;

    /**
     * Gets the value of the {@code _this} property (the target managed object).
     *
     * @return
     *     possible object is
     *     {@link ManagedObjectReference }
     *
     */
    public ManagedObjectReference getThis() {
        return _this;
    }

    /**
     * Sets the value of the {@code _this} property (the target managed object).
     *
     * @param value
     *     allowed object is
     *     {@link ManagedObjectReference }
     *
     */
    public void setThis(ManagedObjectReference value) {
        this._this = value;
    }

    /**
     * Gets the value of the datastore property.
     *
     * @return
     *     possible object is
     *     {@link ManagedObjectReference }
     *
     */
    public ManagedObjectReference getDatastore() {
        return datastore;
    }

    /**
     * Sets the value of the datastore property.
     *
     * @param value
     *     allowed object is
     *     {@link ManagedObjectReference }
     *
     */
    public void setDatastore(ManagedObjectReference value) {
        this.datastore = value;
    }

}
apache-2.0
JulienDeray/sweep_algorythm
src/fr/emn/fil/model/Constraint.java
2407
/** * Contraintes, zones (délimitées en abcisse et ordonnée) où placer des rectangles de largeur width et de hauteur height. * * Created with IntelliJ IDEA. * Autor: julienderay * Company : SERLI * Date: 13/10/14 * Time: 08:56 */ package fr.emn.fil.model; public class Constraint { //Arguments private int xMin; private int xMax; private int yMin; private int yMax; private int height; private int width; //Constructeurs public Constraint(int xMin, int xMax, int yMin, int yMax, int width, int height) { this.xMin = xMin; this.xMax = xMax; this.yMin = yMin; this.yMax = yMax; this.height = height; this.width = width; } public Constraint(int width, int height) { this.height = height; this.width = width; } // Getters - Setters public int getxMin() { return xMin; } public void setxMin(int x) { this.xMin = x; } public int getxMax() { return xMax; } public void setxMax(int x) { this.xMax = x; } public int getyMin() { return yMin; } public int getyMax() { return yMax; } public int getHeight() { return height; } public int getWidth() { return width; } // Méthodes /** * Retourne le détail des arguments de la contrainte sous forme de String * @return strinf du détail */ @Override public String toString() { return "Rectangle{" + "xMin=" + xMin + ", xMax=" + xMax + ", yMin=" + yMin + ", yMax=" + yMax + ", width=" + width + ", height=" + height + '}'; } /** * Teste l'égalité de deux contraintes/rectangles * @param o la contrainte à comparer * @return le résultat du test d'égalité */ @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof Constraint)) return false; Constraint that = (Constraint) o; if (height != that.height) return false; if (width != that.width) return false; if (xMax != that.xMax) return false; if (xMin != that.xMin) return false; if (yMax != that.yMax) return false; if (yMin != that.yMin) return false; return true; } }
apache-2.0
arundh93/twu-biblioteca-arundhingra
test/com/twu/biblioteca/menu/CheckOutTest.java
909
package com.twu.biblioteca.menu;

import com.twu.biblioteca.model.Library;
import com.twu.biblioteca.model.Login;
import com.twu.biblioteca.view.*;
import org.junit.Test;
import org.mockito.Mockito;

/**
 * Unit test for the {@link CheckOut} menu command, using Mockito doubles
 * for the library, the login session and the views.
 */
public class CheckOutTest {

    @Test
    public void checkOutCommandCanCheckOutBook() {
        // collaborators are all mocked; only the CheckOut command itself is real
        Library library = Mockito.mock(Library.class);
        Login session = Mockito.mock(Login.class);
        ViewInterface menuView = Mockito.mock(ViewInterface.class);
        View itemView = Mockito.mock(View.class);

        CheckOut command = new CheckOut(library, session, itemView, "book");

        // the user types a title, and the library accepts the check-out
        Mockito.when(itemView.takeInputOfItem(Mockito.anyString())).thenReturn("prodigal daughter");
        Mockito.when(library.checkOutLibraryItem("prodigal daughter", session)).thenReturn(true);

        command.execute(menuView);

        // a successful check-out must be reported to the user
        Mockito.verify(itemView).showCheckOutMessage("book");
    }
}
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Framework/Project/src/main/java/ghidra/project/test/TestProjectManager.java
1408
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.project.test;

import ghidra.framework.model.ProjectManager;
import ghidra.framework.project.DefaultProjectManager;

/** This class exists to open access to the {@link DefaultProjectManager} for tests */
public class TestProjectManager extends DefaultProjectManager {

	// lazily-created shared instance; guarded by the synchronized accessor below
	private static ProjectManager projectManager;

	// Lazy singleton accessor; synchronized so concurrent test setup cannot
	// create two instances.
	public synchronized static ProjectManager get() {

		// TODO make a static test manager if needed
		//
		// Not sure the best way to proceed here.  The old behavior was to have a shared
		// project manager.  Having this here made migration easier.  There should be no reason
		// to have a static, shared project manager.
		//
		if (projectManager == null) {
			projectManager = new TestProjectManager();
		}
		return projectManager;
	}

	// private: instances are only obtained through get()
	private TestProjectManager() {
		super();
	}
}
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Framework/SoftwareModeling/src/main/java/ghidra/app/plugin/assembler/Assembler.java
9233
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.plugin.assembler;

import java.util.Collection;

import ghidra.app.plugin.assembler.sleigh.parse.AssemblyParseResult;
import ghidra.app.plugin.assembler.sleigh.sem.*;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressOverflowException;
import ghidra.program.model.listing.Instruction;
import ghidra.program.model.listing.InstructionIterator;
import ghidra.program.model.mem.MemoryAccessException;

/**
 * The primary interface for performing assembly in Ghidra.
 *
 * <p>
 * Use the {@link Assemblers} class to obtain a suitable implementation for a given program or
 * language.
 */
public interface Assembler {

	/**
	 * Assemble a sequence of instructions and place them at the given address.
	 *
	 * <p>
	 * This method is only valid if the assembler is bound to a program. An instance may optionally
	 * implement this method without a program binding. In that case, the returned iterator will
	 * refer to pseudo instructions.
	 *
	 * <p>
	 * NOTE: There must be an active transaction on the bound program for this method to succeed.
	 *
	 * @param at the location where the resulting instructions should be placed
	 * @param listing a new-line separated or array sequence of instructions
	 * @return an iterator over the resulting instructions
	 * @throws AssemblySyntaxException a textual instruction is not well-formed
	 * @throws AssemblySemanticException a well-formed instruction cannot be assembled
	 * @throws MemoryAccessException there is an issue writing the result to program memory
	 * @throws AddressOverflowException the resulting block is beyond the valid address range
	 */
	public InstructionIterator assemble(Address at, String... listing)
			throws AssemblySyntaxException, AssemblySemanticException, MemoryAccessException,
			AddressOverflowException;

	/**
	 * Assemble a line instruction at the given address.
	 *
	 * <p>
	 * This method is valid with or without a bound program. Even if bound, the program is not
	 * modified; however, the appropriate context information is taken from the bound program.
	 * Without a program, the language's default context is taken at the given location.
	 *
	 * @param at the location of the start of the instruction
	 * @param line the textual assembly code
	 * @return the binary machine code, suitable for placement at the given address
	 * @throws AssemblySyntaxException the textual instruction is not well-formed
	 * @throws AssemblySemanticException the well-formed instruction cannot be assembled
	 */
	public byte[] assembleLine(Address at, String line)
			throws AssemblySyntaxException, AssemblySemanticException;

	/**
	 * Assemble a line instruction at the given address, assuming the given context.
	 *
	 * <p>
	 * This method works like {@link #assembleLine(Address, String)} except that it allows you to
	 * override the assumed context at that location.
	 *
	 * @param at the location of the start of the instruction
	 * @param line the textual assembly code
	 * @param ctx the context register value at the start of the instruction
	 * @return the results of semantic resolution (from all parse results)
	 * @throws AssemblySyntaxException the textual instruction is not well-formed
	 * @throws AssemblySemanticException the well-formed instruction cannot be assembled
	 */
	public byte[] assembleLine(Address at, String line, AssemblyPatternBlock ctx)
			throws AssemblySemanticException, AssemblySyntaxException;

	/**
	 * Parse a line instruction.
	 *
	 * <p>
	 * Generally, you should just use {@link #assembleLine(Address, String)}, but if you'd like
	 * access to the parse trees outside of an {@link AssemblySelector}, then this may be an
	 * acceptable option. Most notably, this is an excellent way to obtain suggestions for
	 * auto-completion.
	 *
	 * <p>
	 * Each item in the returned collection is either a complete parse tree, or a syntax error
	 * Because all parse paths are attempted, it's possible to get many mixed results. For example,
	 * The input line may be a valid instruction; however, there may be suggestions to continue the
	 * line toward another valid instruction.
	 *
	 * @param line the line (or partial line) to parse
	 * @return the results of parsing
	 */
	public Collection<AssemblyParseResult> parseLine(String line);

	/**
	 * Resolve a given parse tree at the given address, assuming the given context
	 *
	 * <p>
	 * Each item in the returned collection is either a completely resolved instruction, or a
	 * semantic error. Because all resolutions are attempted, it's possible to get many mixed
	 * results.
	 *
	 * <p>
	 * NOTE: The resolved instructions are given as masks and values. Where the mask does not cover,
	 * you can choose any value.
	 *
	 * @param parse a parse result giving a valid tree
	 * @param at the location of the start of the instruction
	 * @param ctx the context register value at the start of the instruction
	 * @return the results of semantic resolution
	 */
	public AssemblyResolutionResults resolveTree(AssemblyParseResult parse, Address at,
			AssemblyPatternBlock ctx);

	/**
	 * Resolve a given parse tree at the given address.
	 *
	 * <p>
	 * Each item in the returned collection is either a completely resolved instruction, or a
	 * semantic error. Because all resolutions are attempted, it's possible to get many mixed
	 * results.
	 *
	 * <p>
	 * NOTE: The resolved instructions are given as masks and values. Where the mask does not cover,
	 * you can choose any value.
	 *
	 * @param parse a parse result giving a valid tree
	 * @param at the location of the start of the instruction
	 * @return the results of semantic resolution
	 */
	public AssemblyResolutionResults resolveTree(AssemblyParseResult parse, Address at);

	/**
	 * Resolve a line instruction at the given address.
	 *
	 * <p>
	 * This method works like {@link #resolveLine(Address, String, AssemblyPatternBlock)}, except
	 * that it derives the context using {@link #getContextAt(Address)}.
	 *
	 * @param at the location of the start of the instruction
	 * @param line the textual assembly code
	 * @return the collection of semantic resolution results
	 * @throws AssemblySyntaxException the textual instruction is not well-formed
	 */
	public AssemblyResolutionResults resolveLine(Address at, String line)
			throws AssemblySyntaxException;

	/**
	 * Resolve a line instruction at the given address, assuming the given context.
	 *
	 * <p>
	 * This method works like {@link #assembleLine(Address, String, AssemblyPatternBlock)}, except
	 * that it returns all possible resolutions for the parse trees that pass the
	 * {@link AssemblySelector}.
	 *
	 * @param at the location of the start of the instruction
	 * @param line the textual assembly code
	 * @param ctx the context register value at the start of the instruction
	 * @return the collection of semantic resolution results
	 * @throws AssemblySyntaxException the textual instruction is not well-formed
	 */
	public AssemblyResolutionResults resolveLine(Address at, String line, AssemblyPatternBlock ctx)
			throws AssemblySyntaxException;

	/**
	 * Place a resolved (and fully-masked) instruction into the bound program.
	 *
	 * <p>
	 * This method is not valid without a program binding. Also, this method must be called during a
	 * program database transaction.
	 *
	 * @param res the resolved and fully-masked instruction
	 * @param at the location of the start of the instruction
	 * @return the new {@link Instruction} code unit
	 * @throws MemoryAccessException there is an issue writing the result to program memory
	 */
	public Instruction patchProgram(AssemblyResolvedConstructor res, Address at)
			throws MemoryAccessException;

	/**
	 * Place instruction bytes into the bound program.
	 *
	 * <p>
	 * This method is not valid without a program binding. Also, this method must be called during a
	 * program database transaction.
	 *
	 * @param insbytes the instruction data
	 * @param at the location of the start of the instruction
	 * @return an iterator over the disassembled instructions
	 * @throws MemoryAccessException there is an issue writing the result to program memory
	 */
	public InstructionIterator patchProgram(byte[] insbytes, Address at)
			throws MemoryAccessException;

	/**
	 * Get the context at a given address
	 *
	 * <p>
	 * If there is a program binding, this will extract the actual context at the given address.
	 * Otherwise, it will obtain the default context at the given address for the language.
	 *
	 * @param addr the address
	 * @return the context
	 */
	public AssemblyPatternBlock getContextAt(Address addr);
}
apache-2.0
KeyBridge/lib-openssrf
src/main/java/us/gov/dod/standard/ssrf/_3_1/metadata/domains/dBWHz.java
4182
/*
 * Copyright 2015 Key Bridge LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package us.gov.dod.standard.ssrf._3_1.metadata.domains;

import java.math.BigDecimal;
import java.util.Objects;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import us.gov.dod.standard.ssrf._3_1.adapter.types.XmlAdapterDBWHZ;
import us.gov.dod.standard.ssrf._3_1.metadata.AMetadata;

/**
 * The SSRF dBWHz data type.
 * <p>
 * NOTE(review): the lowercase class name deliberately mirrors the SSRF data
 * type name; it violates Java naming conventions but cannot be changed without
 * breaking the XML binding and existing callers.
 * <p>
 * @author Key Bridge LLC &lt;developer@keybridge.ch&gt;
 * @version 3.1.0, 03/30/2015
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "dBWHz", propOrder = {"value"})
public class dBWHz extends AMetadata<dBWHz> implements Comparable<dBWHz> {

  /**
   * The value property. Marshalled as the XML element text via
   * {@link XmlAdapterDBWHZ}.
   */
  @XmlValue
  @XmlJavaTypeAdapter(XmlAdapterDBWHZ.class)
  protected BigDecimal value;

  /**
   * Construct a new, empty dBWHz instance.
   */
  public dBWHz() {
  }

  /**
   * Construct a new dBWHz instance with the given initial value.
   * <p>
   * @param value The initial value.
   */
  public dBWHz(BigDecimal value) {
    this.value = value;
  }

  /**
   * Construct a new dBWHz instance with the given initial value.
   * <p>
   * Note: throws NullPointerException if {@code value} is null (auto-unboxing).
   * <p>
   * @param value The initial value.
   */
  public dBWHz(Double value) {
    this.value = BigDecimal.valueOf(value);
  }

  /**
   * Get the value of the value property.
   * <p>
   * @return the value of the value property
   */
  public BigDecimal getValue() {
    return value;
  }

  /**
   * Set the value of the value property.
   * <p>
   * @param value the value
   */
  public void setValue(BigDecimal value) {
    this.value = value;
  }

  /**
   * Determine if the value property is configured.
   * <p>
   * @return TRUE if the value is set, FALSE if the value is null
   */
  public boolean isSetValue() {
    return this.value != null;
  }

  /**
   * Determine if the required fields in this SSRF data type instance are set.
   * <p>
   * Note that this method only checks for the presence of required information;
   * this method does not validate the information format.
   * <p>
   * @return TRUE if required fields are set, otherwise FALSE
   */
  @Override
  public boolean isSet() {
    return super.isSet() && isSetValue();
  }

  /**
   * Get a string representation of this data wrapper value.
   * <p>
   * @return The current data value. May be null when no value is set.
   */
  @Override
  public String toString() {
    return this.value != null ? this.value.toString() : null;
  }

  //<editor-fold defaultstate="collapsed" desc="Hashcode Equals and Comparable">
  /**
   * Hash code is based upon the value.
   * <p>
   * @return a unique hash code from the value.
   */
  @Override
  public int hashCode() {
    int hash = 7;
    hash = 97 * hash + Objects.hashCode(this.value);
    return hash;
  }

  /**
   * Equality is based upon the value.
   * <p>
   * Note: uses BigDecimal.equals (via Objects.equals), so values that differ
   * only in scale (e.g. 1.0 vs 1.00) are NOT equal.
   * <p>
   * @param obj the other object to compare.
   * @return TRUE if the values match exactly.
   */
  @Override
  public boolean equals(Object obj) {
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    return Objects.equals(this.value, ((dBWHz) obj).getValue());
  }

  /**
   * Comparison and sorting.
   * <p>
   * NOTE(review): returns -1 for a null argument instead of throwing
   * NullPointerException as the Comparable contract specifies; also, null
   * values sort after non-null. Callers may rely on this lenient behavior,
   * so it is documented rather than changed. Because equals uses
   * BigDecimal.equals while this uses compareTo, the ordering is not
   * strictly consistent with equals for values differing only in scale.
   * <p>
   * @param obj the other object to compare
   * @return the sorting order.
   */
  @Override
  public int compareTo(dBWHz obj) {
    if (obj == null) {
      return -1;
    }
    if (this.value == null) {
      return 1;
    }
    return this.value.compareTo(obj.getValue());
  }//</editor-fold>

}
apache-2.0
Jonsnow21/Sunshine
app/src/androidTest/java/com/example/android/sunshine/app/data/TestDb.java
9759
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.sunshine.app.data; import android.content.ContentValues; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.test.AndroidTestCase; import java.util.HashSet; public class TestDb extends AndroidTestCase { public static final String LOG_TAG = TestDb.class.getSimpleName(); // Since we want each test to start with a clean slate void deleteTheDatabase() { mContext.deleteDatabase(WeatherDbHelper.DATABASE_NAME); } /* This function gets called before each test is executed to delete the database. This makes sure that we always have a clean test. */ public void setUp() { deleteTheDatabase(); } /* Students: Uncomment this test once you've written the code to create the Location table. Note that you will have to have chosen the same column names that I did in my solution for this test to compile, so if you haven't yet done that, this is a good time to change your column names to match mine. Note that this only tests that the Location table has the correct columns, since we give you the code for the weather table. 
This test does not look at the */ public void testCreateDb() throws Throwable { // build a HashSet of all of the table names we wish to look for // Note that there will be another table in the DB that stores the // Android metadata (db version information) final HashSet<String> tableNameHashSet = new HashSet<String>(); tableNameHashSet.add(WeatherContract.LocationEntry.TABLE_NAME); tableNameHashSet.add(WeatherContract.WeatherEntry.TABLE_NAME); mContext.deleteDatabase(WeatherDbHelper.DATABASE_NAME); SQLiteDatabase db = new WeatherDbHelper( this.mContext).getWritableDatabase(); assertEquals(true, db.isOpen()); // have we created the tables we want? Cursor c = db.rawQuery("SELECT name FROM sqlite_master WHERE type='table'", null); assertTrue("Error: This means that the database has not been created correctly", c.moveToFirst()); // verify that the tables have been created do { tableNameHashSet.remove(c.getString(0)); } while( c.moveToNext() ); // if this fails, it means that your database doesn't contain both the location entry // and weather entry tables assertTrue("Error: Your database was created without both the location entry and weather entry tables", tableNameHashSet.isEmpty()); // now, do our tables contain the correct columns? 
c = db.rawQuery("PRAGMA table_info(" + WeatherContract.LocationEntry.TABLE_NAME + ")", null); assertTrue("Error: This means that we were unable to query the database for table information.", c.moveToFirst()); // Build a HashSet of all of the column names we want to look for final HashSet<String> locationColumnHashSet = new HashSet<String>(); locationColumnHashSet.add(WeatherContract.LocationEntry._ID); locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_CITY_NAME); locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_COORD_LAT); locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_COORD_LONG); locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING); int columnNameIndex = c.getColumnIndex("name"); do { String columnName = c.getString(columnNameIndex); locationColumnHashSet.remove(columnName); } while(c.moveToNext()); // if this fails, it means that your database doesn't contain all of the required location // entry columns assertTrue("Error: The database doesn't contain all of the required location entry columns", locationColumnHashSet.isEmpty()); db.close(); } /* Students: Here is where you will build code to test that we can insert and query the location database. We've done a lot of work for you. You'll want to look in TestUtilities where you can uncomment out the "createNorthPoleLocationValues" function. You can also make use of the ValidateCurrentRecord function from within TestUtilities. 
*/ public void testLocationTable() { // First step: Get reference to writable database WeatherDbHelper dbHelper = new WeatherDbHelper(mContext); SQLiteDatabase db = dbHelper.getWritableDatabase(); // Create ContentValues of what you want to insert // (you can use the createNorthPoleLocationValues if you wish) ContentValues testValues = TestUtilities.createNorthPoleLocationValues(); // Insert ContentValues into database and get a row ID back long locationRowId; locationRowId = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, testValues); assertTrue(locationRowId != -1); // Query the database and receive a Cursor back Cursor cursor = db.query( WeatherContract.LocationEntry.TABLE_NAME, // Table to Query null, // all columns null, // Columns for the "where" clause null, // Values for the "where" clause null, // columns to group by null, // columns to filter by row groups null // sort order ); // Move the cursor to a valid database row assertTrue( "Error: No Records returned from location query", cursor.moveToFirst() ); // Validate data in resulting Cursor with the original ContentValues // (you can use the validateCurrentRecord function in TestUtilities to validate the // query if you like) TestUtilities.validateCurrentRecord("Error: Location Query Validation Failed", cursor, testValues); // Finally, close the cursor and database assertFalse( "Error: More than one record returned from location query", cursor.moveToNext() ); cursor.close(); db.close(); } /* Students: Here is where you will build code to test that we can insert and query the database. We've done a lot of work for you. You'll want to look in TestUtilities where you can use the "createWeatherValues" function. You can also make use of the validateCurrentRecord function from within TestUtilities. */ public void testWeatherTable() { // First insert the location, and then use the locationRowId to insert // the weather. Make sure to cover as many failure cases as you can. 
// Instead of rewriting all of the code we've already written in testLocationTable // we can move this code to insertLocation and then call insertLocation from both // tests. Why move it? We need the code to return the ID of the inserted location // and our testLocationTable can only return void because it's a test. // First step: Get reference to writable database WeatherDbHelper dbHelper = new WeatherDbHelper(mContext); SQLiteDatabase db = dbHelper.getWritableDatabase(); // Create ContentValues of what you want to insert // (you can use the createWeatherValues TestUtilities function if you wish) long locationRowId = TestUtilities.insertNorthPoleLocationValues(mContext); ContentValues testValues = TestUtilities.createWeatherValues(locationRowId); // Insert ContentValues into database and get a row ID back long weatherRowId = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, testValues); assertTrue(weatherRowId != -1); // Query the database and receive a Cursor back Cursor cursor = db.query( WeatherContract.WeatherEntry.TABLE_NAME, null, null, null, null, null, null ); // Move the cursor to a valid database row assertTrue( "Error: No Records returned from weather query", cursor.moveToFirst() ); // Validate data in resulting Cursor with the original ContentValues // (you can use the validateCurrentRecord function in TestUtilities to validate the // query if you like) TestUtilities.validateCurrentRecord("Error: Weather Query Validation Failed", cursor, testValues); // Finally, close the cursor and database assertFalse( "Error: More than one record returned from weather query", cursor.moveToNext() ); cursor.close(); db.close(); } /* Students: This is a helper method for the testWeatherTable quiz. You can move your code from testLocationTable to here so that you can call this code from both testWeatherTable and testLocationTable. */ public long insertLocation() { return -1L; } }
apache-2.0
olamy/maven
maven-model-builder/src/test/java/org/apache/maven/model/path/DefaultUrlNormalizerTest.java
2913
package org.apache.maven.model.path;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import org.junit.Test;

/**
 * Tests for {@link DefaultUrlNormalizer}: null-safety, trailing-slash
 * preservation, {@code ..} parent-reference collapsing, and preservation of
 * intentional double slashes (SCM URLs, UNC paths).
 *
 * @author Benjamin Bentmann
 */
public class DefaultUrlNormalizerTest
{

    private UrlNormalizer normalizer = new DefaultUrlNormalizer();

    // Convenience shorthand for the normalizer under test.
    private String normalize( String url )
    {
        return normalizer.normalize( url );
    }

    // A null input must pass through as null rather than throw.
    @Test
    public void testNullSafe()
    {
        assertNull( normalize( null ) );
    }

    // Trailing slashes are significant and must be neither added nor removed.
    @Test
    public void testTrailingSlash()
    {
        assertEquals( "", normalize( "" ) );
        assertEquals( "http://server.org/dir", normalize( "http://server.org/dir" ) );
        assertEquals( "http://server.org/dir/", normalize( "http://server.org/dir/" ) );
    }

    // "parent/.." segments collapse; note doubled slashes adjacent to the
    // collapsed segment are deliberately left untouched.
    @Test
    public void testRemovalOfParentRefs()
    {
        assertEquals( "http://server.org/child", normalize( "http://server.org/parent/../child" ) );
        assertEquals( "http://server.org/child", normalize( "http://server.org/grand/parent/../../child" ) );

        assertEquals( "http://server.org//child", normalize( "http://server.org/parent/..//child" ) );
        assertEquals( "http://server.org/child", normalize( "http://server.org/parent//../child" ) );
    }

    // Double slashes can be meaningful (scm:hg ssh paths, UNC file URLs,
    // multi-URL scm specs) and must survive normalization.
    @Test
    public void testPreservationOfDoubleSlashes()
    {
        assertEquals( "scm:hg:ssh://localhost//home/user", normalize( "scm:hg:ssh://localhost//home/user" ) );
        assertEquals( "file:////UNC/server", normalize( "file:////UNC/server" ) );
        assertEquals( "[fetch=]http://server.org/[push=]ssh://server.org/",
                      normalize( "[fetch=]http://server.org/[push=]ssh://server.org/" ) );
    }

    // Traversal past the root cannot go anywhere; the root itself remains.
    @Test
    public void absolutePathTraversalPastRootIsOmitted()
    {
        assertEquals( "/", normalize("/../" ) );
    }

    @Test
    public void parentDirectoryRemovedFromRelativeUriReference()
    {
        assertEquals( "", normalize( "a/../" ) );
    }

    // A leading ".." has no parent segment to consume, so it is preserved.
    @Test
    public void leadingParentDirectoryNotRemovedFromRelativeUriReference()
    {
        assertEquals( "../", normalize( "../" ) );
    }
}
apache-2.0
riengcs/zk-tutorial
src/main/java/com/zk/tutorial/component/panel/package-info.java
70
/**
 * Panel component examples for the ZK tutorial.
 *
 * @author csrieng
 */
package com.zk.tutorial.component.panel;
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-directory/src/main/java/com/amazonaws/services/directory/model/transform/RejectSharedDirectoryResultJsonUnmarshaller.java
2925
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.directory.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.directory.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * RejectSharedDirectoryResult JSON Unmarshaller
 * <p>
 * NOTE: this class is generated by the AWS SDK code generator; do not
 * hand-edit — changes will be lost on regeneration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RejectSharedDirectoryResultJsonUnmarshaller implements Unmarshaller<RejectSharedDirectoryResult, JsonUnmarshallerContext> {

    // Builds a RejectSharedDirectoryResult from the JSON token stream,
    // consuming tokens until the parser returns to the result's own depth.
    public RejectSharedDirectoryResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        RejectSharedDirectoryResult rejectSharedDirectoryResult = new RejectSharedDirectoryResult();

        // Depth bookkeeping: fields belong to this result only while the
        // parser sits exactly one level below where unmarshalling started.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means an absent result body; return the empty object.
        if (token == VALUE_NULL) {
            return rejectSharedDirectoryResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("SharedDirectoryId", targetDepth)) {
                    context.nextToken();
                    rejectSharedDirectoryResult.setSharedDirectoryId(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the parser has closed back out to the depth at
                // which this unmarshaller was invoked.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return rejectSharedDirectoryResult;
    }

    private static RejectSharedDirectoryResultJsonUnmarshaller instance;

    // Lazily-created shared instance; the unmarshaller itself is stateless.
    public static RejectSharedDirectoryResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new RejectSharedDirectoryResultJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
baade-org/eel
eel-orm/src/main/java/org/baade/ell/orm/Entity.java
371
package org.baade.ell.orm;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks a type as an ORM entity. Retained at runtime so the ORM layer can
 * read the mapping via reflection.
 * <p>
 * Created by zz on 2017/5/27.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
public @interface Entity {

    // Name of the database the entity belongs to (required; no default).
    String dbName();

    // Name of the mapped table (required; no default).
    String tableName();
}
apache-2.0
noemus/kotlin-eclipse
kotlin-eclipse-ui/src/org/jetbrains/kotlin/ui/editors/selection/KotlinSelectPreviousAction.java
1589
package org.jetbrains.kotlin.ui.editors.selection;

import org.eclipse.jdt.internal.ui.javaeditor.selectionactions.SelectionHistory;
import org.eclipse.jdt.ui.actions.IJavaEditorActionDefinitionIds;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.ui.editors.KotlinEditor;
import org.jetbrains.kotlin.ui.editors.selection.handlers.KotlinElementSelectioner;

import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;

/**
 * Editor action that extends the current selection to the previous sibling
 * element, or to the enclosing element when no previous sibling candidate
 * exists. Bound to the standard JDT "select previous" command.
 */
public class KotlinSelectPreviousAction extends KotlinSemanticSelectionAction {
    private static final String ACTION_DESCRIPTION = "Select previous element";

    public static final String SELECT_PREVIOUS_TEXT = "SelectPrevious";

    public KotlinSelectPreviousAction(KotlinEditor editor, SelectionHistory history) {
        super(editor, history);
        setText(ACTION_DESCRIPTION);
        setActionDefinitionId(IJavaEditorActionDefinitionIds.SELECT_PREVIOUS);
    }

    /**
     * Searches the enclosing element's children backwards for a selection
     * candidate; falls back to selecting the whole enclosing element when
     * none is found.
     */
    @Override
    protected @NotNull TextRange runInternalSelection(PsiElement enclosingElement, TextRange selectedRange,
            String selectedText) {
        PsiElement selectionCandidate = findSelectionCandidate(enclosingElement,
                PsiElementChildrenIterable.backwardChildrenIterator(enclosingElement), selectedRange, selectedText);
        if (selectionCandidate == null) {
            return KotlinElementSelectioner.INSTANCE.selectEnclosing(enclosingElement, selectedRange);
        }
        return KotlinElementSelectioner.INSTANCE.selectPrevious(enclosingElement, selectionCandidate, selectedRange);
    }
}
apache-2.0
shakamunyi/drill
exec/java-exec/src/main/java/org/apache/drill/exec/store/VectorHolder.java
3259
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.store; import org.apache.drill.exec.vector.AllocationHelper; import org.apache.drill.exec.vector.RepeatedFixedWidthVectorLike; import org.apache.drill.exec.vector.RepeatedMutator; import org.apache.drill.exec.vector.RepeatedVariableWidthVectorLike; import org.apache.drill.exec.vector.ValueVector; public class VectorHolder { private int count; private int groupCount; private int length; private ValueVector vector; private int currentLength; private boolean repeated; public VectorHolder(int length, ValueVector vector) { this.length = length; this.vector = vector; if (vector instanceof RepeatedFixedWidthVectorLike || vector instanceof RepeatedVariableWidthVectorLike) { repeated = true; } } public VectorHolder(ValueVector vector) { this.length = vector.getValueCapacity(); this.vector = vector; if (vector instanceof RepeatedFixedWidthVectorLike || vector instanceof RepeatedVariableWidthVectorLike) { repeated = true; } } public boolean isRepeated() { return repeated; } public ValueVector getValueVector() { return vector; } public void incAndCheckLength(int newLength) { if (!hasEnoughSpace(newLength)) { throw new BatchExceededException(length, vector.getBufferSize() + 
newLength); } currentLength += newLength; count += 1; } public void setGroupCount(int groupCount) { if (this.groupCount < groupCount) { RepeatedMutator mutator = (RepeatedMutator) vector.getMutator(); while (this.groupCount < groupCount) { mutator.startNewGroup(++this.groupCount); } } } public boolean hasEnoughSpace(int newLength) { return length >= currentLength + newLength; } public int getLength() { return length; } public void reset() { currentLength = 0; count = 0; allocateNew(length); } public void populateVectorLength() { ValueVector.Mutator mutator = vector.getMutator(); if (vector instanceof RepeatedFixedWidthVectorLike || vector instanceof RepeatedVariableWidthVectorLike) { mutator.setValueCount(groupCount); } else { mutator.setValueCount(count); } } public void allocateNew(int valueLength) { AllocationHelper.allocate(vector, valueLength, 10, 5); } public void allocateNew(int valueLength, int repeatedPerTop) { AllocationHelper.allocate(vector, valueLength, 10, repeatedPerTop); } }
apache-2.0
Whitespell/java-server-sdk
src/whitespell/net/websockets/socketio/transport/XHRPollingTransport.java
9514
/** * Copyright 2012 Nikita Koksharov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package whitespell.net.websockets.socketio.transport; import whitespell.net.websockets.socketio.Configuration; import whitespell.net.websockets.socketio.DisconnectableHub; import whitespell.net.websockets.socketio.SocketIOClient; import whitespell.net.websockets.socketio.Transport; import whitespell.net.websockets.socketio.ack.AckManager; import whitespell.net.websockets.socketio.handler.AuthorizeHandler; import whitespell.net.websockets.socketio.messages.PacketsMessage; import whitespell.net.websockets.socketio.messages.XHRErrorMessage; import whitespell.net.websockets.socketio.messages.XHROutMessage; import whitespell.net.websockets.socketio.parser.ErrorAdvice; import whitespell.net.websockets.socketio.parser.ErrorReason; import whitespell.net.websockets.socketio.parser.Packet; import whitespell.net.websockets.socketio.parser.PacketType; import whitespell.net.websockets.socketio.scheduler.CancelableScheduler; import whitespell.net.websockets.socketio.scheduler.SchedulerKey; import whitespell.net.websockets.socketio.scheduler.SchedulerKey.Type; import io.netty.buffer.ByteBuf; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandler.Sharable; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpHeaders; import 
io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.QueryStringDecoder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; @Sharable public class XHRPollingTransport extends BaseTransport { public static final String NAME = "xhr-polling"; private final Logger log = LoggerFactory.getLogger(getClass()); private final Map<UUID, XHRPollingClient> sessionId2Client = new ConcurrentHashMap<UUID, XHRPollingClient>(); private final CancelableScheduler scheduler; private final AckManager ackManager; private final AuthorizeHandler authorizeHandler; private final DisconnectableHub disconnectable; private final Configuration configuration; private final String path; public XHRPollingTransport(String connectPath, AckManager ackManager, DisconnectableHub disconnectable, CancelableScheduler scheduler, AuthorizeHandler authorizeHandler, Configuration configuration) { this.path = connectPath + NAME + "/"; this.ackManager = ackManager; this.authorizeHandler = authorizeHandler; this.configuration = configuration; this.disconnectable = disconnectable; this.scheduler = scheduler; } @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { if (msg instanceof FullHttpRequest) { FullHttpRequest req = (FullHttpRequest) msg; QueryStringDecoder queryDecoder = new QueryStringDecoder(req.getUri()); if (queryDecoder.path().startsWith(path)) { handleMessage(req, queryDecoder, ctx); req.release(); return; } } ctx.fireChannelRead(msg); } private void handleMessage(FullHttpRequest req, QueryStringDecoder queryDecoder, ChannelHandlerContext ctx) throws IOException { String[] parts = queryDecoder.path().split("/"); if (parts.length > 3) { UUID sessionId = UUID.fromString(parts[4]); String origin = req.headers().get(HttpHeaders.Names.ORIGIN); if 
(queryDecoder.parameters().containsKey("disconnect")) { BaseClient client = sessionId2Client.get(sessionId); client.onChannelDisconnect(); ctx.channel().write(new XHROutMessage(origin, sessionId)); } else if (HttpMethod.POST.equals(req.getMethod())) { onPost(sessionId, ctx, origin, req.content()); } else if (HttpMethod.GET.equals(req.getMethod())) { onGet(sessionId, ctx, origin); } } else { log.warn("Wrong {} method request path: {}, from ip: {}. Channel closed!", req.getMethod(), path, ctx.channel().remoteAddress()); ctx.channel().close(); } } private void scheduleNoop(final UUID sessionId) { SchedulerKey key = new SchedulerKey(Type.POLLING, sessionId); scheduler.cancel(key); scheduler.schedule(key, new Runnable() { @Override public void run() { XHRPollingClient client = sessionId2Client.get(sessionId); if (client != null) { client.send(new Packet(PacketType.NOOP)); } } }, configuration.getPollingDuration(), TimeUnit.SECONDS); } private void scheduleDisconnect(Channel channel, final UUID sessionId) { final SchedulerKey key = new SchedulerKey(Type.CLOSE_TIMEOUT, sessionId); scheduler.cancel(key); ChannelFuture future = channel.closeFuture(); future.addListener(new ChannelFutureListener() { @Override public void operationComplete(ChannelFuture future) throws Exception { scheduler.schedule(key, new Runnable() { @Override public void run() { XHRPollingClient client = sessionId2Client.get(sessionId); if (client != null) { client.onChannelDisconnect(); log.debug("Client: {} disconnected due to connection timeout", sessionId); } } }, configuration.getCloseTimeout(), TimeUnit.SECONDS); } }); } private void onPost(UUID sessionId, ChannelHandlerContext ctx, String origin, ByteBuf content) throws IOException { XHRPollingClient client = sessionId2Client.get(sessionId); if (client == null) { log.debug("Client with sessionId: {} was already disconnected. 
Channel closed!", sessionId); ctx.channel().close(); return; } // release POST response before message processing ctx.channel().writeAndFlush(new XHROutMessage(origin, sessionId)); ctx.pipeline().fireChannelRead(new PacketsMessage(client, content)); } private void onGet(UUID sessionId, ChannelHandlerContext ctx, String origin) { if (!authorizeHandler.isSessionAuthorized(sessionId)) { sendError(ctx, origin, sessionId); return; } XHRPollingClient client = (XHRPollingClient) sessionId2Client.get(sessionId); if (client == null) { client = createClient(origin, ctx.channel(), sessionId); } client.bindChannel(ctx.channel(), origin); scheduleDisconnect(ctx.channel(), sessionId); scheduleNoop(sessionId); } private XHRPollingClient createClient(String origin, Channel channel, UUID sessionId) { XHRPollingClient client = new XHRPollingClient(ackManager, disconnectable, sessionId, Transport.XHRPOLLING); sessionId2Client.put(sessionId, client); client.bindChannel(channel, origin); authorizeHandler.connect(client); log.debug("Client for sessionId: {} was created", sessionId); return client; } private void sendError(ChannelHandlerContext ctx, String origin, UUID sessionId) { log.debug("Client with sessionId: {} was not found! 
Reconnect error response sended", sessionId); Packet packet = new Packet(PacketType.ERROR); packet.setReason(ErrorReason.CLIENT_NOT_HANDSHAKEN); packet.setAdvice(ErrorAdvice.RECONNECT); ctx.channel().write(new XHRErrorMessage(packet, origin, sessionId)); } @Override public void onDisconnect(BaseClient client) { if (client instanceof XHRPollingClient) { UUID sessionId = client.getSessionId(); sessionId2Client.remove(sessionId); SchedulerKey noopKey = new SchedulerKey(Type.POLLING, sessionId); scheduler.cancel(noopKey); SchedulerKey closeTimeoutKey = new SchedulerKey(Type.CLOSE_TIMEOUT, sessionId); scheduler.cancel(closeTimeoutKey); } } public Iterable<SocketIOClient> getAllClients() { Collection<XHRPollingClient> clients = sessionId2Client.values(); return getAllClients(clients); } }
apache-2.0
rbygrave/avaje-counter
src/main/java/org/avaje/counter/ValueCounter.java
1825
package org.avaje.counter;

import java.util.concurrent.atomic.AtomicLong;

import org.avaje.counter.jsr166e.LongAdder;

/**
 * Used to collect timed execution statistics.
 * <p>
 * It is intended for high concurrent updates to the statistics and relatively infrequent reads.
 * </p>
 */
public class ValueCounter {

  protected final LongAdder count = new LongAdder();

  protected final LongAdder total = new LongAdder();

  // Whether to track the maximum observed value (skipped when not needed
  // to avoid the extra atomic traffic in add()).
  protected final boolean collectMax;

  protected final AtomicLong max = new AtomicLong();

  protected final AtomicLong startTime = new AtomicLong(System.currentTimeMillis());

  public ValueCounter(boolean collectMax) {
    this.collectMax = collectMax;
  }

  /**
   * Add a value. Usually the value is Time or Bytes etc.
   */
  public void add(long value) {

    count.increment();
    total.add(value);
    if (collectMax) {
      // CAS loop so a concurrent add() cannot overwrite a larger maximum.
      // The previous unsynchronized get()/set() pair was a check-then-act
      // race that could lose the true maximum under contention.
      long current;
      while (value > (current = max.get()) && !max.compareAndSet(current, value)) {
        // another thread raced us; re-read and retry
      }
    }
  }

  /** Return true if no values have been collected since the last reset. */
  public boolean isEmpty() {
    return count.sum() == 0;
  }

  /**
   * Return the current statistics reseting the internal values if reset is true.
   */
  public CounterStatistics getStatistics(boolean reset) {

    if (reset) {
      long now = System.currentTimeMillis();
      return new CounterStatistics(startTime.getAndSet(now), count.sumThenReset(), total.sumThenReset(), max.getAndSet(0));
    } else {
      return new CounterStatistics(startTime.get(), count.sum(), total.sum(), max.get());
    }
  }

  /** Reset all counters and restart the collection window. */
  public void reset() {
    startTime.set(System.currentTimeMillis());
    max.set(0);
    count.reset();
    total.reset();
  }

  public long getStartTime() {
    return startTime.get();
  }

  public long getCount() {
    return count.sum();
  }

  public long getTotal() {
    return total.sum();
  }

  public long getMax() {
    return max.get();
  }
}
apache-2.0
Chicago/opengrid-svc-template
opengridservice/src/main/java/org/opengrid/data/ListOfValuesDataProvider.java
1759
package org.opengrid.data; import java.util.ArrayList; import java.util.List; import org.bson.Document; import org.opengrid.constants.DB; import org.opengrid.constants.Exceptions; import org.opengrid.exception.ServiceException; import org.opengrid.util.ExceptionUtil; import com.mongodb.BasicDBObject; import com.mongodb.client.FindIterable; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import com.mongodb.util.JSON; public class ListOfValuesDataProvider { public List<KeyValuePair> getList(String listId) { //basic schema is MongoDBHelper ds = new MongoDBHelper(); MongoDatabase db = ds.getConnection(); try { MongoCollection<Document> c = db.getCollection(DB.LOV_COLLECTION_NAME); BasicDBObject q = (BasicDBObject) JSON.parse("{ \"listId\": \"" + listId + "\"}"); //now use limit to limit result sets FindIterable<Document> cur = c.find(q).limit(1); if (cur.iterator().hasNext()) { Document d = cur.first(); @SuppressWarnings("unchecked") List<Document> a = (List<Document>) d.get("keyValues"); return getKeyValuePairs(a); } else { throw new ServiceException("Cannot find list of values with List Id '" + listId + "'."); } } catch (Exception ex) { ex.printStackTrace(); //wrap and bubble up throw ExceptionUtil.getException(Exceptions.ERR_DB, ex.getMessage()); } finally { ds.closeConnection(); } } private List<KeyValuePair> getKeyValuePairs(List<Document> a) { List<KeyValuePair> l = new ArrayList<KeyValuePair>(); //perform transformation for (Document d: a) { l.add( new KeyValuePair ( d.getString("key"), d.getString("value") ) ); } return l; } }
apache-2.0
Syncleus/aparapi
src/test/java/com/aparapi/codegen/test/CallStaticInAnotherClass.java
1602
/**
 * Copyright (c) 2016 - 2018 Syncleus, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.aparapi.codegen.test;

import com.aparapi.Kernel;

// Fixture class: provides a static method in a *different* class so the
// kernel below exercises cross-class static-call translation.
class AnotherClass {
    static public int foo() {
        return 42;
    }
};

/**
 * Code-generation test fixture: a kernel that calls a static method on
 * another class both directly and through an instance method. The trailing
 * OpenCL comment is the expected generated source — do not modify the code
 * or that comment, or the codegen test will fail.
 */
public class CallStaticInAnotherClass extends Kernel {
    int out[] = new int[2];

    // Instance method wrapping the cross-class static call.
    public int doodoo() {
        return AnotherClass.foo();
    }

    public void run() {
        out[0] = AnotherClass.foo() + doodoo();
    }
}
/**{OpenCL{
typedef struct This_s{
   __global int *out;
   int passid;
}This;
int get_pass_id(This *this){
   return this->passid;
}
int com_amd_aparapi_test_AnotherClass__foo(){
   return(42);
}
int com_amd_aparapi_test_CallStaticInAnotherClass__doodoo(This *this){
   return(com_amd_aparapi_test_AnotherClass__foo());
}
__kernel void run(
   __global int *out,
   int passid
){
   This thisStruct;
   This* this=&thisStruct;
   this->out = out;
   this->passid = passid;
   {
      this->out[0]  = com_amd_aparapi_test_AnotherClass__foo() + com_amd_aparapi_test_CallStaticInAnotherClass__doodoo(this);
      return;
   }
}
}OpenCL}**/
apache-2.0
nate-rcl/irplus
ir_web/src/edu/ur/ir/web/util/InstitutionalCollectionPermissionHelper.java
1352
package edu.ur.ir.web.util; import java.util.List; import edu.ur.ir.institution.InstitutionalCollectionSecurityService; import edu.ur.ir.institution.InstitutionalItem; import edu.ur.ir.institution.InstitutionalItemService; import edu.ur.ir.user.IrUser; public class InstitutionalCollectionPermissionHelper { private InstitutionalItemService institutionalItemService; private InstitutionalCollectionSecurityService institutionalCollectionSecurityService; public boolean isInstitutionalCollectionAdmin(IrUser user, Long genericItemId){ List<InstitutionalItem> items = institutionalItemService.getInstitutionalItemsByGenericItemId(genericItemId); for(InstitutionalItem item: items){ Long count = institutionalCollectionSecurityService.hasPermission(item.getInstitutionalCollection(), user, InstitutionalCollectionSecurityService.ADMINISTRATION_PERMISSION); if( count > 0){ return true; } } return false; } public void setInstitutionalItemService( InstitutionalItemService institutionalItemService) { this.institutionalItemService = institutionalItemService; } public void setInstitutionalCollectionSecurityService( InstitutionalCollectionSecurityService institutionalCollectionSecurityService) { this.institutionalCollectionSecurityService = institutionalCollectionSecurityService; } }
apache-2.0
google/intellij-protocol-buffer-editor
python/src/main/java/com/google/devtools/intellij/protoeditor/python/PbPythonGotoDeclarationHandler.java
5499
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.intellij.protoeditor.python; import static java.util.stream.Collectors.toList; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.Multimap; import com.google.devtools.intellij.protoeditor.gencode.ProtoFromSourceComments; import com.google.devtools.intellij.protoeditor.lang.psi.PbElement; import com.google.devtools.intellij.protoeditor.lang.psi.PbFile; import com.google.devtools.intellij.protoeditor.lang.psi.PbSymbol; import com.intellij.codeInsight.navigation.actions.GotoDeclarationHandler; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.editor.Editor; import com.intellij.psi.PsiElement; import com.intellij.psi.util.QualifiedName; import com.jetbrains.python.PythonLanguage; import com.jetbrains.python.psi.PyFile; import java.util.Collection; import java.util.List; import java.util.Map; import org.jetbrains.annotations.Nullable; /** Handles goto declaration from python generated code -> .proto files. 
*/ public final class PbPythonGotoDeclarationHandler implements GotoDeclarationHandler { @Nullable @Override public String getActionText(DataContext context) { return null; } @Nullable @Override public PsiElement[] getGotoDeclarationTargets( @Nullable PsiElement sourceElement, int offset, Editor editor) { if (sourceElement == null) { return null; } if (!sourceElement.getLanguage().is(PythonLanguage.INSTANCE)) { return null; } PyFileReferenceContext context = PyFileReferenceContext.findContext(sourceElement); if (context == null) { return null; } Collection<? extends PbElement> matches = pythonToProto(context); if (matches.isEmpty()) { return null; } return matches.toArray(new PsiElement[0]); } private static ImmutableCollection<? extends PbElement> pythonToProto( PyFileReferenceContext referenceContext) { PyFile file = referenceContext.getFile(); String fileName = file.getName(); Integer apiVersion = null; PbFile protoSource = null; if (fileName.endsWith("_pb2.py")) { protoSource = findSourceOfGeneratedPy(file); apiVersion = 2; } else if (fileName.endsWith("_pb.py")) { protoSource = findSourceOfGeneratedPy(file); apiVersion = 1; } if (protoSource == null) { return ImmutableList.of(); } ImmutableCollection<? extends PbElement> results = locateSymbolInProtoFile(protoSource, referenceContext.getFileLocalSymbol()); if (results.isEmpty() && apiVersion == 1) { return locateWithNormalizedNames(protoSource, referenceContext.getFileLocalSymbol()); } return results; } @Nullable private static PbFile findSourceOfGeneratedPy(PyFile file) { return ProtoFromSourceComments.findProtoOfGeneratedCode("#", file); } private static ImmutableCollection<? 
extends PbElement> locateSymbolInProtoFile( PbFile pbFile, QualifiedName fileLocalSymbol) { if (fileLocalSymbol.getComponents().isEmpty()) { return ImmutableList.of(pbFile); } QualifiedName qualifiedName = pbFile.getPackageQualifiedName().append(fileLocalSymbol); Multimap<QualifiedName, PbSymbol> fileSymbols = pbFile.getLocalQualifiedSymbolMap(); return ImmutableList.copyOf(fileSymbols.get(qualifiedName)); } // For API v1, the code generator converts nested messages like Foo.Bar.Baz to Foo_Bar_Baz. // Try to match with the '.' separators normalized to '_'. private static ImmutableCollection<? extends PbElement> locateWithNormalizedNames( PbFile pbFile, QualifiedName fileLocalName) { String fileLocalSymbol = fileLocalName.toString(); // Should have been an exact match if '_' didn't come into play. if (!fileLocalSymbol.contains("_")) { return ImmutableList.of(); } String desiredSymbol = fileLocalSymbol.replace('.', '_'); Multimap<QualifiedName, PbSymbol> fileSymbols = pbFile.getLocalQualifiedSymbolMap(); int numPackageComponents = pbFile.getPackageQualifiedName().getComponentCount(); List<PbSymbol> matches = fileSymbols .entries() .stream() .filter( entry -> { QualifiedName qualifiedName = entry.getKey(); String candidateLastComponent = qualifiedName.getLastComponent(); if (candidateLastComponent == null || !desiredSymbol.endsWith(candidateLastComponent)) { return false; } return qualifiedName .removeHead(numPackageComponents) .join("_") .equals(desiredSymbol); }) .map(Map.Entry::getValue) .collect(toList()); return ImmutableList.copyOf(matches); } }
apache-2.0
raulpiiber/j-road
client-transport/src/main/java/com/nortal/jroad/client/service/callback/XRoadProtocolNamespaceStrategyV4.java
4701
package com.nortal.jroad.client.service.callback; import javax.xml.soap.SOAPElement; import javax.xml.soap.SOAPEnvelope; import javax.xml.soap.SOAPException; import javax.xml.soap.SOAPHeader; import org.apache.commons.lang.StringUtils; import com.nortal.jroad.client.enums.XroadObjectType; import com.nortal.jroad.client.service.configuration.XRoadServiceConfiguration; import com.nortal.jroad.enums.XRoadProtocolVersion; /** * @author Aleksei Bogdanov (aleksei.bogdanov@nortal.com) * @author Lauri Lättemäe (lauri.lattemae@nortal.com) - protocol 4.0 */ public class XRoadProtocolNamespaceStrategyV4 extends MessageCallbackNamespaceStrategy { private XRoadProtocolVersion protocol = XRoadProtocolVersion.V4_0; @Override public void addNamespaces(SOAPEnvelope env) throws SOAPException { env.addNamespaceDeclaration("xsd", "http://www.w3.org/2001/XMLSchema"); env.addNamespaceDeclaration("xsi", "http://www.w3.org/2001/XMLSchema-instance"); env.addNamespaceDeclaration(protocol.getNamespacePrefix(), protocol.getNamespaceUri()); env.addNamespaceDeclaration("id", "http://x-road.eu/xsd/identifiers"); } @Override public void addXTeeHeaderElements(SOAPEnvelope env, XRoadServiceConfiguration conf) throws SOAPException { SOAPHeader header = env.getHeader(); SOAPElement userId = header.addChildElement("userId", protocol.getNamespacePrefix()); userId.addTextNode(conf.getIdCode()); SOAPElement id = header.addChildElement("id", protocol.getNamespacePrefix()); id.addTextNode(generateUniqueMessageId(conf)); if (StringUtils.isNotBlank(conf.getFile())) { SOAPElement issue = header.addChildElement("issue", protocol.getNamespacePrefix()); issue.addTextNode(conf.getFile()); } SOAPElement protocolVersion = header.addChildElement("protocolVersion", protocol.getNamespacePrefix()); protocolVersion.addTextNode(protocol.getCode()); addClientElements(env, conf, header); addServiceElements(env, conf, header); } private void addClientElements(SOAPEnvelope env, XRoadServiceConfiguration conf, SOAPHeader 
header) throws SOAPException { // TODO: maybe we should create headers differently according to object type? XroadObjectType objectType = conf.getClientObjectType() != null ? conf.getClientObjectType() : XroadObjectType.SUBSYSTEM; SOAPElement client = header.addChildElement("client", protocol.getNamespacePrefix()); client.addAttribute(env.createName("id:objectType"), objectType.name()); SOAPElement clientXRoadInstance = client.addChildElement("xRoadInstance", "id"); clientXRoadInstance.addTextNode(conf.getClientXRoadInstance()); SOAPElement clientMemberClass = client.addChildElement("memberClass", "id"); clientMemberClass.addTextNode(conf.getClientMemberClass()); SOAPElement clientMemberCode = client.addChildElement("memberCode", "id"); clientMemberCode.addTextNode(conf.getClientMemberCode()); if (StringUtils.isNotBlank(conf.getClientSubsystemCode())) { SOAPElement clientSubsystemCode = client.addChildElement("subsystemCode", "id"); clientSubsystemCode.addTextNode(conf.getClientSubsystemCode()); } } private void addServiceElements(SOAPEnvelope env, XRoadServiceConfiguration conf, SOAPHeader header) throws SOAPException { // TODO: maybe we should create headers differently according to object type? XroadObjectType objectType = conf.getServiceObjectType() != null ? 
conf.getServiceObjectType() : XroadObjectType.SERVICE; SOAPElement service = header.addChildElement("service", protocol.getNamespacePrefix()); service.addAttribute(env.createName("id:objectType"), objectType.name()); SOAPElement serviceXRoadInstance = service.addChildElement("xRoadInstance", "id"); serviceXRoadInstance.addTextNode(conf.getServiceXRoadInstance()); SOAPElement serviceMemberClass = service.addChildElement("memberClass", "id"); serviceMemberClass.addTextNode(conf.getServiceMemberClass()); SOAPElement serviceMemberCode = service.addChildElement("memberCode", "id"); serviceMemberCode.addTextNode(conf.getServiceMemberCode()); if (StringUtils.isNotBlank(conf.getServiceSubsystemCode())) { SOAPElement subsystemCode = service.addChildElement("subsystemCode", "id"); subsystemCode.addTextNode(conf.getServiceSubsystemCode()); } SOAPElement database = service.addChildElement("serviceCode", "id"); database.addTextNode(conf.getMethod()); if(StringUtils.isNotBlank(conf.getVersion())){ SOAPElement serviceVersion = service.addChildElement("serviceVersion", "id"); serviceVersion.addTextNode(conf.getVersion()); } } }
apache-2.0
ruspl-afed/dbeaver
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/registry/driver/DriverLibraryLocal.java
4995
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.registry.driver; import org.eclipse.core.runtime.FileLocator; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.core.runtime.Platform; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBIcon; import org.jkiss.dbeaver.model.connection.DBPDriverLibrary; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.ui.UIIcon; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.Collection; /** * DriverLibraryLocal */ public class DriverLibraryLocal extends DriverLibraryAbstract { private static final Log log = Log.getLog(DriverLibraryLocal.class); public DriverLibraryLocal(DriverDescriptor driver, FileType type, String path) { super(driver, type, path); } public DriverLibraryLocal(DriverDescriptor driver, IConfigurationElement config) { super(driver, config); } @Override public boolean isDownloadable() { return false; } @Override public void resetVersion() { // do nothing } @Override public boolean isSecureDownload(DBRProgressMonitor monitor) { return true; } protected String getLocalFilePath() { return path; } @Nullable @Override public String getExternalURL(DBRProgressMonitor monitor) { return null; } @Nullable @Override public File getLocalFile() { // Try 
to use direct path String localFilePath = this.getLocalFilePath(); File libraryFile = new File(localFilePath); if (libraryFile.exists()) { return libraryFile; } // Try to get local file File platformFile = detectLocalFile(); if (platformFile != null && platformFile.exists()) { // Relative file do not exists - use plain one return platformFile; } URL url = driver.getProviderDescriptor().getContributorBundle().getEntry(localFilePath); if (url == null) { // Find in external resources url = driver.getProviderDescriptor().getRegistry().findResourceURL(localFilePath); } if (url != null) { try { url = FileLocator.toFileURL(url); } catch (IOException ex) { log.warn(ex); } if (url != null) { return new File(url.getFile()); } } else { try { url = FileLocator.toFileURL(new URL(localFilePath)); File urlFile = new File(url.getFile()); if (urlFile.exists()) { platformFile = urlFile; } } catch (IOException ex) { // ignore } } // Nothing fits - just return plain url return platformFile; } @Nullable @Override public Collection<? extends DBPDriverLibrary> getDependencies(@NotNull DBRProgressMonitor monitor) throws IOException { return null; } protected File detectLocalFile() { String localPath = getLocalFilePath(); // Try to use relative path from installation dir File file = new File(new File(Platform.getInstallLocation().getURL().getFile()), localPath); if (!file.exists()) { // Use custom drivers path file = new File(DriverDescriptor.getCustomDriversHome(), localPath); } return file; } @NotNull public String getDisplayName() { return path; } @Override public String getId() { return path; } @NotNull @Override public DBIcon getIcon() { File localFile = getLocalFile(); if (localFile != null && localFile.isDirectory()) { return DBIcon.TREE_FOLDER; } else { switch (type) { case lib: return UIIcon.LIBRARY; case jar: return UIIcon.JAR; default: return DBIcon.TYPE_UNKNOWN; } } } }
apache-2.0
mp911de/lettuce
src/test/java/io/lettuce/core/commands/reactive/KeyReactiveCommandIntegrationTests.java
1148
/* * Copyright 2011-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.lettuce.core.commands.reactive; import javax.inject.Inject; import io.lettuce.core.api.StatefulRedisConnection; import io.lettuce.core.commands.KeyCommandIntegrationTests; import io.lettuce.test.ReactiveSyncInvocationHandler; /** * @author Mark Paluch */ class KeyReactiveCommandIntegrationTests extends KeyCommandIntegrationTests { @Inject KeyReactiveCommandIntegrationTests(StatefulRedisConnection<String, String> connection) { super(ReactiveSyncInvocationHandler.sync(connection)); } }
apache-2.0
dturanski/spring-cloud-stream
spring-cloud-stream/src/main/java/org/springframework/cloud/stream/config/MergableProperties.java
3343
/* * Copyright 2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.stream.config; import java.beans.PropertyDescriptor; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import org.springframework.beans.BeanUtils; import org.springframework.beans.BeansException; import org.springframework.beans.FatalBeanException; import org.springframework.cloud.stream.binder.ConsumerProperties; import org.springframework.cloud.stream.binder.ProducerProperties; import org.springframework.util.ClassUtils; import org.springframework.util.ObjectUtils; /** * NOT INTENDED FOR PUBLIC USE! Was primarily created to address GH-1359. * * @see BinderProperties * @see ProducerProperties * @see ConsumerProperties * * @author Oleg Zhurakousky */ public interface MergableProperties { /** * A variation of {@link BeanUtils#copyProperties(Object, Object)} specifically designed to copy properties using the following rule: * * - If source property is null then override with the same from mergable. * - If source property is an array and it is empty then override with same from mergable. * - If source property is mergable then merge. 
*/ default void merge(MergableProperties mergable) { if (mergable == null) { return; } for (PropertyDescriptor targetPd : BeanUtils.getPropertyDescriptors(mergable.getClass())) { Method writeMethod = targetPd.getWriteMethod(); if (writeMethod != null) { PropertyDescriptor sourcePd = BeanUtils.getPropertyDescriptor(this.getClass(), targetPd.getName()); if (sourcePd != null) { Method readMethod = sourcePd.getReadMethod(); if (readMethod != null && ClassUtils.isAssignable(writeMethod.getParameterTypes()[0], readMethod.getReturnType())) { try { if (!Modifier.isPublic(readMethod.getDeclaringClass().getModifiers())) { readMethod.setAccessible(true); } Object value = readMethod.invoke(this); if (value != null) { if (value instanceof MergableProperties) { ((MergableProperties)value).merge((MergableProperties)readMethod.invoke(mergable)); } else { Object v = readMethod.invoke(mergable); if (v == null || (ObjectUtils.isArray(v) && ObjectUtils.isEmpty(v))) { if (!Modifier.isPublic(writeMethod.getDeclaringClass().getModifiers())) { writeMethod.setAccessible(true); } writeMethod.invoke(mergable, value); } } } } catch (Throwable ex) { throw new FatalBeanException( "Could not copy property '" + targetPd.getName() + "' from source to target", ex); } } } } } } default void copyProperties(Object source, Object target) throws BeansException { } }
apache-2.0
Cognifide/AET
core/worker/src/main/java/com/cognifide/aet/worker/drivers/firefox/FirefoxCommunicationWrapperImpl.java
2733
/** * AET * * Copyright (C) 2013 Cognifide Limited * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.cognifide.aet.worker.drivers.firefox; import com.cognifide.aet.job.api.collector.HttpRequestExecutor; import com.cognifide.aet.job.api.collector.JsErrorLog; import com.cognifide.aet.job.api.collector.ProxyServerWrapper; import com.cognifide.aet.job.api.collector.WebCommunicationWrapper; import com.cognifide.aet.worker.helpers.JavaScriptError; import com.google.common.base.Function; import com.google.common.collect.Ordering; import java.util.List; import java.util.Set; import java.util.TreeSet; import java.util.stream.Collectors; import org.openqa.selenium.WebDriver; public class FirefoxCommunicationWrapperImpl implements WebCommunicationWrapper { private static final Function<JavaScriptError, JsErrorLog> ERROR_LOG_FUNCTION = new Function<JavaScriptError, JsErrorLog>() { @Override public JsErrorLog apply(JavaScriptError input) { JsErrorLog jsErrorLog = null; if (input != null) { jsErrorLog = new JsErrorLog(input.getErrorMessage(), input.getSourceName(), input.getLineNumber()); } return jsErrorLog; } }; private final WebDriver webDriver; private final ProxyServerWrapper proxyServer; private final HttpRequestExecutor requestExecutor; public FirefoxCommunicationWrapperImpl(WebDriver webDriver, ProxyServerWrapper server, HttpRequestExecutor requestExecutor) { this.webDriver = webDriver; this.proxyServer = server; this.requestExecutor = requestExecutor; } @Override public 
WebDriver getWebDriver() { return webDriver; } @Override public ProxyServerWrapper getProxyServer() { return proxyServer; } @Override public boolean isUseProxy() { return proxyServer != null; } @Override public Set<JsErrorLog> getJSErrorLogs() { List<JavaScriptError> javaScriptErrors = JavaScriptError.readErrors(webDriver); return javaScriptErrors.stream() .map(ERROR_LOG_FUNCTION) .collect(Collectors.toCollection(() -> new TreeSet<>(Ordering.natural()))); } @Override public HttpRequestExecutor getHttpRequestExecutor() { return requestExecutor; } }
apache-2.0
kmadiwale/companySearchAPI
src/main/java/com/abc/rest/dao/oc/CSearch.java
853
/** * */ package com.abc.rest.dao.oc; import java.io.Serializable; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; /** * @author kmadiwal * */ @JsonIgnoreProperties(ignoreUnknown = true) public class CSearch implements Serializable { private static final long serialVersionUID = 5659556315094130619L; @JsonProperty("api_version") private String apiVersion; @JsonProperty("results") private Results results; /** * @return the results */ public Results getResults() { return results; } /** * @param results the results to set */ public void setResults(Results results) { this.results = results; } public String getApiVersion() { return apiVersion; } public void setApiVersion(String apiVersion) { this.apiVersion = apiVersion; } }
apache-2.0
consulo/consulo-rust
gen/vektah/rust/psi/RustExprAssignMultiply.java
290
// This is a generated file. Not intended for manual editing. package vektah.rust.psi; import java.util.List; import org.jetbrains.annotations.*; import com.intellij.psi.PsiElement; public interface RustExprAssignMultiply extends RustExpr { @NotNull List<RustExpr> getExprList(); }
apache-2.0
tempbottle/jsimpledb
src/java/org/jsimpledb/core/EnumFieldStorageInfo.java
907
/* * Copyright (C) 2015 Archie L. Cobbs. All rights reserved. */ package org.jsimpledb.core; import java.util.List; class EnumFieldStorageInfo extends SimpleFieldStorageInfo<EnumValue> { EnumFieldStorageInfo(EnumField field, int superFieldStorageId) { super(field, superFieldStorageId); } public List<String> getIdentifiers() { return ((EnumFieldType)this.fieldType).getIdentifiers(); } // Object @Override public String toString() { return "enum field with identifiers " + this.getIdentifiers(); } @Override protected boolean fieldTypeEquals(SimpleFieldStorageInfo<?> that0) { final EnumFieldStorageInfo that = (EnumFieldStorageInfo)that0; return this.getIdentifiers().equals(that.getIdentifiers()); } @Override protected int fieldTypeHashCode() { return this.getIdentifiers().hashCode(); } }
apache-2.0
jraduget/kaleido-repository
kaleido-core/src/test/java/org/kaleidofoundry/core/config/XmlPropertiesConfigurationTest.java
1324
/* * Copyright 2008-2021 the original author or authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaleidofoundry.core.config; import java.net.URISyntaxException; import org.kaleidofoundry.core.context.RuntimeContext; import org.kaleidofoundry.core.store.ResourceException; /** * @author jraduget */ public class XmlPropertiesConfigurationTest extends AbstractConfigurationTest { public XmlPropertiesConfigurationTest() throws ResourceException, URISyntaxException { super(); } @Override protected Configuration newInstance() throws ResourceException, URISyntaxException { return new XmlPropertiesConfiguration("propXmlCpConfig", "classpath:/config/test.xmlproperties", new RuntimeContext<org.kaleidofoundry.core.config.Configuration>(Configuration.class)); } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/model/AmazonDynamoDBException.java
1201
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.dynamodbv2.model; import javax.annotation.Generated; /** * Base exception for all service exceptions thrown by Amazon DynamoDB */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AmazonDynamoDBException extends com.amazonaws.AmazonServiceException { private static final long serialVersionUID = 1L; /** * Constructs a new AmazonDynamoDBException with the specified error message. * * @param message * Describes the error encountered. */ public AmazonDynamoDBException(String message) { super(message); } }
apache-2.0
janko33bd/bitcoinj
core/src/main/java/org/bitcoinj/script/ScriptBuilder.java
20271
/* * Copyright 2013 Google Inc. * Copyright 2018 Nicola Atzei * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bitcoinj.script; import com.google.common.collect.Lists; import org.bitcoinj.core.Address; import org.bitcoinj.core.ECKey; import org.bitcoinj.core.Utils; import org.bitcoinj.crypto.TransactionSignature; import javax.annotation.Nullable; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Stack; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static org.bitcoinj.script.ScriptOpCodes.*; /** * <p>Tools for the construction of commonly used script types. You don't normally need this as it's hidden behind * convenience methods on {@link org.bitcoinj.core.Transaction}, but they are useful when working with the * protocol at a lower level.</p> */ public class ScriptBuilder { private List<ScriptChunk> chunks; /** Creates a fresh ScriptBuilder with an empty program. */ public ScriptBuilder() { chunks = Lists.newLinkedList(); } /** Creates a fresh ScriptBuilder with the given program as the starting point. 
*/
    public ScriptBuilder(Script template) {
        chunks = new ArrayList<>(template.getChunks());
    }

    /** Adds the given chunk to the end of the program. */
    public ScriptBuilder addChunk(ScriptChunk chunk) {
        return addChunk(chunks.size(), chunk);
    }

    /** Adds the given chunk at the given index in the program. */
    public ScriptBuilder addChunk(int index, ScriptChunk chunk) {
        chunks.add(index, chunk);
        return this;
    }

    /** Adds the given opcode to the end of the program. */
    public ScriptBuilder op(int opcode) {
        return op(chunks.size(), opcode);
    }

    /** Adds the given opcode to the given index in the program. */
    public ScriptBuilder op(int index, int opcode) {
        // Only true opcodes are accepted here; pushes must go through data()/number().
        checkArgument(opcode > OP_PUSHDATA4);
        return addChunk(index, new ScriptChunk(opcode, null));
    }

    /** Adds a copy of the given byte array as a data element (i.e. PUSHDATA) at the end of the program. */
    public ScriptBuilder data(byte[] data) {
        // An empty payload is encoded as OP_0 rather than a zero-length push.
        if (data.length == 0)
            return smallNum(0);
        else
            return data(chunks.size(), data);
    }

    /** Adds a copy of the given byte array as a data element (i.e. PUSHDATA) at the given index in the program. */
    public ScriptBuilder data(int index, byte[] data) {
        // implements BIP62: choose the minimal push encoding for the payload size
        byte[] copy = Arrays.copyOf(data, data.length);
        int opcode;
        if (data.length == 0) {
            opcode = OP_0;
        } else if (data.length == 1) {
            byte b = data[0];
            if (b >= 1 && b <= 16)
                opcode = Script.encodeToOpN(b);
            else
                opcode = 1; // direct push of a single byte
        } else if (data.length < OP_PUSHDATA1) {
            // lengths below OP_PUSHDATA1 are encoded directly as the opcode
            opcode = data.length;
        } else if (data.length < 256) {
            opcode = OP_PUSHDATA1;
        } else if (data.length < 65536) {
            opcode = OP_PUSHDATA2;
        } else {
            // OP_PUSHDATA4 payloads are not supported by this builder
            throw new RuntimeException("Unimplemented");
        }
        return addChunk(index, new ScriptChunk(opcode, copy));
    }

    /**
     * Adds the given number to the end of the program. Automatically uses
     * shortest encoding possible.
     */
    public ScriptBuilder number(long num) {
        return number(chunks.size(), num);
    }

    /**
     * Adds the given number to the given index in the program. Automatically
     * uses shortest encoding possible.
     */
    public ScriptBuilder number(int index, long num) {
        if (num == -1) {
            // -1 has its own dedicated opcode
            return op(index, OP_1NEGATE);
        } else if (num >= 0 && num <= 16) {
            return smallNum(index, (int) num);
        } else {
            return bigNum(index, num);
        }
    }

    /**
     * Adds the given number as a OP_N opcode to the end of the program.
     * Only handles values 0-16 inclusive.
     *
     * @see #number(long)
     */
    public ScriptBuilder smallNum(int num) {
        return smallNum(chunks.size(), num);
    }

    /**
     * Adds the given number as a push data chunk.
     * This is intended to use for negative numbers or values > 16, and although
     * it will accept numbers in the range 0-16 inclusive, the encoding would be
     * considered non-standard.
     *
     * @see #number(long)
     */
    protected ScriptBuilder bigNum(long num) {
        return bigNum(chunks.size(), num);
    }

    /**
     * Adds the given number as a OP_N opcode to the given index in the program.
     * Only handles values 0-16 inclusive.
     *
     * @see #number(long)
     */
    public ScriptBuilder smallNum(int index, int num) {
        checkArgument(num >= 0, "Cannot encode negative numbers with smallNum");
        checkArgument(num <= 16, "Cannot encode numbers larger than 16 with smallNum");
        return addChunk(index, new ScriptChunk(Script.encodeToOpN(num), null));
    }

    /**
     * Adds the given number as a push data chunk to the given index in the program.
     * This is intended to use for negative numbers or values > 16, and although
     * it will accept numbers in the range 0-16 inclusive, the encoding would be
     * considered non-standard.
     *
     * @see #number(long)
     */
    protected ScriptBuilder bigNum(int index, long num) {
        final byte[] data;

        if (num == 0) {
            data = new byte[0];
        } else {
            Stack<Byte> result = new Stack<>();
            final boolean neg = num < 0;
            long absvalue = Math.abs(num);

            // Collect magnitude bytes least-significant first; reading the stack
            // bottom-up below yields a little-endian, sign-magnitude encoding.
            while (absvalue != 0) {
                result.push((byte) (absvalue & 0xff));
                absvalue >>= 8;
            }

            if ((result.peek() & 0x80) != 0) {
                // The most significant byte is >= 0x80, so push an extra byte that
                // contains just the sign of the value.
                result.push((byte) (neg ? 0x80 : 0));
            } else if (neg) {
                // The most significant byte is < 0x80 and the value is negative,
                // set the sign bit so it is subtracted and interpreted as a
                // negative when converting back to an integral.
                result.push((byte) (result.pop() | 0x80));
            }

            data = new byte[result.size()];
            for (int byteIdx = 0; byteIdx < data.length; byteIdx++) {
                data[byteIdx] = result.get(byteIdx);
            }
        }

        // At most the encoded value could take up to 8 bytes, so we don't need
        // to use OP_PUSHDATA opcodes
        return addChunk(index, new ScriptChunk(data.length, data));
    }

    /**
     * Adds true to the end of the program.
     * @return this
     */
    public ScriptBuilder opTrue() {
        return number(1); // pushes OP_1/OP_TRUE
    }

    /**
     * Adds true to the given index in the program.
     * @param index at which insert true
     * @return this
     */
    public ScriptBuilder opTrue(int index) {
        return number(index, 1); // push OP_1/OP_TRUE
    }

    /**
     * Adds false to the end of the program.
     * @return this
     */
    public ScriptBuilder opFalse() {
        return number(0); // push OP_0/OP_FALSE
    }

    /**
     * Adds false to the given index in the program.
     * @param index at which insert false
     * @return this
     */
    public ScriptBuilder opFalse(int index) {
        return number(index, 0); // push OP_0/OP_FALSE
    }

    /** Creates a new immutable Script based on the state of the builder. */
    public Script build() {
        return new Script(chunks);
    }

    /** Creates a scriptPubKey that encodes payment to the given address. */
    public static Script createOutputScript(Address to) {
        if (to.isP2SHAddress()) {
            // OP_HASH160 <scriptHash> OP_EQUAL
            return new ScriptBuilder()
                .op(OP_HASH160)
                .data(to.getHash160())
                .op(OP_EQUAL)
                .build();
        } else {
            // OP_DUP OP_HASH160 <pubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
            return new ScriptBuilder()
                .op(OP_DUP)
                .op(OP_HASH160)
                .data(to.getHash160())
                .op(OP_EQUALVERIFY)
                .op(OP_CHECKSIG)
                .build();
        }
    }

    /** Creates a scriptPubKey that encodes payment to the given raw public key. */
    public static Script createOutputScript(ECKey key) {
        return new ScriptBuilder().data(key.getPubKey()).op(OP_CHECKSIG).build();
    }

    /**
     * Creates a scriptSig that can redeem a pay-to-address output.
     * If given signature is null, incomplete scriptSig will be created with OP_0 instead of signature
     */
    public static Script createInputScript(@Nullable TransactionSignature signature, ECKey pubKey) {
        byte[] pubkeyBytes = pubKey.getPubKey();
        // An empty push acts as the OP_0 placeholder for a missing signature.
        byte[] sigBytes = signature != null ? signature.encodeToBitcoin() : new byte[]{};
        return new ScriptBuilder().data(sigBytes).data(pubkeyBytes).build();
    }

    /**
     * Creates a scriptSig that can redeem a pay-to-pubkey output.
     * If given signature is null, incomplete scriptSig will be created with OP_0 instead of signature
     */
    public static Script createInputScript(@Nullable TransactionSignature signature) {
        byte[] sigBytes = signature != null ? signature.encodeToBitcoin() : new byte[]{};
        return new ScriptBuilder().data(sigBytes).build();
    }

    /** Creates a program that requires at least N of the given keys to sign, using OP_CHECKMULTISIG. */
    public static Script createMultiSigOutputScript(int threshold, List<ECKey> pubkeys) {
        checkArgument(threshold > 0);
        checkArgument(threshold <= pubkeys.size());
        checkArgument(pubkeys.size() <= 16); // That's the max we can represent with a single opcode.
        // <threshold> <pubkey>... <n-keys> OP_CHECKMULTISIG
        ScriptBuilder builder = new ScriptBuilder();
        builder.smallNum(threshold);
        for (ECKey key : pubkeys) {
            builder.data(key.getPubKey());
        }
        builder.smallNum(pubkeys.size());
        builder.op(OP_CHECKMULTISIG);
        return builder.build();
    }

    /** Create a program that satisfies an OP_CHECKMULTISIG program. */
    public static Script createMultiSigInputScript(List<TransactionSignature> signatures) {
        List<byte[]> sigs = new ArrayList<>(signatures.size());
        for (TransactionSignature signature : signatures) {
            sigs.add(signature.encodeToBitcoin());
        }
        return createMultiSigInputScriptBytes(sigs, null);
    }

    /** Create a program that satisfies an OP_CHECKMULTISIG program. */
    public static Script createMultiSigInputScript(TransactionSignature... signatures) {
        return createMultiSigInputScript(Arrays.asList(signatures));
    }

    /** Create a program that satisfies an OP_CHECKMULTISIG program, using pre-encoded signatures. */
    public static Script createMultiSigInputScriptBytes(List<byte[]> signatures) {
        return createMultiSigInputScriptBytes(signatures, null);
    }

    /**
     * Create a program that satisfies a pay-to-script hashed OP_CHECKMULTISIG program.
     * If given signature list is null, incomplete scriptSig will be created with OP_0 instead of signatures
     */
    public static Script createP2SHMultiSigInputScript(@Nullable List<TransactionSignature> signatures,
                                                       Script multisigProgram) {
        List<byte[]> sigs = new ArrayList<>();
        if (signatures == null) {
            // create correct number of empty signatures
            int numSigs = multisigProgram.getNumberOfSignaturesRequiredToSpend();
            for (int i = 0; i < numSigs; i++)
                sigs.add(new byte[]{});
        } else {
            for (TransactionSignature signature : signatures) {
                sigs.add(signature.encodeToBitcoin());
            }
        }
        return createMultiSigInputScriptBytes(sigs, multisigProgram.getProgram());
    }

    /**
     * Create a program that satisfies an OP_CHECKMULTISIG program, using pre-encoded signatures.
     * Optionally, appends the script program bytes if spending a P2SH output.
     */
    public static Script createMultiSigInputScriptBytes(List<byte[]> signatures,
                                                        @Nullable byte[] multisigProgramBytes) {
        checkArgument(signatures.size() <= 16);
        ScriptBuilder builder = new ScriptBuilder();
        builder.smallNum(0); // Work around a bug in CHECKMULTISIG that is now a required part of the protocol.
        for (byte[] signature : signatures)
            builder.data(signature);
        if (multisigProgramBytes != null)
            builder.data(multisigProgramBytes);
        return builder.build();
    }

    /**
     * Returns a copy of the given scriptSig with the signature inserted in the given position.
     *
     * This function assumes that any missing sigs have OP_0 placeholders. If given scriptSig already has all
     * the signatures in place, IllegalArgumentException will be thrown.
     *
     * @param targetIndex where to insert the signature
     * @param sigsPrefixCount how many items to copy verbatim (e.g. initial OP_0 for multisig)
     * @param sigsSuffixCount how many items to copy verbatim at end (e.g. redeemScript for P2SH)
     */
    public static Script updateScriptWithSignature(Script scriptSig, byte[] signature, int targetIndex,
                                                   int sigsPrefixCount, int sigsSuffixCount) {
        ScriptBuilder builder = new ScriptBuilder();
        List<ScriptChunk> inputChunks = scriptSig.getChunks();
        int totalChunks = inputChunks.size();

        // Check if we have a place to insert, otherwise just return given scriptSig unchanged.
        // We assume here that OP_0 placeholders always go after the sigs, so
        // to find if we have sigs missing, we can just check the chunk in latest sig position
        boolean hasMissingSigs = inputChunks.get(totalChunks - sigsSuffixCount - 1).equalsOpCode(OP_0);
        checkArgument(hasMissingSigs, "ScriptSig is already filled with signatures");

        // copy the prefix
        for (ScriptChunk chunk : inputChunks.subList(0, sigsPrefixCount))
            builder.addChunk(chunk);

        // copy the sigs
        int pos = 0;
        boolean inserted = false;
        for (ScriptChunk chunk : inputChunks.subList(sigsPrefixCount, totalChunks - sigsSuffixCount)) {
            if (pos == targetIndex) {
                inserted = true;
                builder.data(signature);
                pos++;
            }
            if (!chunk.equalsOpCode(OP_0)) {
                builder.addChunk(chunk);
                pos++;
            }
        }

        // add OP_0's if needed, since we skipped them in the previous loop
        while (pos < totalChunks - sigsPrefixCount - sigsSuffixCount) {
            if (pos == targetIndex) {
                inserted = true;
                builder.data(signature);
            } else {
                builder.addChunk(new ScriptChunk(OP_0, null));
            }
            pos++;
        }

        // copy the suffix
        for (ScriptChunk chunk : inputChunks.subList(totalChunks - sigsSuffixCount, totalChunks))
            builder.addChunk(chunk);

        checkState(inserted);
        return builder.build();
    }

    /**
     * Creates a scriptPubKey that sends to the given script hash. Read
     * <a href="https://github.com/bitcoin/bips/blob/master/bip-0016.mediawiki">BIP 16</a> to learn more about this
     * kind of script.
     */
    public static Script createP2SHOutputScript(byte[] hash) {
        checkArgument(hash.length == 20);
        return new ScriptBuilder().op(OP_HASH160).data(hash).op(OP_EQUAL).build();
    }

    /**
     * Creates a scriptPubKey for the given redeem script.
     */
    public static Script createP2SHOutputScript(Script redeemScript) {
        byte[] hash = Utils.sha256hash160(redeemScript.getProgram());
        return ScriptBuilder.createP2SHOutputScript(hash);
    }

    /**
     * Creates a P2SH output script with given public keys and threshold. Given public keys will be placed in
     * redeem script in the lexicographical sorting order.
     */
    public static Script createP2SHOutputScript(int threshold, List<ECKey> pubkeys) {
        Script redeemScript = createRedeemScript(threshold, pubkeys);
        return createP2SHOutputScript(redeemScript);
    }

    /**
     * Creates redeem script with given public keys and threshold. Given public keys will be placed in
     * redeem script in the lexicographical sorting order.
     */
    public static Script createRedeemScript(int threshold, List<ECKey> pubkeys) {
        // copy before sorting so the caller's list is not mutated
        pubkeys = new ArrayList<>(pubkeys);
        Collections.sort(pubkeys, ECKey.PUBKEY_COMPARATOR);
        return ScriptBuilder.createMultiSigOutputScript(threshold, pubkeys);
    }

    /**
     * Creates a script of the form OP_RETURN [data]. This feature allows you to attach a small piece of data (like
     * a hash of something stored elsewhere) to a zero valued output which can never be spent and thus does not pollute
     * the ledger.
     */
    public static Script createOpReturnScript(byte[] data) {
        checkArgument(data.length <= 80);
        return new ScriptBuilder().op(OP_RETURN).data(data).build();
    }

    /**
     * Creates a CLTV payment channel output of the form:
     * OP_IF <to pubkey> OP_CHECKSIGVERIFY OP_ELSE <time> OP_CHECKLOCKTIMEVERIFY OP_DROP OP_ENDIF
     * <from pubkey> OP_CHECKSIG
     */
    public static Script createCLTVPaymentChannelOutput(BigInteger time, ECKey from, ECKey to) {
        byte[] timeBytes = Utils.reverseBytes(Utils.encodeMPI(time, false));
        if (timeBytes.length > 5) {
            throw new RuntimeException("Time too large to encode as 5-byte int");
        }
        return new ScriptBuilder().op(OP_IF)
                .data(to.getPubKey()).op(OP_CHECKSIGVERIFY)
                .op(OP_ELSE)
                .data(timeBytes).op(OP_CHECKLOCKTIMEVERIFY).op(OP_DROP)
                .op(OP_ENDIF)
                .data(from.getPubKey()).op(OP_CHECKSIG).build();
    }

    /** Creates a scriptSig that redeems the time-locked (OP_ELSE/CLTV) refund branch of a payment channel output. */
    public static Script createCLTVPaymentChannelRefund(TransactionSignature signature) {
        ScriptBuilder builder = new ScriptBuilder();
        builder.data(signature.encodeToBitcoin());
        builder.data(new byte[] { 0 }); // pushing 0 selects the OP_ELSE (CHECKLOCKTIMEVERIFY) branch
        return builder.build();
    }

    /** P2SH variant of {@link #createCLTVPaymentChannelRefund}: appends the redeem script. */
    public static Script createCLTVPaymentChannelP2SHRefund(TransactionSignature signature, Script redeemScript) {
        ScriptBuilder builder = new ScriptBuilder();
        builder.data(signature.encodeToBitcoin());
        builder.data(new byte[] { 0 }); // pushing 0 selects the OP_ELSE (CHECKLOCKTIMEVERIFY) branch
        builder.data(redeemScript.getProgram());
        return builder.build();
    }

    /** P2SH variant of the two-signature settle input: appends the redeem script. */
    public static Script createCLTVPaymentChannelP2SHInput(byte[] from, byte[] to, Script redeemScript) {
        ScriptBuilder builder = new ScriptBuilder();
        builder.data(from);
        builder.data(to);
        builder.smallNum(1); // pushing 1 selects the OP_IF (two-signature) branch
        builder.data(redeemScript.getProgram());
        return builder.build();
    }

    /** Creates a scriptSig carrying both signatures for the OP_IF (settle) branch. */
    public static Script createCLTVPaymentChannelInput(TransactionSignature from, TransactionSignature to) {
        return createCLTVPaymentChannelInput(from.encodeToBitcoin(), to.encodeToBitcoin());
    }

    /** Pre-encoded-signature variant of {@link #createCLTVPaymentChannelInput(TransactionSignature, TransactionSignature)}. */
    public static Script createCLTVPaymentChannelInput(byte[] from, byte[] to) {
        ScriptBuilder builder = new ScriptBuilder();
        builder.data(from);
        builder.data(to);
        builder.smallNum(1); // pushing 1 selects the OP_IF (two-signature) branch
        return builder.build();
    }
}
apache-2.0
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/numerics/InterpolateArray.java
1311
/*
 * Copyright (c) 2021, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.numerics;

/**
 * Performs linear interpolation between adjacent elements of an array. A sample point is only
 * valid when it is non-negative and strictly less than the last array index, since interpolation
 * reads the element immediately after the truncated index.
 *
 * @author Peter Abeles
 */
public class InterpolateArray {

	/** Samples that interpolation reads from. */
	public double[] data;

	/** Interpolated result from the most recent successful call to {@link #interpolate}. */
	public double value;

	// Index of the final sample. interpolate() reads data[i + 1], so the truncated
	// sample index must stay strictly below this bound.
	private final int lastIndex;

	public InterpolateArray( double[] data ) {
		this.data = data;
		lastIndex = data.length - 1;
	}

	/**
	 * Linearly interpolates the array at the requested location, storing the result in {@link #value}.
	 *
	 * @param where sample location; valid when {@code 0 <= where < data.length - 1}
	 * @return true if the location was inside the valid range and {@link #value} was updated
	 */
	public boolean interpolate( double where ) {
		if (where < 0)
			return false;

		int i0 = (int)where;
		if (i0 >= lastIndex)
			return false;

		double frac = where - i0;
		value = data[i0]*(1.0 - frac) + data[i0 + 1]*frac;
		return true;
	}
}
apache-2.0
10045125/spring-boot
spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/condition/ConditionalOnJava.java
3487
/* * Copyright 2012-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.condition; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.springframework.context.annotation.Conditional; import org.springframework.core.JdkVersion; import org.springframework.util.Assert; /** * {@link Conditional} that matches based on the JVM version the application is running * on. * * @author Oliver Gierke * @author Phillip Webb * @since 1.1.0 */ @Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) @Documented @Conditional(OnJavaCondition.class) public @interface ConditionalOnJava { /** * Configures whether the value configured in {@link #value()} shall be considered the * upper exclusive or lower inclusive boundary. Defaults to * {@link Range#EQUAL_OR_NEWER}. */ Range range() default Range.EQUAL_OR_NEWER; /** * The {@link JavaVersion} to check for. Use {@link #range()} to specify whether the * configured value is an upper-exclusive or lower-inclusive boundary. */ JavaVersion value(); /** * Range options. */ public enum Range { /** * Equal to, or newer than the specified {@link JavaVersion}. */ EQUAL_OR_NEWER, /** * Older than the specified {@link JavaVersion}. */ OLDER_THAN } /** * Java versions. 
*/ public enum JavaVersion { /** * Java 1.6. */ SIX(JdkVersion.JAVA_16, "1.6"), /** * Java 1.7. */ SEVEN(JdkVersion.JAVA_17, "1.7"), /** * Java 1.8. */ EIGHT(JdkVersion.JAVA_18, "1.8"), /** * Java 1.9. */ NINE(JdkVersion.JAVA_19, "1.9"); private final int value; private final String name; private JavaVersion(int value, String name) { this.value = value; this.name = name; } /** * Determines if this version is within the specified range of versions. * @param range the range * @param version the bounds of the range * @return if this version is within the specified range */ public boolean isWithin(Range range, JavaVersion version) { Assert.notNull(range, "Range must not be null"); Assert.notNull(version, "Version must not be null"); switch (range) { case EQUAL_OR_NEWER: return this.value >= version.value; case OLDER_THAN: return this.value < version.value; } throw new IllegalStateException("Unknown range " + range); } @Override public String toString() { return this.name; } /** * Returns the {@link JavaVersion} of the current runtime. */ public static JavaVersion getJavaVersion() { int version = JdkVersion.getMajorJavaVersion(); for (JavaVersion candidate : JavaVersion.values()) { if (candidate.value == version) { return candidate; } } return SIX; } } }
apache-2.0
prakashme/spring-boot
spring-boot/src/main/java/org/springframework/boot/logging/logback/DefaultLogbackConfiguration.java
6214
/*
 * Copyright 2012-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.logging.logback;

import java.nio.charset.Charset;

import org.springframework.boot.bind.RelaxedPropertyResolver;
import org.springframework.boot.logging.LogFile;
import org.springframework.boot.logging.LoggingInitializationContext;
import org.springframework.core.env.Environment;
import org.springframework.core.env.PropertyResolver;
import org.springframework.core.env.PropertySourcesPropertyResolver;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Appender;
import ch.qos.logback.core.ConsoleAppender;
import ch.qos.logback.core.rolling.FixedWindowRollingPolicy;
import ch.qos.logback.core.rolling.RollingFileAppender;
import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy;
import ch.qos.logback.core.util.OptionHelper;

/**
 * Default logback configuration used by Spring Boot. Uses {@link LogbackConfigurator} to
 * improve startup time. See also the {@code defaults.xml}, {@code console-appender.xml}
 * and {@code file-appender.xml} files provided for classic {@code logback.xml} use.
 *
 * @author Phillip Webb
 * @since 1.1.2
 */
class DefaultLogbackConfiguration {

	// Colorized console pattern; %clr conversion word is registered in base().
	private static final String CONSOLE_LOG_PATTERN = "%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} "
			+ "%clr(%5p) %clr(${PID:- }){magenta} %clr(---){faint} "
			+ "%clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} "
			+ "%clr(:){faint} %m%n%rEx";

	// Plain (non-colorized) pattern used for the rolling file appender.
	private static final String FILE_LOG_PATTERN = "%d{yyyy-MM-dd HH:mm:ss.SSS} %5p "
			+ "${PID:- } --- [%t] %-40.40logger{39} : %m%n%rEx";

	private static final Charset UTF8 = Charset.forName("UTF-8");

	// Resolves "logging.pattern.*" overrides (keys "console" and "file").
	private final PropertyResolver patterns;

	// Target log file; null means console-only logging.
	private final LogFile logFile;

	public DefaultLogbackConfiguration(
			LoggingInitializationContext initializationContext, LogFile logFile) {
		this.patterns = getPatternsResolver(initializationContext.getEnvironment());
		this.logFile = logFile;
	}

	// Falls back to an empty resolver when no Environment is available (early startup).
	private PropertyResolver getPatternsResolver(Environment environment) {
		if (environment == null) {
			return new PropertySourcesPropertyResolver(null);
		}
		return new RelaxedPropertyResolver(environment, "logging.pattern.");
	}

	/**
	 * Applies the full default configuration (conversion rules, default logger levels,
	 * console appender and, when a log file is configured, a rolling file appender)
	 * under the configurator's lock.
	 */
	@SuppressWarnings("unchecked")
	public void apply(LogbackConfigurator config) {
		synchronized (config.getConfigurationLock()) {
			base(config);
			Appender<ILoggingEvent> consoleAppender = consoleAppender(config);
			if (this.logFile != null) {
				Appender<ILoggingEvent> fileAppender = fileAppender(config,
						this.logFile.toString());
				config.root(Level.INFO, consoleAppender, fileAppender);
			}
			else {
				config.root(Level.INFO, consoleAppender);
			}
		}
	}

	// Registers conversion words, the debug-level remapping appender and the default
	// level overrides for chatty third-party loggers.
	private void base(LogbackConfigurator config) {
		config.conversionRule("clr", ColorConverter.class);
		config.conversionRule("wex", WhitespaceThrowableProxyConverter.class);
		LevelRemappingAppender debugRemapAppender = new LevelRemappingAppender(
				"org.springframework.boot");
		config.start(debugRemapAppender);
		config.appender("DEBUG_LEVEL_REMAPPER", debugRemapAppender);
		config.logger("", Level.ERROR);
		config.logger("org.apache.catalina.startup.DigesterFactory", Level.ERROR);
		config.logger("org.apache.catalina.util.LifecycleBase", Level.ERROR);
		config.logger("org.apache.coyote.http11.Http11NioProtocol", Level.WARN);
		config.logger("org.apache.sshd.common.util.SecurityUtils", Level.WARN);
		config.logger("org.apache.tomcat.util.net.NioSelectorPool", Level.WARN);
		config.logger("org.crsh.plugin", Level.WARN);
		config.logger("org.crsh.ssh", Level.WARN);
		config.logger("org.eclipse.jetty.util.component.AbstractLifeCycle", Level.ERROR);
		config.logger("org.hibernate.validator.internal.util.Version", Level.WARN);
		config.logger("org.springframework.boot.actuate.autoconfigure."
				+ "CrshAutoConfiguration", Level.WARN);
		config.logger("org.springframework.boot.actuate.endpoint.jmx", null, false,
				debugRemapAppender);
		config.logger("org.thymeleaf", null, false, debugRemapAppender);
	}

	// Builds and registers the CONSOLE appender. The encoder is started before being
	// attached; note the console encoder is explicitly UTF-8.
	private Appender<ILoggingEvent> consoleAppender(LogbackConfigurator config) {
		ConsoleAppender<ILoggingEvent> appender = new ConsoleAppender<ILoggingEvent>();
		PatternLayoutEncoder encoder = new PatternLayoutEncoder();
		String logPattern = this.patterns.getProperty("console", CONSOLE_LOG_PATTERN);
		encoder.setPattern(OptionHelper.substVars(logPattern, config.getContext()));
		encoder.setCharset(UTF8);
		config.start(encoder);
		appender.setEncoder(encoder);
		config.appender("CONSOLE", appender);
		return appender;
	}

	// Builds and registers the FILE appender: fixed-window rollover ("<file>.%i")
	// triggered by a 10MB size threshold.
	private Appender<ILoggingEvent> fileAppender(LogbackConfigurator config,
			String logFile) {
		RollingFileAppender<ILoggingEvent> appender = new RollingFileAppender<ILoggingEvent>();
		PatternLayoutEncoder encoder = new PatternLayoutEncoder();
		String logPattern = this.patterns.getProperty("file", FILE_LOG_PATTERN);
		encoder.setPattern(OptionHelper.substVars(logPattern, config.getContext()));
		appender.setEncoder(encoder);
		config.start(encoder);
		appender.setFile(logFile);
		FixedWindowRollingPolicy rollingPolicy = new FixedWindowRollingPolicy();
		rollingPolicy.setFileNamePattern(logFile + ".%i");
		appender.setRollingPolicy(rollingPolicy);
		rollingPolicy.setParent(appender);
		config.start(rollingPolicy);
		SizeBasedTriggeringPolicy<ILoggingEvent> triggeringPolicy = new SizeBasedTriggeringPolicy<ILoggingEvent>();
		triggeringPolicy.setMaxFileSize("10MB");
		appender.setTriggeringPolicy(triggeringPolicy);
		config.start(triggeringPolicy);
		config.appender("FILE", appender);
		return appender;
	}

}
apache-2.0
sjaco002/vxquery
vxquery-core/src/main/java/org/apache/vxquery/exceptions/VXQueryDataException.java
1475
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.vxquery.exceptions;

import java.io.File;
import java.util.regex.Matcher;

import org.apache.hyracks.api.exceptions.HyracksDataException;

/**
 * {@link HyracksDataException} that expands the {@code [nodeId]} and {@code [path]}
 * placeholders in the exception message with the node id and the absolute path of the
 * data file being processed.
 */
public class VXQueryDataException extends HyracksDataException {
    private static final long serialVersionUID = 1L;

    // Data file whose absolute path replaces the [path] placeholder.
    private final File file;

    public VXQueryDataException(String message, Exception ex, File file) {
        super(message, ex);
        this.file = file;
    }

    @Override
    public String getMessage() {
        String message = super.getMessage();
        // Quote the replacement strings: replaceAll() treats '\' and '$' in the
        // replacement specially, so an unquoted Windows path (or a '$' in either
        // value) would corrupt the message or throw IllegalArgumentException.
        message = message.replaceAll("\\[nodeId\\]", Matcher.quoteReplacement(getNodeId()));
        message = message.replaceAll("\\[path\\]", Matcher.quoteReplacement(file.getAbsolutePath()));
        return message;
    }
}
apache-2.0
tientq/jhipster-microservices
authorization-service/src/main/java/net/tinyset/authorization/config/ThymeleafConfiguration.java
994
package net.tinyset.authorization.config; import org.apache.commons.lang3.CharEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.*; import org.thymeleaf.templateresolver.ClassLoaderTemplateResolver; @Configuration public class ThymeleafConfiguration { @SuppressWarnings("unused") private final Logger log = LoggerFactory.getLogger(ThymeleafConfiguration.class); @Bean @Description("Thymeleaf template resolver serving HTML 5 emails") public ClassLoaderTemplateResolver emailTemplateResolver() { ClassLoaderTemplateResolver emailTemplateResolver = new ClassLoaderTemplateResolver(); emailTemplateResolver.setPrefix("mails/"); emailTemplateResolver.setSuffix(".html"); emailTemplateResolver.setTemplateMode("HTML5"); emailTemplateResolver.setCharacterEncoding(CharEncoding.UTF_8); emailTemplateResolver.setOrder(1); return emailTemplateResolver; } }
apache-2.0
rapidoid/rapidoid
rapidoid-networking/src/main/java/org/rapidoid/net/impl/ConnState.java
2015
/*- * #%L * rapidoid-networking * %% * Copyright (C) 2014 - 2020 Nikolche Mihajlovski and contributors * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.rapidoid.net.impl; import org.rapidoid.RapidoidThing; import org.rapidoid.annotation.Authors; import org.rapidoid.annotation.Since; import org.rapidoid.util.Resetable; import java.util.List; @Authors("Nikolche Mihajlovski") @Since("2.0.0") public class ConnState extends RapidoidThing implements Resetable { public volatile long n; public volatile Object obj; public ConnState() { } public ConnState(long n, Object obj) { this.n = n; this.obj = obj; } /* COMMENTED OUT FOR PRODUCTION (the log is used for debugging during development): */ // private final List<String> log = Collections.synchronizedList(U.<String> list()); public void reset() { n = 0; obj = null; log("<<< RESET >>>"); } /* The log is used for debugging during development. */ public void log(String msg) { /* COMMENTED OUT FOR PRODUCTION: */ // log.add(msg); } /* The log is used for debugging during development. */ public List<String> log() { return null; /* COMMENTED OUT FOR PRODUCTION: */ // return log; } public ConnState copy() { return new ConnState(n, obj); } public void copyFrom(ConnState state) { this.n = state.n; this.obj = state.obj; } }
apache-2.0
quarkusio/quarkus
integration-tests/kafka-snappy/src/test/java/io/quarkus/it/kafka/KafkaSnappyProducerTest.java
2483
package io.quarkus.it.kafka; import static org.hamcrest.Matchers.*; import java.time.Duration; import java.util.Collections; import java.util.Properties; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.StringDeserializer; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import io.quarkus.test.junit.QuarkusTest; import io.restassured.RestAssured; @QuarkusTest public class KafkaSnappyProducerTest { public static KafkaConsumer<Integer, String> createConsumer() { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaTestResource.getBootstrapServers()); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("test")); return consumer; } @Test public void test() throws Exception { KafkaConsumer<Integer, String> consumer = createConsumer(); RestAssured.with().body("hello").post("/kafka"); ConsumerRecord<Integer, String> records = consumer.poll(Duration.ofMillis(10000)).iterator().next(); Assertions.assertEquals(records.key(), (Integer) 0); Assertions.assertEquals(records.value(), "hello"); } @Test public void health() throws Exception { RestAssured.when().get("/q/health/ready").then() .body("status", is("UP"), "checks.status", containsInAnyOrder("UP"), "checks.name", containsInAnyOrder("Kafka connection health check")); } @Test public void metrics() throws 
Exception { // Look for kafka producer metrics (add .log().all() to examine what they are RestAssured.when().get("/q/metrics").then() .statusCode(200) .body(containsString("kafka_producer_")); } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-imagebuilder/src/main/java/com/amazonaws/services/imagebuilder/model/transform/UntagResourceRequestProtocolMarshaller.java
2635
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.imagebuilder.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;

import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.imagebuilder.model.*;
import com.amazonaws.transform.Marshaller;

import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * UntagResourceRequest Marshaller.
 *
 * <p>
 * NOTE: generated by the AWS Java SDK code generator — do not hand-edit; regenerate
 * instead. Converts an {@link UntagResourceRequest} into an HTTP DELETE request to
 * {@code /tags/{resourceArn}} using the REST-JSON protocol.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UntagResourceRequestProtocolMarshaller implements Marshaller<Request<UntagResourceRequest>, UntagResourceRequest> {

    // Static description of the operation binding: REST-JSON, DELETE to
    // /tags/{resourceArn}, with no request payload members.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON).requestUri("/tags/{resourceArn}")
            .httpMethodName(HttpMethodName.DELETE).hasExplicitPayloadMember(false).hasPayloadMembers(false).serviceName("AWSimagebuilder").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public UntagResourceRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given request into an SDK HTTP request.
     *
     * @param untagResourceRequest the request to marshall; must not be null
     * @return the marshalled HTTP request
     * @throws SdkClientException if the argument is null or marshalling fails (the
     *         underlying cause is preserved)
     */
    public Request<UntagResourceRequest> marshall(UntagResourceRequest untagResourceRequest) {

        if (untagResourceRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            final ProtocolRequestMarshaller<UntagResourceRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    untagResourceRequest);

            protocolMarshaller.startMarshalling();
            UntagResourceRequestMarshaller.getInstance().marshall(untagResourceRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Generated boilerplate: wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
googlearchive/firebase-jobdispatcher-android
jobdispatcher/src/main/java/com/firebase/jobdispatcher/JobService.java
23819
// Copyright 2016 Google, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////

package com.firebase.jobdispatcher;

import static com.firebase.jobdispatcher.GooglePlayReceiver.getJobCoder;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;

import android.app.Service;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.RemoteException;
import android.os.SystemClock;
import android.support.annotation.AnyThread;
import android.support.annotation.BinderThread;
import android.support.annotation.IntDef;
import android.support.annotation.MainThread;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.VisibleForTesting;
import android.support.annotation.WorkerThread;
import android.support.v4.util.SimpleArrayMap;
import android.text.format.DateUtils;
import android.util.Log;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import org.json.JSONObject;

/**
 * JobService is the fundamental unit of work used in the JobDispatcher.
 *
 * <p>Users will need to override {@link #onStartJob(JobParameters)}, which is where any
 * asynchronous execution should start. This method, like most lifecycle methods, runs on the main
 * thread; you <b>must</b> offload execution to another thread (or {@link android.os.AsyncTask}, or
 * {@link android.os.Handler}, or your favorite flavor of concurrency).
 *
 * <p>Once any asynchronous work is complete {@link #jobFinished(JobParameters, boolean)} should be
 * called to inform the backing driver of the result.
 *
 * <p>Implementations should also override {@link #onStopJob(JobParameters)}, which will be called
 * if the scheduling engine wishes to interrupt your work (most likely because the runtime
 * constraints that are associated with the job in question are no longer met).
 *
 * @deprecated Firebase Job Dispatcher is deprecated. Apps should migrate to WorkManager before Apr
 *     7, 2020. Please see FJD's README.md file for more information.
 */
@Deprecated
public abstract class JobService extends Service {
  /**
   * Returned to indicate the job was executed successfully. If the job is not recurring (i.e. a
   * one-off) it will be dequeued and forgotten. If it is recurring the trigger will be reset and
   * the job will be requeued.
   */
  public static final int RESULT_SUCCESS = 0;

  /**
   * Returned to indicate the job encountered an error during execution and should be retried after
   * a backoff period.
   */
  public static final int RESULT_FAIL_RETRY = 1;

  /**
   * Returned to indicate the job encountered an error during execution but should not be retried.
   * If the job is not recurring (i.e. a one-off) it will be dequeued and forgotten. If it is
   * recurring the trigger will be reset and the job will be requeued.
   */
  public static final int RESULT_FAIL_NORETRY = 2;

  /** The result returned from a job execution. */
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({RESULT_SUCCESS, RESULT_FAIL_RETRY, RESULT_FAIL_NORETRY})
  public @interface JobResult {}

  static final String TAG = "FJD.JobService";

  @VisibleForTesting
  static final String ACTION_EXECUTE = "com.firebase.jobdispatcher.ACTION_EXECUTE";

  /** Handler bound to the main looper; used to run the onStartJob/onStopJob callbacks. */
  private static final Handler mainHandler = new Handler(Looper.getMainLooper());

  /**
   * A background executor that lazily creates up to one thread.
   *
   * <p>With a single worker thread, all {@link UnitOfWork} tasks submitted here execute serially.
   */
  @VisibleForTesting
  final ExecutorService backgroundExecutor =
      new ThreadPoolExecutor(
          /* corePoolSize= */ 0,
          /* maximumPoolSize= */ 1,
          /* keepAliveTime= */ 60L,
          /* unit= */ SECONDS,
          /* workQueue= */ new LinkedBlockingQueue<Runnable>());

  /**
   * Correlates job tags (unique strings) with Messages, which are used to signal the completion of
   * a job.
   *
   * <p>All access should happen on the {@link #backgroundExecutor}.
   */
  // @GuardedBy("runningJobs")
  private final SimpleArrayMap<String, JobCallback> runningJobs = new SimpleArrayMap<>(1);

  /** Binder surface the driver uses to start and stop jobs; both entry points hop to the executor. */
  private final IRemoteJobService.Stub binder =
      new IRemoteJobService.Stub() {
        @Override
        @BinderThread
        public void start(Bundle invocationData, IJobCallback callback) {
          JobInvocation.Builder invocation = getJobCoder().decode(invocationData);
          if (invocation == null) {
            Log.wtf(TAG, "start: unknown invocation provided");
            return;
          }

          JobService.this.handleStartJobRequest(invocation.build(), callback);
        }

        @Override
        @BinderThread
        public void stop(Bundle invocationData, boolean needToSendResult) {
          JobInvocation.Builder invocation = getJobCoder().decode(invocationData);
          if (invocation == null) {
            Log.wtf(TAG, "stop: unknown invocation provided");
            return;
          }

          JobService.this.handleStopJobRequest(invocation.build(), needToSendResult);
        }
      };

  /**
   * The entry point to your Job. Implementations should offload work to another thread of execution
   * as soon as possible because this runs on the main thread. If work was offloaded, call {@link
   * JobService#jobFinished(JobParameters, boolean)} to notify the scheduling service that the work
   * is completed.
   *
   * <p>If a job with the same service and tag was rescheduled during execution {@link
   * JobService#onStopJob(JobParameters)} will be called and the wakelock will be released. Please
   * make sure that all reschedule requests happen at the end of the job.
   *
   * @return {@code true} if there is more work remaining in the worker thread, {@code false} if the
   *     job was completed.
   */
  @MainThread
  public abstract boolean onStartJob(@NonNull JobParameters job);

  /**
   * Called when the scheduling engine has decided to interrupt the execution of a running job, most
   * likely because the runtime constraints associated with the job are no longer satisfied. The job
   * must stop execution.
   *
   * @return true if the job should be retried
   * @see com.firebase.jobdispatcher.JobInvocation.Builder#setRetryStrategy(RetryStrategy)
   * @see RetryStrategy
   */
  @MainThread
  public abstract boolean onStopJob(@NonNull JobParameters job);

  /**
   * Asks the {@code job} to start running. Calls {@link #onStartJob} on the main thread. Once
   * complete, the {@code callback} will be used to send the result back.
   */
  @BinderThread
  private void handleStartJobRequest(JobParameters job, IJobCallback callback) {
    backgroundExecutor.execute(UnitOfWork.handleStartJobRequest(this, job, callback));
  }

  /**
   * Records that the provided {@code job} has been started, then arranges for {@link
   * #onStartJob(JobParameters)} to be called on the main thread (via {@link
   * #callOnStartJobImpl(JobParameters)}).
   */
  @WorkerThread
  private void handleStartJobRequestImpl(final JobParameters job, IJobCallback callback) {
    synchronized (runningJobs) {
      // Duplicate start requests for the same tag are ignored; the first one wins.
      if (runningJobs.containsKey(job.getTag())) {
        Log.w(
            TAG, String.format(Locale.US, "Job with tag = %s was already running.", job.getTag()));
        return;
      }
      runningJobs.put(job.getTag(), new JobCallback(job, callback, SystemClock.elapsedRealtime()));
    }

    // onStartJob needs to be called on the main thread
    mainHandler.post(UnitOfWork.callOnStartJob(this, job));
  }

  /** Calls {@link #onStartJob(JobParameters)}. Should only be run on the main thread. */
  @MainThread
  private void callOnStartJobImpl(JobParameters jobParameters) {
    boolean moreWork = onStartJob(jobParameters);
    if (!moreWork) {
      // If there's no more work to do, we're done. Report success.
      backgroundExecutor.execute(
          UnitOfWork.removeAndFinishJobWithResult(
              this, jobParameters, /* result= */ RESULT_SUCCESS));
    }
  }

  /**
   * Asks job to stop.
   *
   * <p>Sending results can be skipped if the call was initiated by a reschedule request.
   */
  @BinderThread
  private void handleStopJobRequest(JobParameters job, boolean needToSendResult) {
    backgroundExecutor.execute(
        UnitOfWork.handleStopJobRequest(this, job, /* needToSendResult= */ needToSendResult));
  }

  @WorkerThread
  private void handleStopJobRequestImpl(final JobParameters job, final boolean needToSendResult) {
    synchronized (runningJobs) {
      JobCallback jobCallback = runningJobs.remove(job.getTag());

      if (jobCallback == null) {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
          Log.d(TAG, "Provided job has already been executed.");
        }
        return;
      }

      // onStopJob needs to be called on the main thread
      mainHandler.post(
          UnitOfWork.callOnStopJob(
              this,
              jobCallback,
              /* needToSendResult= */ needToSendResult,
              /* terminatingResult= */ RESULT_SUCCESS));
    }
  }

  /** Calls {@link #onStopJob(JobParameters)}. Should only be run on the main thread. */
  @MainThread
  private void callOnStopJobImpl(
      JobCallback jobCallback, boolean needToSendResult, @JobResult int terminatingResult) {
    boolean shouldRetry = onStopJob(jobCallback.job);
    if (needToSendResult) {
      // A retry request from the implementation overrides the caller-supplied terminating result.
      backgroundExecutor.execute(
          UnitOfWork.finishJobWithResult(
              jobCallback, shouldRetry ? RESULT_FAIL_RETRY : terminatingResult));
    }
  }

  /**
   * Callback to inform the scheduling driver that you've finished executing. Can be called from any
   * thread. When the system receives this message, it will release the wakelock being held.
   *
   * @param job the parameters of the job that finished executing
   * @param needsReschedule whether the job should be rescheduled
   * @see com.firebase.jobdispatcher.JobInvocation.Builder#setRetryStrategy(RetryStrategy)
   */
  @AnyThread
  public final void jobFinished(@NonNull JobParameters job, boolean needsReschedule) {
    // Defensive null check despite @NonNull: this is a public API callers may misuse.
    if (job == null) {
      Log.e(TAG, "jobFinished called with a null JobParameters");
      return;
    }

    this.backgroundExecutor.execute(
        UnitOfWork.removeAndFinishJobWithResult(
            this, job, /* result= */ needsReschedule ? RESULT_FAIL_RETRY : RESULT_SUCCESS));
  }

  /**
   * Removes the provided {@code job} from the list of {@link #runningJobs} and sends the {@code
   * result} if the job wasn't already complete.
   */
  @WorkerThread
  private void removeAndFinishJobWithResultImpl(JobParameters job, @JobResult int result) {
    synchronized (runningJobs) {
      JobCallback callback = runningJobs.remove(job.getTag());

      if (callback != null) {
        callback.sendResult(result);
      }
    }
  }

  @Override
  @MainThread
  public final int onStartCommand(Intent intent, int flags, int startId) {
    // This service is bind-only; any started command is immediately stopped.
    stopSelf(startId);

    return START_NOT_STICKY;
  }

  @Nullable
  @Override
  @MainThread
  public final IBinder onBind(Intent intent) {
    return binder;
  }

  @Override
  @MainThread
  public final boolean onUnbind(Intent intent) {
    backgroundExecutor.execute(UnitOfWork.handleOnUnbindEvent(this, intent));

    return super.onUnbind(intent);
  }

  @WorkerThread
  private void handleOnUnbindEventImpl(Intent unusedIntent) {
    synchronized (runningJobs) {
      // Iterate backwards because entries are removed as we go.
      for (int i = runningJobs.size() - 1; i >= 0; i--) {
        JobCallback callback = runningJobs.remove(runningJobs.keyAt(i));
        if (callback != null) {
          // Ask the job to stop. onStopJob needs to be called on the main thread
          mainHandler.post(
              UnitOfWork.callOnStopJob(
                  this,
                  callback,
                  /* needToSendResult= */ true,
                  /* terminatingResult= */ RESULT_FAIL_NORETRY));
        }
      }
    }
  }

  @Override
  @MainThread
  public final void onRebind(Intent intent) {
    super.onRebind(intent);
  }

  @Override
  @MainThread
  public final void onStart(Intent intent, int startId) {}

  /**
   * Package-private alias for {@link #dump(FileDescriptor, PrintWriter, String[])}.
   *
   * <p>The {@link #dump(FileDescriptor, PrintWriter, String[])} method is protected. This
   * implementation method is marked package-private to facilitate testing.
   */
  @VisibleForTesting
  final void dumpImpl(PrintWriter writer) {
    synchronized (runningJobs) {
      if (runningJobs.isEmpty()) {
        writer.println("No running jobs");
        return;
      }

      long now = SystemClock.elapsedRealtime();
      writer.println("Running jobs:");
      for (int i = 0; i < runningJobs.size(); i++) {
        JobCallback callback = runningJobs.get(runningJobs.keyAt(i));

        // Add sanitized quotes around the tag to make this easier to parse for robots
        String name = JSONObject.quote(callback.job.getTag());

        // Produces strings like "02:30"
        String duration =
            DateUtils.formatElapsedTime(MILLISECONDS.toSeconds(now - callback.startedAtElapsed));

        writer.println("    * " + name + " has been running for " + duration);
      }
    }
  }

  @Override
  protected final void dump(FileDescriptor fd, PrintWriter writer, String[] args) {
    dumpImpl(writer);
  }

  @Override
  @MainThread
  public final void onConfigurationChanged(Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
  }

  @Override
  @MainThread
  public final void onTaskRemoved(Intent rootIntent) {
    super.onTaskRemoved(rootIntent);
  }

  /** Bundles a running job's parameters, its remote result channel, and its start timestamp. */
  private static final class JobCallback {
    final JobParameters job;
    final IJobCallback remoteCallback;
    // elapsedRealtime() at job start; used by dumpImpl to report run duration.
    final long startedAtElapsed;

    private JobCallback(JobParameters job, IJobCallback callback, long startedAtElapsed) {
      this.job = job;
      this.remoteCallback = callback;
      this.startedAtElapsed = startedAtElapsed;
    }

    /** Sends the terminal {@code result} back to the driver; remote failures are only logged. */
    void sendResult(@JobResult int result) {
      try {
        remoteCallback.jobFinished(getJobCoder().encode(job, new Bundle()), result);
      } catch (RemoteException remoteException) {
        Log.e(TAG, "Failed to send result to driver", remoteException);
      }
    }
  }

  /**
   * A runnable that calls various JobService methods.
   *
   * <p>Instances should be constructed via the static factory methods. Kept as a single class to
   * reduce impact on APK size.
   */
  private static class UnitOfWork implements Runnable {
    /** See {@link #callOnStartJob(JobService, JobParameters)}. */
    private static final int CALL_ON_START_JOB = 1;
    /** See {@link #callOnStopJob(JobService, JobCallback, boolean, int)}. */
    private static final int CALL_ON_STOP_JOB = 2;
    /** See {@link #handleOnUnbindEvent(JobService, Intent)}. */
    private static final int HANDLE_ON_UNBIND_EVENT = 3;
    /** See {@link #handleStartJobRequest(JobService, JobParameters, IJobCallback)}. */
    private static final int HANDLE_START_JOB_REQUEST = 4;
    /** See {@link #handleStopJobRequest(JobService, JobParameters, boolean)}. */
    private static final int HANDLE_STOP_JOB_REQUEST = 5;
    /** See {@link #finishJobWithResult(JobCallback, int)}. */
    private static final int FINISH_JOB_WITH_RESULT = 6;
    /** See {@link #removeAndFinishJobWithResult(JobService, JobParameters, int)}. */
    private static final int REMOVE_AND_FINISH_JOB_WITH_RESULT = 7;

    @Retention(RetentionPolicy.SOURCE)
    @IntDef({
      CALL_ON_START_JOB,
      CALL_ON_STOP_JOB,
      HANDLE_ON_UNBIND_EVENT,
      HANDLE_START_JOB_REQUEST,
      HANDLE_STOP_JOB_REQUEST,
      FINISH_JOB_WITH_RESULT,
      REMOVE_AND_FINISH_JOB_WITH_RESULT,
    })
    private @interface WorkType {}

    /** The type of work to do. Always set. */
    @WorkType private final int workType;

    /** The JobService to do the work on. Always set. */
    // NOTE(review): finishJobWithResult() passes null here despite @NonNull; run() never
    // dereferences jobService for FINISH_JOB_WITH_RESULT, so this is currently safe — confirm.
    @NonNull private final JobService jobService;

    /**
     * Set for {@link #CALL_ON_START_JOB}, {@link #CALL_ON_STOP_JOB}, {@link
     * #HANDLE_START_JOB_REQUEST}, {@link #HANDLE_STOP_JOB_REQUEST}, and {@link
     * #REMOVE_AND_FINISH_JOB_WITH_RESULT}.
     */
    @Nullable private final JobParameters jobParameters;

    /** Set for {@link #HANDLE_START_JOB_REQUEST}. */
    @Nullable private final IJobCallback remoteJobCallback;

    /** Set for {@link #CALL_ON_STOP_JOB} and {@link #FINISH_JOB_WITH_RESULT}. */
    @Nullable private final JobCallback jobCallback;

    /**
     * Set for {@link #CALL_ON_STOP_JOB}, {@link #FINISH_JOB_WITH_RESULT}, and {@link
     * #REMOVE_AND_FINISH_JOB_WITH_RESULT}.
     */
    @JobResult private final int terminatingResult;

    /**
     * Boolean value whose meaning changes depending on the {@link #workType}.
     *
     * <p>Set for {@link #HANDLE_STOP_JOB_REQUEST} and {@link #CALL_ON_STOP_JOB}.
     */
    private final boolean boolValue;

    /** Set for {@link #HANDLE_ON_UNBIND_EVENT}. */
    @Nullable private final Intent unbindIntent;

    private UnitOfWork(
        @WorkType int workType,
        @NonNull JobService jobService,
        @Nullable JobParameters jobParameters,
        @Nullable IJobCallback remoteJobCallback,
        @Nullable JobCallback jobCallback,
        @Nullable Intent unbindIntent,
        boolean boolValue,
        @JobResult int terminatingResult) {
      this.workType = workType;
      this.jobService = jobService;
      this.jobParameters = jobParameters;
      this.remoteJobCallback = remoteJobCallback;
      this.jobCallback = jobCallback;
      this.unbindIntent = unbindIntent;
      this.boolValue = boolValue;
      this.terminatingResult = terminatingResult;
    }

    /** Creates a Runnable that calls {@link JobService#callOnStartJobImpl(JobParameters)}. */
    static UnitOfWork callOnStartJob(JobService jobService, JobParameters jobParameters) {
      return new UnitOfWork(
          CALL_ON_START_JOB,
          /* jobService= */ jobService,
          /* jobParameters= */ jobParameters,
          /* remoteJobCallback= */ null,
          /* jobCallback= */ null,
          /* unbindIntent= */ null,
          /* boolValue= */ false,
          /* terminatingResult= */ RESULT_SUCCESS);
    }

    /**
     * Creates a Runnable that calls {@link JobService#callOnStopJobImpl(JobCallback, boolean,
     * int)}.
     */
    static UnitOfWork callOnStopJob(
        JobService jobService,
        JobCallback jobCallback,
        boolean needToSendResult,
        @JobResult int terminatingResult) {
      return new UnitOfWork(
          CALL_ON_STOP_JOB,
          /* jobService= */ jobService,
          /* jobParameters= */ null,
          /* remoteJobCallback= */ null,
          /* jobCallback= */ jobCallback,
          /* unbindIntent= */ null,
          /* boolValue= */ needToSendResult,
          /* terminatingResult= */ terminatingResult);
    }

    /** Creates a Runnable that calls {@link JobService#handleOnUnbindEventImpl(Intent)}. */
    static UnitOfWork handleOnUnbindEvent(
        @NonNull JobService jobService, @NonNull Intent unbindIntent) {
      return new UnitOfWork(
          HANDLE_ON_UNBIND_EVENT,
          jobService,
          /* jobParameters= */ null,
          /* remoteJobCallback= */ null,
          /* jobCallback= */ null,
          /* unbindIntent= */ unbindIntent,
          /* boolValue= */ false,
          /* terminatingResult= */ RESULT_SUCCESS);
    }

    /**
     * Creates a Runnable that calls {@link JobService#handleStartJobRequestImpl(JobParameters,
     * IJobCallback)}.
     */
    static UnitOfWork handleStartJobRequest(
        @NonNull JobService jobService,
        @NonNull JobParameters jobParameters,
        @NonNull IJobCallback remoteJobCallback) {
      return new UnitOfWork(
          HANDLE_START_JOB_REQUEST,
          jobService,
          /* jobParameters= */ jobParameters,
          /* remoteJobCallback= */ remoteJobCallback,
          /* jobCallback= */ null,
          /* unbindIntent= */ null,
          /* boolValue= */ false,
          /* terminatingResult= */ RESULT_SUCCESS);
    }

    /**
     * Creates a Runnable that calls {@link JobService#handleStopJobRequestImpl(JobParameters,
     * boolean)}.
     */
    static UnitOfWork handleStopJobRequest(
        @NonNull JobService jobService,
        @NonNull JobParameters jobParameters,
        boolean needToSendResult) {
      return new UnitOfWork(
          HANDLE_STOP_JOB_REQUEST,
          jobService,
          /* jobParameters= */ jobParameters,
          /* remoteJobCallback= */ null,
          /* jobCallback= */ null,
          /* unbindIntent= */ null,
          /* boolValue= */ needToSendResult,
          /* terminatingResult= */ RESULT_SUCCESS);
    }

    /** Creates a Runnable that calls {@link JobCallback#sendResult(int)}. */
    static UnitOfWork finishJobWithResult(@NonNull JobCallback jobCallback, @JobResult int result) {
      return new UnitOfWork(
          FINISH_JOB_WITH_RESULT,
          /* jobService= */ null,
          /* jobParameters= */ null,
          /* remoteJobCallback= */ null,
          /* jobCallback= */ jobCallback,
          /* unbindIntent= */ null,
          /* boolValue= */ false,
          /* terminatingResult= */ result);
    }

    /**
     * Creates a Runnable that calls {@link
     * JobService#removeAndFinishJobWithResultImpl(JobParameters, int)}.
     */
    static UnitOfWork removeAndFinishJobWithResult(
        @NonNull JobService jobService,
        @NonNull JobParameters jobParameters,
        @JobResult int result) {
      return new UnitOfWork(
          REMOVE_AND_FINISH_JOB_WITH_RESULT,
          jobService,
          /* jobParameters= */ jobParameters,
          /* remoteJobCallback= */ null,
          /* jobCallback= */ null,
          /* unbindIntent= */ null,
          /* boolValue= */ false,
          /* terminatingResult= */ result);
    }

    @Override
    public void run() {
      switch (workType) {
        case CALL_ON_START_JOB:
          // called on main thread
          jobService.callOnStartJobImpl(jobParameters);
          return;

        case CALL_ON_STOP_JOB:
          // called on main thread
          jobService.callOnStopJobImpl(
              jobCallback, /* needToSendResult= */ boolValue, terminatingResult);
          return;

        case HANDLE_ON_UNBIND_EVENT:
          jobService.handleOnUnbindEventImpl(unbindIntent);
          return;

        case HANDLE_START_JOB_REQUEST:
          jobService.handleStartJobRequestImpl(jobParameters, remoteJobCallback);
          return;

        case HANDLE_STOP_JOB_REQUEST:
          jobService.handleStopJobRequestImpl(jobParameters, /* needToSendResult= */ boolValue);
          return;

        case FINISH_JOB_WITH_RESULT:
          jobCallback.sendResult(terminatingResult);
          return;

        case REMOVE_AND_FINISH_JOB_WITH_RESULT:
          jobService.removeAndFinishJobWithResultImpl(
              jobParameters, /* result= */ terminatingResult);
          return;

        default:
          throw new AssertionError("unreachable");
      }
    }
  }
}
apache-2.0
nince-wyj/jahhan
common/common-extension/src/main/java/net/jahhan/com/alibaba/dubbo/common/serialize/support/SerializationOptimizer.java
329
package net.jahhan.com.alibaba.dubbo.common.serialize.support; import java.util.Collection; /** * This class can be replaced with the contents in config file, but for now I think the class is easier to write * * @author lishen */ public interface SerializationOptimizer { Collection<Class> getSerializableClasses(); }
apache-2.0
gridgain/apache-ignite
modules/core/src/main/java/org/pcollections/Empty.java
972
package org.pcollections; /* Mike Klein, 2/27/2009 */ /* Empty remembers which classes implement the interface you want, * so you don't have to. */ /** * A static utility class for getting empty PCollections backed by the 'default' * implementations. * * @author mtklein */ public final class Empty { //non-instantiable: private Empty() { } public static <E> PStack<E> stack() { return ConsPStack.empty(); } public static <E> PQueue<E> queue() { return AmortizedPQueue.empty(); } public static <E> PVector<E> vector() { return TreePVector.empty(); } public static <E> PSet<E> set() { return HashTreePSet.empty(); } public static <E> POrderedSet<E> orderedSet() { return OrderedPSet.empty(); } public static <E> PBag<E> bag() { return HashTreePBag.empty(); } public static <K, V> PMap<K, V> map() { return HashTreePMap.empty(); } }
apache-2.0
liuqijie/wind
wind-common/src/main/java/com/wind/common/util/SpringContextUtil.java
1793
package com.wind.common.util; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import java.util.Map; /** * 资源文件读取工具 * * @author liuqijie * @date 2017年6月26日 */ public class SpringContextUtil implements ApplicationContextAware { private static ApplicationContext context = null; private SpringContextUtil() { super(); } @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { context = applicationContext; } /** * 根据名称获取bean * @param beanName * @return */ public static Object getBean(String beanName) { return context.getBean(beanName); } /** * 根据bean名称获取指定类型bean * @param beanName bean名称 * @param clazz 返回的bean类型,若类型不匹配,将抛出异常 */ public static <T> T getBean(String beanName, Class<T> clazz) { return context.getBean(beanName, clazz); } /** * 根据类型获取bean * @param clazz * @return */ public static <T> T getBean(Class<T> clazz) { T t = null; Map<String, T> map = context.getBeansOfType(clazz); for (Map.Entry<String, T> entry : map.entrySet()) { t = entry.getValue(); } return t; } /** * 是否包含bean * @param beanName * @return */ public static boolean containsBean(String beanName) { return context.containsBean(beanName); } /** * 是否是单例 * @param beanName * @return */ public static boolean isSingleton(String beanName) { return context.isSingleton(beanName); } /** * bean的类型 * @param beanName * @return */ public static Class getType(String beanName) { return context.getType(beanName); } }
apache-2.0
howiefh/jee-restful-web
src/main/java/io/github/howiefh/jeews/modules/sys/dao/OrganizationDao.java
427
/** * Copyright (c) 2015 https://github.com/howiefh * * Licensed under the Apache License, Version 2.0 (the "License") */ package io.github.howiefh.jeews.modules.sys.dao; import org.springframework.stereotype.Repository; import io.github.howiefh.jeews.common.dao.CrudDao; import io.github.howiefh.jeews.modules.sys.entity.Organization; @Repository public interface OrganizationDao extends CrudDao<Organization, Long> { }
apache-2.0