gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.diff.impl.settings;

import com.intellij.application.options.colors.ColorAndFontSettingsListener;
import com.intellij.application.options.colors.PreviewPanel;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diff.DiffBundle;
import com.intellij.openapi.diff.DiffContent;
import com.intellij.openapi.diff.DiffRequest;
import com.intellij.openapi.diff.SimpleContent;
import com.intellij.openapi.diff.impl.incrementalMerge.Change;
import com.intellij.openapi.diff.impl.incrementalMerge.MergeList;
import com.intellij.openapi.diff.impl.incrementalMerge.MergeSearchHelper;
import com.intellij.openapi.diff.impl.incrementalMerge.ui.EditorPlace;
import com.intellij.openapi.diff.impl.incrementalMerge.ui.MergePanel2;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.fileTypes.PlainTextFileType;
import com.intellij.openapi.project.Project;
import com.intellij.util.EventDispatcher;
import com.intellij.util.diff.FilesTooBigForDiffException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;

/**
 * The panel from the Settings, that allows to see changes to the diff/merge
 * coloring scheme right away. Shows a fixed three-way sample merge and notifies
 * registered {@link ColorAndFontSettingsListener}s when the user selects a change
 * (by click or caret move), so the settings page can highlight the matching
 * color option.
 */
public class DiffPreviewPanel implements PreviewPanel {

  //TODO [VISTALL] review text

  // Sample "left" version: has leftOnly(), a left-side edit in foo(), and lacks removedFromLeft().
  @NonNls private static final String LEFT_TEXT = "class MyClass {\n" + " int value;\n" + "\n" + " void leftOnly() {}\n" + "\n" + " void foo() {\n" + " // Left changes\n" + " }\n" + "}";

  // Sample "base" (center) version the two sides are merged against.
  @NonNls private static final String CENTER_TEXT = "class MyClass {\n" + " int value;\n" + "\n" + " void foo() {\n" + " }\n" + "\n" + " void removedFromLeft() {}\n" + "}";

  // Sample "right" version: changes the field type and edits foo().
  @NonNls private static final String RIGHT_TEXT = "class MyClass {\n" + " long value;\n" + "\n" + " void foo() {\n" + " // Right changes\n" + " }\n" + "\n" + " void removedFromLeft() {}\n" + "}";

  private final MergePanel2.AsComponent myMergePanelComponent;
  private final JPanel myPanel = new JPanel(new BorderLayout());
  // Fan-out dispatcher for selection events; listeners are added via addListener().
  private final EventDispatcher<ColorAndFontSettingsListener> myDispatcher = EventDispatcher.create(ColorAndFontSettingsListener.class);

  /**
   * Builds the preview and wires mouse/caret listeners to every merge editor.
   *
   * @param parent disposable parent that owns the merge panel component's lifecycle
   */
  public DiffPreviewPanel(@NotNull Disposable parent) {
    myMergePanelComponent = new MergePanel2.AsComponent(parent);
    myPanel.add(myMergePanelComponent, BorderLayout.CENTER);
    myMergePanelComponent.setToolbarEnabled(false);
    MergePanel2 mergePanel = getMergePanel();
    mergePanel.setScrollToFirstDiff(false);

    for (int i = 0; i < MergePanel2.EDITORS_COUNT; i++) {
      final EditorMouseListener motionListener = new EditorMouseListener(i);
      final EditorClickListener clickListener = new EditorClickListener(i);
      // Editors may be (re)created lazily by the merge panel: attach on creation,
      // detach on release.
      mergePanel.getEditorPlace(i).addListener(new EditorPlace.EditorListener() {
        @Override
        public void onEditorCreated(EditorPlace place) {
          Editor editor = place.getEditor();
          editor.addEditorMouseMotionListener(motionListener);
          editor.addEditorMouseListener(clickListener);
          editor.getCaretModel().addCaretListener(clickListener);
        }

        @Override
        public void onEditorReleased(Editor releasedEditor) {
          // NOTE(review): the caret listener added in onEditorCreated is not removed
          // here — presumably cleaned up with the editor's caret model on disposal;
          // verify against the platform's editor lifecycle.
          releasedEditor.removeEditorMouseMotionListener(motionListener);
          releasedEditor.removeEditorMouseListener(clickListener);
        }
      });
      // An editor may already exist for this slot; attach to it immediately as well.
      Editor editor = mergePanel.getEditor(i);
      if (editor != null) {
        editor.addEditorMouseMotionListener(motionListener);
        editor.addEditorMouseListener(clickListener);
      }
    }
  }

  @Override
  public Component getPanel() {
    return myPanel;
  }

  /** Re-applies markup to the current merge list (if any) and repaints the preview. */
  @Override
  public void updateView() {
    MergeList mergeList = getMergePanel().getMergeList();
    if (mergeList != null) mergeList.updateMarkup();
    myMergePanelComponent.repaint();
  }

  /**
   * Loads the built-in three-way sample into the merge panel.
   *
   * @throws FilesTooBigForDiffException propagated from the diff computation
   */
  public void setMergeRequest(@Nullable Project project) throws FilesTooBigForDiffException {
    getMergePanel().setDiffRequest(new SampleMerge(project));
  }

  private MergePanel2 getMergePanel() {
    return myMergePanelComponent.getMergePanel();
  }

  /** Applies the scheme being edited so the preview reflects unsaved color changes. */
  public void setColorScheme(final EditorColorsScheme highlighterSettings) {
    getMergePanel().setColorScheme(highlighterSettings);
    getMergePanel().setHighlighterSettings(highlighterSettings);
  }

  /** Shows a hand cursor while hovering over a change in editor {@code myIndex}. */
  private class EditorMouseListener extends EditorMouseMotionAdapter {
    private final int myIndex;

    private EditorMouseListener(int index) {
      myIndex = index;
    }

    @Override
    public void mouseMoved(EditorMouseEvent e) {
      MergePanel2 mergePanel = getMergePanel();
      Editor editor = mergePanel.getEditor(myIndex);
      if (MergeSearchHelper.findChangeAt(e, mergePanel, myIndex) != null) EditorUtil.setHandCursor(editor);
    }
  }

  /** The fixed three-way diff request backing the preview. */
  public static class SampleMerge extends DiffRequest {
    public SampleMerge(@Nullable Project project) {
      super(project);
    }

    @Override
    @NotNull
    public DiffContent[] getContents() {
      return new DiffContent[]{
        new SimpleContent(LEFT_TEXT, PlainTextFileType.INSTANCE),
        new SimpleContent(CENTER_TEXT, PlainTextFileType.INSTANCE),
        new SimpleContent(RIGHT_TEXT, PlainTextFileType.INSTANCE)
      };
    }

    @Override
    public String[] getContentTitles() {
      // Titles are intentionally blank — the preview has no toolbar/labels.
      return new String[]{"", "", ""};
    }

    @Override
    public String getWindowTitle() {
      return DiffBundle.message("merge.color.options.dialog.title");
    }
  }

  @Override
  public void addListener(@NotNull final ColorAndFontSettingsListener listener) {
    myDispatcher.addListener(listener);
  }

  /**
   * Translates clicks and caret moves inside editor {@code myIndex} into
   * selectionInPreviewChanged notifications carrying the diff type's display name.
   */
  private class EditorClickListener extends EditorMouseAdapter implements CaretListener {
    private final int myIndex;

    private EditorClickListener(int i) {
      myIndex = i;
    }

    @Override
    public void mouseClicked(EditorMouseEvent e) {
      select(MergeSearchHelper.findChangeAt(e, getMergePanel(), myIndex));
    }

    private void select(Change change) {
      if (change == null) return;
      myDispatcher.getMulticaster().selectionInPreviewChanged(change.getType().getTextDiffType().getDisplayName());
    }

    @Override
    public void caretPositionChanged(CaretEvent e) {
      select(MergeSearchHelper.findChangeAt(e, getMergePanel(), myIndex));
    }

    @Override
    public void caretAdded(CaretEvent e) {
    }

    @Override
    public void caretRemoved(CaretEvent e) {
    }
  }

  // No blink support in this preview — selection is reported via the dispatcher instead.
  @Override
  public void blinkSelectedHighlightType(final Object selected) {
  }

  // UI is owned by the Disposable parent passed to the constructor; nothing to release here.
  @Override
  public void disposeUIResources() {
  }
}
package work.samoje.colors.grid;

import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.isA;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;

import java.awt.Color;
import java.awt.Point;
import java.util.HashSet;
import java.util.Set;

import org.easymock.EasyMockSupport;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import work.samoje.colors.modification.combiner.combiners.ColorCombiner;
import work.samoje.colors.modification.combiner.selection.CombinerSelector;
import work.samoje.colors.modification.combiner.selection.CombinerState;

/**
 * Unit tests for {@code ColorGrid}.
 *
 * The grid apparently seeds three hardcoded corner cells — RED at (0,0),
 * CYAN at (1,0) and YELLOW at (0,1) (see test_getColorForPoint_returnsValueWhenPresent) —
 * and fills the rest by asking the selector's {@link ColorCombiner} to combine
 * neighbors. All combiner interactions are mocked with EasyMock.
 */
public class ColorGridTest extends EasyMockSupport {
    final CombinerSelector combinerProvider = createMock(CombinerSelector.class);

    /** ColorGrid registers itself as an observer of the selector on construction. */
    @Before
    public void setUp() {
        combinerProvider.addObserver(isA(ColorGrid.class));
        expectLastCall();
    }

    /** Every test must satisfy exactly the expectations it declared. */
    @After
    public void tearDown() {
        verifyAll();
    }

    @Test
    public void test_constructor() {
        final int height = 3;
        final int width = 50;
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        assertEquals(height, underTest.getHeight());
        assertEquals(width, underTest.getWidth());
    }

    /** Heights below the minimum are rejected; resetAll() drops the setUp expectation. */
    @Test(expected = IllegalArgumentException.class)
    public void test_constructor_failsForTooSmallHeight() {
        resetAll();
        final int height = 1;
        final int width = 50;
        replayAll();
        new ColorGrid(height, width, combinerProvider);
    }

    @Test(expected = IllegalArgumentException.class)
    public void test_constructor_failsForTooSmallWidth() {
        resetAll();
        final int height = 10;
        final int width = 1;
        replayAll();
        new ColorGrid(height, width, combinerProvider);
    }

    /**
     * Walks a 3x4 initialization: edges derive from the hardcoded corner colors,
     * interior cells combine the cell above with the cell to the left.
     */
    @Test
    public void test_initiatlizeAndRecalculate() {
        final int height = 3;
        final int width = 4;
        final ColorCombiner mockCombiner = createMock(ColorCombiner.class);
        // Expect top edge: RED - CYAN - BLACK - MAGENTA
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.RED))).andReturn(
                Color.BLACK);
        expect(mockCombiner.combine(eq(Color.BLACK), eq(Color.CYAN)))
                .andReturn(Color.MAGENTA);
        // Expect side edge: RED - YELLOW - BLUE
        expect(mockCombiner.combine(eq(Color.YELLOW), eq(Color.RED)))
                .andReturn(Color.BLUE);
        // Expect result
        // RED - CYAN - BLACK - MAGENTA
        // YELLOW - GREEN - DARK_GRAY - LIGHT_GRAY
        // BLUE - ORANGE - PINK - WHITE
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.YELLOW)))
                .andReturn(Color.GREEN);
        expect(mockCombiner.combine(eq(Color.BLACK), eq(Color.GREEN)))
                .andReturn(Color.DARK_GRAY);
        expect(mockCombiner.combine(eq(Color.MAGENTA), eq(Color.DARK_GRAY)))
                .andReturn(Color.LIGHT_GRAY);
        expect(mockCombiner.combine(eq(Color.GREEN), eq(Color.BLUE)))
                .andReturn(Color.ORANGE);
        expect(mockCombiner.combine(eq(Color.DARK_GRAY), eq(Color.ORANGE)))
                .andReturn(Color.PINK);
        expect(mockCombiner.combine(eq(Color.LIGHT_GRAY), eq(Color.PINK)))
                .andReturn(Color.WHITE);
        expect(combinerProvider.getCombiner()).andReturn(mockCombiner).times(1);
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        underTest.initialize();
        // Bottom-right cell is the last combination in the walk above.
        assertEquals(Color.WHITE, underTest.getColorForPoint(3, 2).get());
    }

    /** Out-of-range coordinates (negative or >= width/height) yield empty Optionals, not exceptions. */
    @Test
    public void test_getColorForPoint_returnsEmptyOutsideOfBounds() {
        final int height = 2;
        final int width = 2;
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        assertFalse(underTest.getColorForPoint(-1, 0).isPresent());
        assertFalse(underTest.getColorForPoint(0, -1).isPresent());
        assertFalse(underTest.getColorForPoint(-1, -10).isPresent());
        assertFalse(underTest.getColorForPoint(width, height).isPresent());
        assertFalse(underTest.getColorForPoint(width, 0).isPresent());
        assertFalse(underTest.getColorForPoint(width + 1, 0).isPresent());
        assertFalse(underTest.getColorForPoint(width, height).isPresent());
        assertFalse(underTest.getColorForPoint(width, height + 1).isPresent());
        assertFalse(underTest.getColorForPoint(0, height).isPresent());
    }

    /** Before initialize() the grid holds no values, so in-bounds lookups are also empty. */
    @Test
    public void test_getColorForPoint_returnsEmptyForNullValue() {
        final int height = 2;
        final int width = 2;
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        assertFalse(underTest.getColorForPoint(1, 1).isPresent());
    }

    /** Pins the three hardcoded corner colors plus the single combined cell of a 2x2 grid. */
    @Test
    public void test_getColorForPoint_returnsValueWhenPresent() {
        final int height = 2;
        final int width = 2;
        final ColorCombiner mockCombiner = createMock(ColorCombiner.class);
        expect(mockCombiner.combine(isA(Color.class), isA(Color.class)))
                .andReturn(Color.BLACK).times(1);
        expect(combinerProvider.getCombiner()).andReturn(mockCombiner).times(1);
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        underTest.initialize();
        assertTrue(underTest.getColorForPoint(0, 0).isPresent());
        assertEquals(Color.RED, underTest.getColorForPoint(0, 0).get());
        assertTrue(underTest.getColorForPoint(1, 0).isPresent());
        assertEquals(Color.CYAN, underTest.getColorForPoint(1, 0).get());
        assertTrue(underTest.getColorForPoint(0, 1).isPresent());
        assertEquals(Color.YELLOW, underTest.getColorForPoint(0, 1).get());
        assertTrue(underTest.getColorForPoint(1, 1).isPresent());
        assertEquals(Color.BLACK, underTest.getColorForPoint(1, 1).get());
    }

    /** getColorRow must return a defensive copy, not a live view of the grid's row. */
    @Test
    public void test_getColorRow_getsACopy() {
        final int height = 2;
        final int width = 3;
        // Expect result
        // RED - CYAN - BLACK
        // YELLOW - GREEN - DARK_GRAY
        final ColorCombiner mockCombiner = createMock(ColorCombiner.class);
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.RED))).andReturn(
                Color.BLACK).times(1);
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.YELLOW)))
                .andReturn(Color.GREEN).times(1);
        expect(mockCombiner.combine(eq(Color.BLACK), eq(Color.GREEN)))
                .andReturn(Color.DARK_GRAY).times(1);
        expect(combinerProvider.getCombiner()).andReturn(mockCombiner).times(1);
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        underTest.initialize();
        final Color[] row = underTest.getColorRow(1);
        // Ensure we get the right values
        assertEquals(3, row.length);
        assertEquals(Color.YELLOW, row[0]);
        assertEquals(Color.GREEN, row[1]);
        assertEquals(Color.DARK_GRAY, row[2]);
        assertEquals(Color.GREEN, underTest.getColorForPoint(1, 1).get());
        // Ensure modifications to the column returned do not affect the grid
        row[1] = Color.WHITE;
        assertEquals(Color.GREEN, underTest.getColorForPoint(1, 1).get());
    }

    @Test(expected = IllegalArgumentException.class)
    public void test_getColorRow_failsWhenOutOfBounds() {
        final int height = 2;
        final int width = 3;
        // Expect result
        // RED - CYAN - BLACK
        // YELLOW - GREEN - DARK_GRAY
        final ColorCombiner mockCombiner = createMock(ColorCombiner.class);
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.RED))).andReturn(
                Color.BLACK).times(1);
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.YELLOW)))
                .andReturn(Color.GREEN).times(1);
        expect(mockCombiner.combine(eq(Color.BLACK), eq(Color.GREEN)))
                .andReturn(Color.DARK_GRAY).times(1);
        expect(combinerProvider.getCombiner()).andReturn(mockCombiner).times(1);
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        underTest.initialize();
        // NOTE(review): passing width as a row index suggests rows are indexed by y up to
        // height; here width (3) > height (2) so it is out of bounds either way.
        underTest.getColorRow(width);
    }

    @Test(expected = IllegalArgumentException.class)
    public void test_getColorRow_failsWhenNegativeBounds() {
        final int height = 2;
        final int width = 3;
        // Expect result
        // RED - CYAN - BLACK
        // YELLOW - GREEN - DARK_GRAY
        final ColorCombiner mockCombiner = createMock(ColorCombiner.class);
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.RED))).andReturn(
                Color.BLACK).times(1);
        expect(mockCombiner.combine(eq(Color.CYAN), eq(Color.YELLOW)))
                .andReturn(Color.GREEN).times(1);
        expect(mockCombiner.combine(eq(Color.BLACK), eq(Color.GREEN)))
                .andReturn(Color.DARK_GRAY).times(1);
        expect(combinerProvider.getCombiner()).andReturn(mockCombiner).times(1);
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        underTest.initialize();
        underTest.getColorRow(-1);
    }

    /**
     * writeToNearestEdge projects each point to its closest edge (ties write to both
     * edges), then recalculates all non-edge cells via the combiner.
     */
    @Test
    public void test_writeToNearestEdge_writesToTheRightSpot() {
        final int height = 10;
        final int width = 15;
        final ColorCombiner mockCombiner = createMock(ColorCombiner.class);
        // For initialization, we combine for all values except the 3 hardcoded
        final int initializationCombinationCount = height * width - 3;
        // For repaint after write, we combine for all values except the edges
        final int redrawAfterWriteCount = (height - 1) * (width - 1);
        expect(mockCombiner.combine(isA(Color.class), isA(Color.class)))
                .andReturn(Color.BLACK).times(
                        initializationCombinationCount + redrawAfterWriteCount);
        // Get combiner once for initialization, once for recalculation after
        // write points
        expect(combinerProvider.getCombiner()).andReturn(mockCombiner).times(2);
        replayAll();
        final ColorGrid underTest = new ColorGrid(height, width, combinerProvider);
        underTest.initialize();
        final Set<Point> points = new HashSet<>();
        final Set<Point> expectedWhitePoints = new HashSet<>();
        points.add(new Point(2, 3));
        // Should result in a write to (0, 3)
        expectedWhitePoints.add(new Point(0, 3));
        points.add(new Point(3, 4));
        // Should result in a write to (0, 4)
        expectedWhitePoints.add(new Point(0, 4));
        points.add(new Point(5, 5));
        // Should write to (5, 0) and (0, 5)
        expectedWhitePoints.add(new Point(5, 0));
        expectedWhitePoints.add(new Point(0, 5));
        points.add(new Point(6, 5));
        // Should result in a write to (6, 0)
        expectedWhitePoints.add(new Point(6, 0));
        underTest.writeToNearestEdge(points, Color.WHITE);
        final Set<Point> hardcodedPoints = new HashSet<>();
        hardcodedPoints.add(new Point(0, 0));
        hardcodedPoints.add(new Point(1, 0));
        hardcodedPoints.add(new Point(0, 1));
        // Ensure we updated the right values
        for (int x = 0; x < width; x++) {
            for (int y = 0; y < height; y++) {
                final Point point = new Point(x, y);
                if (hardcodedPoints.contains(point)) {
                    // Seed cells keep their original colors: neither the mock's BLACK
                    // nor the written WHITE.
                    assertNotEquals("Equality assertion failed for " + point,
                            Color.BLACK,
                            underTest.getColorForPoint(x, y).get());
                    assertNotEquals("Equality assertion failed for " + point,
                            Color.WHITE,
                            underTest.getColorForPoint(x, y).get());
                } else if (expectedWhitePoints.contains(point)) {
                    assertEquals("Equality assertion failed for " + point,
                            Color.WHITE,
                            underTest.getColorForPoint(x, y).get());
                } else {
                    assertEquals("Equality assertion failed for " + point,
                            Color.BLACK,
                            underTest.getColorForPoint(x, y).get());
                }
            }
        }
    }

    /** getGridState snapshots the selector's current CombinerState. */
    @Test
    public void test_getGridState_shouldGetTheGridState() {
        final CombinerState mockState = createMock(CombinerState.class);
        expect(combinerProvider.getCombinerState()).andReturn(mockState).times(
                1);
        replayAll();
        final ColorGrid underTest = new ColorGrid(2, 2, combinerProvider);
        final GridState result = underTest.getGridState();
        assertEquals(mockState, result.getCombinerState());
    }
}
package apoc.mongodb;

import apoc.util.Util;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.MongoCommandException;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.MongoIterable;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import org.apache.commons.lang.StringUtils;
import org.bson.BsonDouble;
import org.bson.BsonInt32;
import org.bson.BsonInt64;
import org.bson.BsonNumber;
import org.bson.BsonRegularExpression;
import org.bson.BsonTimestamp;
import org.bson.Document;
import org.bson.types.Binary;
import org.bson.types.Code;
import org.bson.types.MaxKey;
import org.bson.types.MinKey;
import org.bson.types.ObjectId;
import org.bson.types.Symbol;

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import static java.lang.String.format;

/**
 * Wrapper around a single MongoDB collection that converts driver {@link Document}s
 * into Neo4j-packable maps (plain Java types only).
 *
 * @author mh
 * @since 30.06.16
 */
class MongoDBColl implements MongoDBUtils.Coll {

    // ObjectMapper is thread-safe and expensive to build; share one instance.
    // USE_LONG_FOR_INTS keeps numeric round-trips consistent with Neo4j's long-based integers.
    private static final ObjectMapper jsonMapper = new ObjectMapper().enable(DeserializationFeature.USE_LONG_FOR_INTS);

    public static final String ID = "_id";

    private final MongoCollection<Document> collection;
    private final MongoClient mongoClient;
    // When true, BSON scalar types are coerced to plain Java equivalents (long, double, byte[], ...).
    private boolean compatibleValues = false;
    // When true, close() becomes a no-op because a returned Stream owns the client (see asStream).
    private boolean doorStop = false;
    private final MongoDatabase database;
    // When true, ObjectId references are resolved to the referenced document instead of a String id.
    private boolean extractReferences = false;
    // When true, the "_id" value is rendered as a Map via a JSON round-trip; otherwise as a String.
    private boolean objectIdAsMap = true;

    // visible for testing
    public static final String ERROR_MESSAGE = "The connection string must have %s name";

    /** Opens a client for {@code url} and binds to database {@code db} / collection {@code coll}. */
    private MongoDBColl(String url, String db, String coll) {
        MongoClientURI connectionString = new MongoClientURI(url);
        mongoClient = new MongoClient(connectionString);
        database = mongoClient.getDatabase(db);
        collection = database.getCollection(coll);
    }

    /**
     * @param url connection string
     * @param db database name
     * @param coll collection name
     * @param compatibleValues if true we convert the document to JSON and than back to a Map
     * @param extractReferences if true ObjectId values are dereferenced to their documents
     * @param objectIdAsMap if true the "_id" field is rendered as a Map instead of a String
     */
    public MongoDBColl(String url, String db, String coll, boolean compatibleValues,
                       boolean extractReferences, boolean objectIdAsMap) {
        this(url, db, coll);
        getConfigs(compatibleValues, extractReferences, objectIdAsMap);
    }

    /**
     * @param uri the string Uri to convert in connectionString
     * @see MongoClientURI
     * @param conf the configuration
     * @see MongoDbConfig
     * @throws RuntimeException if the URI lacks a database, lacks a collection (when none is
     *         configured), or the connection cannot be authenticated
     */
    public MongoDBColl(String uri, MongoDbConfig conf) {
        MongoClientURI connectionString = new MongoClientURI(uri);
        if (connectionString.getDatabase() == null) {
            throw new RuntimeException(format(ERROR_MESSAGE, "db"));
        }
        // Explicit config wins over the collection embedded in the URI.
        final String collectionName;
        final String confCollection = conf.getCollection();
        if (StringUtils.isNotBlank(confCollection)) {
            collectionName = confCollection;
        } else {
            final String collectionFromUri = connectionString.getCollection();
            if (collectionFromUri == null) {
                throw new RuntimeException(format(ERROR_MESSAGE, "collection"));
            }
            collectionName = collectionFromUri;
        }
        mongoClient = new MongoClient(connectionString);
        database = mongoClient.getDatabase(connectionString.getDatabase());
        try {
            // check if correctly authenticated
            database.runCommand(new Document("listCollections", 1));
        } catch (MongoCommandException e) {
            // Don't leak the client when the probe fails.
            mongoClient.close();
            throw new RuntimeException(e);
        }
        this.collection = database.getCollection(collectionName);
        // with new procedure we return always Neo4j values
        getConfigs(true, conf.isExtractReferences(), conf.isObjectIdAsMap());
    }

    /** Stores the conversion flags (shared by both construction paths). */
    private void getConfigs(boolean compatibleValues, boolean extractReferences, boolean objectIdAsMap) {
        this.compatibleValues = compatibleValues;
        this.extractReferences = extractReferences;
        this.objectIdAsMap = objectIdAsMap;
    }

    @Override
    public void close() {
        // A handed-out Stream owns the client and closes it in its onClose handler.
        if (doorStop) return;
        mongoClient.close();
    }

    /**
     * It translates a MongoDB document into a Map where the "_id" field is not an ObjectId
     * but a simple String representation of it
     *
     * @param document the document to convert (may be null, yielding null)
     * @return a packable Map, or null for null input
     */
    private Map<String, Object> documentToPackableMap(Map<String, Object> document) {
        return (Map<String, Object>) convertAndExtract(document);
    }

    /**
     * Recursively converts driver/BSON values into plain Java values.
     * Maps and collections are converted element-wise; scalars follow the
     * compatibleValues / extractReferences / objectIdAsMap flags.
     */
    public Object convertAndExtract(Object data) {
        if (data == null) {
            return null;
        }
        if (data instanceof Map) {
            Map<String, Object> map = (Map<String, Object>) data;
            return map.entrySet().stream()
                    .map(e -> {
                        Object value;
                        if (ID.equals(e.getKey())) { // avoid circular conversions
                            if (compatibleValues && objectIdAsMap) {
                                try {
                                    value = jsonMapper.readValue(jsonMapper.writeValueAsBytes(e.getValue()), Map.class);
                                } catch (Exception exc) {
                                    // Keep the original exception as the cause instead of flattening
                                    // it into the message (the previous code dropped the stack trace).
                                    throw new RuntimeException("Cannot convert document to json and back to Map " + exc.getMessage(), exc);
                                }
                            } else {
                                value = e.getValue().toString();
                            }
                        } else {
                            value = convertAndExtract(e.getValue());
                        }
                        return new AbstractMap.SimpleEntry<>(e.getKey(), value);
                    })
                    // mutable-collector form tolerates null values, unlike Collectors.toMap;
                    // please look at https://bugs.openjdk.java.net/browse/JDK-8148463
                    .collect(HashMap::new, (m, e) -> m.put(e.getKey(), e.getValue()), HashMap::putAll);
        }
        if (data instanceof Collection) {
            Collection<Object> collection = (Collection<Object>) data;
            return collection.stream()
                    .map(elem -> convertAndExtract(elem))
                    .collect(Collectors.toList());
        }
        // NOTE(review): by De Morgan this only converts String[] arrays (non-primitive AND
        // component type String); other object arrays pass through unchanged — confirm intended.
        if (data.getClass().isArray()
                && !(data.getClass().getComponentType().isPrimitive()
                     || !data.getClass().getComponentType().equals(String.class))) {
            return Stream.of((Object[]) data)
                    .map(elem -> convertAndExtract(elem))
                    .collect(Collectors.toList());
        }
        if (compatibleValues) {
            // Coerce BSON / boxed scalars to the plain Java types Neo4j can store.
            if (data instanceof Integer) {
                return ((Integer) data).longValue();
            }
            if (data instanceof BsonInt64 || data instanceof BsonInt32) {
                return ((BsonNumber) data).longValue();
            }
            if (data instanceof BsonDouble) {
                return ((BsonDouble) data).doubleValue();
            }
            if (data instanceof Binary) {
                return ((Binary) data).getData();
            }
            if (data instanceof Float) {
                return ((Float) data).doubleValue();
            }
            if (data instanceof BsonTimestamp) {
                return (long) ((BsonTimestamp) data).getTime();
            }
            if (data instanceof MinKey || data instanceof MaxKey) {
                return data.toString();
            }
            if (data instanceof BsonRegularExpression) {
                return ((BsonRegularExpression) data).getPattern();
            }
            if (data instanceof Code) {
                return ((Code) data).getCode();
            }
            if (data instanceof Symbol) {
                return ((Symbol) data).getSymbol();
            }
        }
        if (data instanceof Date) { // temporal types don't work with ValueUtils.of
            return LocalDateTime.ofInstant(((Date) data).toInstant(), ZoneId.systemDefault());
        }
        if (data instanceof ObjectId) {
            return extractReferences ? extractReference((ObjectId) data) : data.toString();
        }
        return data;
    }

    /**
     * Scans every collection in the database for a document whose _id matches
     * {@code objectId} and returns its packable form, or null when not found.
     * Note: this is a full scan across collections and can be expensive.
     */
    private Object extractReference(ObjectId objectId) {
        return StreamSupport.stream(database.listCollectionNames().spliterator(), false)
                .map(collectionName -> database.getCollection(collectionName))
                .map(collection -> collection.find(new Document(ID, objectId)).first())
                .filter(result -> result != null && !result.isEmpty())
                .findFirst()
                .map(this::documentToPackableMap)
                .orElse(null);
    }

    /** Returns the first document matching {@code query} (null when none matches). */
    @Override
    public Map<String, Object> first(Map<String, Object> query) {
        return documentToPackableMap(collection.find(new Document(query)).first());
    }

    /**
     * Streams all documents matching {@code query} (all documents when null),
     * optionally skipping/limiting. A null skip/limit now means "no skip/limit"
     * instead of throwing NullPointerException on unboxing.
     */
    @Override
    public Stream<Map<String, Object>> all(Map<String, Object> query, Long skip, Long limit) {
        FindIterable<Document> documents = getDocuments(query);
        if (skip != null && skip != 0) documents = documents.skip(skip.intValue());
        if (limit != null && limit != 0) documents = documents.limit(limit.intValue());
        return asStream(documents);
    }

    @Override
    public long count(Map<String, Object> query) {
        return query == null ? collection.count() : collection.count(new Document(query));
    }

    @Override
    public long count(Document query) {
        return collection.count(query);
    }

    @Override
    public Stream<Map<String, Object>> aggregate(List<Document> pipeline) {
        return asStream(collection.aggregate(pipeline));
    }

    /**
     * Streams matching documents with optional projection, sort, skip and limit.
     * Null parameters mean "not applied" (including skip/limit, which previously
     * NPE'd on null unboxing).
     */
    @Override
    public Stream<Map<String, Object>> find(Map<String, Object> query, Map<String, Object> project,
                                            Map<String, Object> sort, Long skip, Long limit) {
        FindIterable<Document> documents = getDocuments(query);
        if (project != null) documents = documents.projection(new Document(project));
        if (sort != null) documents = documents.sort(new Document(sort));
        if (skip != null && skip != 0) documents = documents.skip(skip.intValue());
        if (limit != null && limit != 0) documents = documents.limit(limit.intValue());
        return asStream(documents);
    }

    @Override
    public Stream<Map<String, Object>> find(Document query, Document project, Document sort, int skip, int limit) {
        FindIterable<Document> documents = collection.find(query)
                .projection(project).sort(sort)
                .skip(skip).limit(limit);
        return asStream(documents);
    }

    /** Shared null-tolerant find: all documents when {@code query} is null. */
    private FindIterable<Document> getDocuments(Map<String, Object> query) {
        return query == null ? collection.find() : collection.find(new Document(query));
    }

    /**
     * Wraps a driver iterable in a lazy Stream. Ownership of the MongoClient transfers
     * to the Stream: doorStop disables close() and the client is closed in onClose.
     */
    private Stream<Map<String, Object>> asStream(MongoIterable<Document> result) {
        this.doorStop = true;
        Iterable<Document> it = () -> result.iterator();
        return StreamSupport
                .stream(it.spliterator(), false)
                .map(doc -> this.documentToPackableMap(doc))
                .onClose(() -> {
                    // NOTE(review): result.iterator() opens a fresh cursor here rather than
                    // closing the one consumed above; the subsequent client close releases
                    // resources anyway — verify against the driver's cursor semantics.
                    Util.close(result.iterator());
                    Util.close(mongoClient);
                });
    }

    @Override
    public void insert(List<Map<String, Object>> docs) {
        for (Map<String, Object> doc : docs) {
            collection.insertOne(new Document(doc));
        }
    }

    @Override
    public void insertDocs(List<Document> documents) {
        collection.insertMany(documents);
    }

    @Override
    public long update(Map<String, Object> query, Map<String, Object> update) {
        return update(new Document(query), new Document(update));
    }

    @Override
    public long update(Document query, Document update) {
        UpdateResult updateResult = collection.updateMany(query, update);
        return updateResult.getModifiedCount();
    }

    @Override
    public long delete(Map<String, Object> query) {
        return delete(new Document(query));
    }

    @Override
    public long delete(Document query) {
        DeleteResult result = collection.deleteMany(query);
        return result.getDeletedCount();
    }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.adapter.servlet; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.graphene.page.Page; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.junit.Assert; import org.junit.Test; import org.keycloak.admin.client.resource.ClientResource; import org.keycloak.admin.client.resource.ProtocolMappersResource; import org.keycloak.protocol.saml.mappers.AttributeStatementHelper; import org.keycloak.protocol.saml.mappers.RoleListMapper; import org.keycloak.representations.idm.ClientRepresentation; import org.keycloak.representations.idm.ProtocolMapperRepresentation; import org.keycloak.representations.idm.RealmRepresentation; import org.keycloak.representations.idm.UserRepresentation; import org.keycloak.saml.BaseSAML2BindingBuilder; import org.keycloak.saml.SAML2ErrorResponseBuilder; import org.keycloak.saml.common.constants.JBossSAMLURIConstants; import org.keycloak.testsuite.adapter.AbstractServletsAdapterTest; import org.keycloak.testsuite.adapter.page.BadAssertionSalesPostSig; import org.keycloak.testsuite.adapter.page.BadClientSalesPostSigServlet; import org.keycloak.testsuite.adapter.page.BadRealmSalesPostSigServlet; import org.keycloak.testsuite.adapter.page.Employee2Servlet; import org.keycloak.testsuite.adapter.page.EmployeeServlet; import 
org.keycloak.testsuite.adapter.page.EmployeeSigFrontServlet; import org.keycloak.testsuite.adapter.page.EmployeeSigServlet; import org.keycloak.testsuite.adapter.page.InputPortal; import org.keycloak.testsuite.adapter.page.MissingAssertionSig; import org.keycloak.testsuite.adapter.page.SAMLServlet; import org.keycloak.testsuite.adapter.page.SalesMetadataServlet; import org.keycloak.testsuite.adapter.page.SalesPost2Servlet; import org.keycloak.testsuite.adapter.page.SalesPostAssertionAndResponseSig; import org.keycloak.testsuite.adapter.page.SalesPostEncServlet; import org.keycloak.testsuite.adapter.page.SalesPostPassiveServlet; import org.keycloak.testsuite.adapter.page.SalesPostServlet; import org.keycloak.testsuite.adapter.page.SalesPostSigEmailServlet; import org.keycloak.testsuite.adapter.page.SalesPostSigPersistentServlet; import org.keycloak.testsuite.adapter.page.SalesPostSigServlet; import org.keycloak.testsuite.adapter.page.SalesPostSigTransientServlet; import org.keycloak.testsuite.admin.ApiUtil; import org.keycloak.testsuite.auth.page.login.Login; import org.keycloak.testsuite.auth.page.login.SAMLIDPInitiatedLogin; import org.keycloak.testsuite.page.AbstractPage; import org.keycloak.testsuite.util.IOUtil; import org.openqa.selenium.By; import org.w3c.dom.Document; import org.xml.sax.SAXException; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Form; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.Response; import javax.xml.XMLConstants; import javax.xml.transform.Source; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.URI; import java.net.URL; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import 
static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.keycloak.testsuite.auth.page.AuthRealm.SAMLSERVLETDEMO;
import static org.keycloak.testsuite.util.IOUtil.loadRealm;
import static org.keycloak.testsuite.util.IOUtil.loadXML;
import static org.keycloak.testsuite.util.IOUtil.modifyDocElementAttribute;
import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith;
import static org.keycloak.testsuite.util.WaitUtils.waitUntilElement;

/**
 * Base class for the SAML servlet adapter integration tests. It deploys a set of
 * SAML-secured test servlets (the {@code @Deployment} factories below), then drives
 * browser-based login/logout flows against the {@code SAMLSERVLETDEMO} realm and
 * asserts on the page bodies the servlets render.
 *
 * @author mhajas
 */
public abstract class AbstractSAMLServletsAdapterTest extends AbstractServletsAdapterTest {

    // Page objects for the SAML test servlets deployed by the @Deployment factories below.
    @Page
    protected BadClientSalesPostSigServlet badClientSalesPostSigServletPage;

    @Page
    protected BadRealmSalesPostSigServlet badRealmSalesPostSigServletPage;

    @Page
    protected Employee2Servlet employee2ServletPage;

    @Page
    protected EmployeeSigServlet employeeSigServletPage;

    @Page
    protected EmployeeSigFrontServlet employeeSigFrontServletPage;

    @Page
    protected SalesMetadataServlet salesMetadataServletPage;

    @Page
    protected SalesPostServlet salesPostServletPage;

    @Page
    private SalesPost2Servlet salesPost2ServletPage;

    @Page
    protected SalesPostEncServlet salesPostEncServletPage;

    @Page
    protected SalesPostPassiveServlet salesPostPassiveServletPage;

    @Page
    protected SalesPostSigServlet salesPostSigServletPage;

    @Page
    protected SalesPostSigEmailServlet salesPostSigEmailServletPage;

    @Page
    protected SalesPostSigPersistentServlet salesPostSigPersistentServletPage;

    @Page
    protected SalesPostSigTransientServlet salesPostSigTransientServletPage;

    @Page
    protected SAMLIDPInitiatedLogin samlidpInitiatedLogin;

    // When true, an unauthenticated request to the passive servlet is expected to land
    // on a forbidden page; when false, the servlet renders "principal=null" instead.
    // Subclasses may flip this to match the container under test.
    protected boolean forbiddenIfNotAuthenticated = true;

    @Page
    protected SalesPostAssertionAndResponseSig salesPostAssertionAndResponseSigPage;

    @Page
    protected BadAssertionSalesPostSig badAssertionSalesPostSigPage;

    @Page
    protected MissingAssertionSig missingAssertionSigPage;

    @Page
    protected EmployeeServlet employeeServletPage;

    @Page
    private InputPortal inputPortalPage;

    @Page
    private SAMLIDPInitiatedLogin samlidpInitiatedLoginPage;

    // Text some containers render on a 403 response; checked alongside plain "Forbidden"
    // because EAP and Wildfly produce different 403 status pages.
    public static final String FORBIDDEN_TEXT = "HTTP status code: 403";

    // ---- Deployments: each factory packages one SAML-secured test servlet. ----

    @Deployment(name = BadClientSalesPostSigServlet.DEPLOYMENT_NAME)
    protected static WebArchive badClientSalesPostSig() {
        return samlServletDeployment(BadClientSalesPostSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = BadRealmSalesPostSigServlet.DEPLOYMENT_NAME)
    protected static WebArchive badRealmSalesPostSig() {
        return samlServletDeployment(BadRealmSalesPostSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = Employee2Servlet.DEPLOYMENT_NAME)
    protected static WebArchive employee2() {
        return samlServletDeployment(Employee2Servlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = EmployeeSigServlet.DEPLOYMENT_NAME)
    protected static WebArchive employeeSig() {
        return samlServletDeployment(EmployeeSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = EmployeeSigFrontServlet.DEPLOYMENT_NAME)
    protected static WebArchive employeeSigFront() {
        return samlServletDeployment(EmployeeSigFrontServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesMetadataServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesMetadata() {
        return samlServletDeployment(SalesMetadataServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPost() {
        return samlServletDeployment(SalesPostServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostEncServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPostEnc() {
        return samlServletDeployment(SalesPostEncServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostPassiveServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPostPassive() {
        return samlServletDeployment(SalesPostPassiveServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostSigServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPostSig() {
        return samlServletDeployment(SalesPostSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostSigEmailServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPostSigEmail() {
        return samlServletDeployment(SalesPostSigEmailServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostSigPersistentServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPostSigPersistent() {
        return samlServletDeployment(SalesPostSigPersistentServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostSigTransientServlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPostSigTransient() {
        return samlServletDeployment(SalesPostSigTransientServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    // Uses a custom web.xml and the InputServlet instead of SendUsernameServlet.
    @Deployment(name = InputPortal.DEPLOYMENT_NAME)
    protected static WebArchive inputPortal() {
        return samlServletDeployment(InputPortal.DEPLOYMENT_NAME, "input-portal/WEB-INF/web.xml", InputServlet.class);
    }

    @Deployment(name = SalesPost2Servlet.DEPLOYMENT_NAME)
    protected static WebArchive salesPost2() {
        return samlServletDeployment(SalesPost2Servlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = SalesPostAssertionAndResponseSig.DEPLOYMENT_NAME)
    protected static WebArchive salesPostAssertionAndResponseSig() {
        return samlServletDeployment(SalesPostAssertionAndResponseSig.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = BadAssertionSalesPostSig.DEPLOYMENT_NAME)
    protected static WebArchive badAssertionSalesPostSig() {
        return samlServletDeployment(BadAssertionSalesPostSig.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    @Deployment(name = MissingAssertionSig.DEPLOYMENT_NAME)
    protected static WebArchive missingAssertionSig() {
        return samlServletDeployment(MissingAssertionSig.DEPLOYMENT_NAME, SendUsernameServlet.class);
    }

    // Uses a custom web.xml and the SamlSPFacade servlet (exposes the raw SAMLResponse).
    @Deployment(name = EmployeeServlet.DEPLOYMENT_NAME)
    protected static WebArchive employeeServlet() {
        return samlServletDeployment(EmployeeServlet.DEPLOYMENT_NAME, "employee/WEB-INF/web.xml", SamlSPFacade.class);
    }

    @Override
    public void addAdapterTestRealms(List<RealmRepresentation> testRealms) {
        // Realm definition containing all SAML client configurations used by these tests.
        testRealms.add(loadRealm("/adapter-test/keycloak-saml/testsaml.json"));
    }

    @Override
    public void setDefaultPageUriParameters() {
        super.setDefaultPageUriParameters();
        // All page objects target the SAML demo realm.
        testRealmPage.setAuthRealm(SAMLSERVLETDEMO);
        testRealmSAMLRedirectLoginPage.setAuthRealm(SAMLSERVLETDEMO);
        testRealmSAMLPostLoginPage.setAuthRealm(SAMLSERVLETDEMO);
    }

    /**
     * Navigates to {@code page} and asserts the current session is NOT authorized for it:
     * the body must not contain {@code expectedNotContains} and a 403 page is rendered.
     */
    private void assertForbidden(AbstractPage page, String expectedNotContains) {
        page.navigateTo();
        waitUntilElement(By.xpath("//body")).text().not().contains(expectedNotContains);
        // Different 403 status page on EAP and Wildfly.
        assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
    }

    /** Navigates to {@code page} and asserts the body shows {@code expectedText} (SSO, no login form). */
    private void assertSuccessfullyLoggedIn(AbstractPage page, String expectedText) {
        page.navigateTo();
        waitUntilElement(By.xpath("//body")).text().contains(expectedText);
    }

    /**
     * Navigates to {@code page}, logs in with the given credentials on {@code loginPage}
     * and asserts the result is a 403 page (user authenticated but not authorized).
     */
    private void assertForbiddenLogin(AbstractPage page, String username, String password, Login loginPage, String expectedNotContains) {
        page.navigateTo();
        assertCurrentUrlStartsWith(loginPage);
        loginPage.form().login(username, password);
        waitUntilElement(By.xpath("//body")).text().not().contains(expectedNotContains);
        // Different 403 status page on EAP and Wildfly
        assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
    }

    /**
     * Navigates to {@code page}, logs in as {@code user} on {@code loginPage}
     * and asserts the body shows {@code expectedString}.
     */
    private void assertSuccessfulLogin(AbstractPage page, UserRepresentation user, Login loginPage, String expectedString) {
        page.navigateTo();
        assertCurrentUrlStartsWith(loginPage);
        loginPage.form().login(user);
        waitUntilElement(By.xpath("//body")).text().contains(expectedString);
    }

    // Convenience overload: expects "principal=bburke" after a successful login.
    private void testSuccessfulAndUnauthorizedLogin(SAMLServlet page, Login loginPage) {
        testSuccessfulAndUnauthorizedLogin(page, loginPage, "principal=bburke");
    }

    // Convenience overload: the unauthorized attempt must not render any "principal=" text.
    private void testSuccessfulAndUnauthorizedLogin(SAMLServlet page, Login loginPage, String expectedText) {
        testSuccessfulAndUnauthorizedLogin(page, loginPage, expectedText, "principal=");
    }

    /**
     * Full round trip for one servlet: successful login as bburke, logout,
     * then a login as the "unauthorized" user that must end on a 403 page, then logout again.
     */
    private void testSuccessfulAndUnauthorizedLogin(SAMLServlet page, Login loginPage, String expectedText, String expectedNotContains) {
        assertSuccessfulLogin(page, bburkeUser, loginPage, expectedText);
        page.logout();
        checkLoggedOut(page, loginPage);
        assertForbiddenLogin(page, "unauthorized", "password", loginPage, expectedNotContains);
        page.logout();
        checkLoggedOut(page, loginPage);
    }

    /** Asserts the session is gone: visiting the page must redirect back to the login screen. */
    private void checkLoggedOut(AbstractPage page, Login loginPage) {
        page.navigateTo();
        waitUntilElement(By.xpath("//body")).is().present();
        assertCurrentUrlStartsWith(loginPage);
    }

    @Test
    public void disabledClientTest() {
        ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "http://localhost:8081/sales-post-sig/");
        ClientRepresentation client = clientResource.toRepresentation();
        client.setEnabled(false);
        clientResource.update(client);
        salesPostSigServletPage.navigateTo();
        waitUntilElement(By.xpath("//body")).text().contains("Login requester not enabled");
        // Re-enable the client so subsequent tests are unaffected.
        client.setEnabled(true);
        clientResource.update(client);
    }

    @Test
    public void unauthorizedSSOTest() {
        // One forbidden login, then SSO into other servlets must also be forbidden.
        assertForbiddenLogin(salesPostServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
        assertForbidden(employee2ServletPage, "principal=");
        assertForbidden(employeeSigFrontServletPage, "principal=");
        assertForbidden(salesPostSigPersistentServletPage, "principal=");
        salesPostServletPage.logout();
        checkLoggedOut(salesPostServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void singleLoginAndLogoutSAMLTest() {
        // Single login propagates via SSO to the other servlets...
        assertSuccessfulLogin(salesPostServletPage, bburkeUser, testRealmSAMLPostLoginPage, "principal=bburke");
        assertSuccessfullyLoggedIn(salesPostSigServletPage, "principal=bburke");
        assertSuccessfullyLoggedIn(employee2ServletPage, "principal=bburke");
        assertSuccessfullyLoggedIn(salesPostEncServletPage, "principal=bburke");
employeeSigFrontServletPage.logout();
        // ...and a single logout must terminate the session for all of them.
        checkLoggedOut(employeeSigFrontServletPage, testRealmSAMLRedirectLoginPage);
        checkLoggedOut(employeeSigServletPage, testRealmSAMLRedirectLoginPage);
        salesPostPassiveServletPage.navigateTo();
        if (forbiddenIfNotAuthenticated) {
            assertOnForbiddenPage();
        } else {
            waitUntilElement(By.xpath("//body")).text().contains("principal=null");
        }
        checkLoggedOut(salesPostSigEmailServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void badClientSalesPostSigTest() {
        badClientSalesPostSigServletPage.navigateTo();
        waitUntilElement(By.xpath("//body")).text().contains("Invalid requester");
    }

    @Test
    public void badRealmSalesPostSigTest() {
        badRealmSalesPostSigServletPage.navigateTo();
        testRealmSAMLRedirectLoginPage.form().login(bburkeUser);
        waitUntilElement(By.xpath("//body")).text().not().contains("principal=");
        // Different 403 status page on EAP and Wildfly
        assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
    }

    @Test
    public void employee2Test() {
        testSuccessfulAndUnauthorizedLogin(employee2ServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void employeeSigTest() {
        testSuccessfulAndUnauthorizedLogin(employeeSigServletPage, testRealmSAMLRedirectLoginPage);
    }

    @Test
    public void employeeSigFrontTest() {
        testSuccessfulAndUnauthorizedLogin(employeeSigFrontServletPage, testRealmSAMLRedirectLoginPage);
    }

    @Test
    public void salesMetadataTest() throws Exception {
        // Register a client from imported SP metadata, rewriting the hard-coded port 8080
        // to the port of the app server under test.
        Document doc = loadXML(AbstractSAMLServletsAdapterTest.class.getResourceAsStream("/adapter-test/keycloak-saml/sp-metadata.xml"));
        modifyDocElementAttribute(doc, "SingleLogoutService", "Location", "8080", System.getProperty("app.server.http.port", null));
        modifyDocElementAttribute(doc, "AssertionConsumerService", "Location", "8080", System.getProperty("app.server.http.port", null));
        ClientRepresentation clientRep = testRealmResource().convertClientDescription(IOUtil.documentToString(doc));
        String appServerUrl;
        if (Boolean.parseBoolean(System.getProperty("app.server.ssl.required"))) {
            appServerUrl = "https://localhost:" + System.getProperty("app.server.https.port", "8543") + "/";
        } else {
            appServerUrl = "http://localhost:" + System.getProperty("app.server.http.port", "8280") + "/";
        }
        clientRep.setAdminUrl(appServerUrl + "sales-metadata/saml");
        Response response = testRealmResource().clients().create(clientRep);
        assertEquals(201, response.getStatus());
        response.close();
        testSuccessfulAndUnauthorizedLogin(salesMetadataServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void salesPostTest() {
        testSuccessfulAndUnauthorizedLogin(salesPostServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void salesPostEncTest() {
        testSuccessfulAndUnauthorizedLogin(salesPostEncServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void salesPostPassiveTest() {
        // Unauthenticated passive request: no login form, just 403 or "principal=null".
        salesPostPassiveServletPage.navigateTo();
        if (forbiddenIfNotAuthenticated) {
            assertOnForbiddenPage();
        } else {
            waitUntilElement(By.xpath("//body")).text().contains("principal=null");
        }
        // After a regular login elsewhere, the passive servlet sees the SSO session.
        assertSuccessfulLogin(salesPostServletPage, bburkeUser, testRealmSAMLPostLoginPage, "principal=bburke");
        assertSuccessfullyLoggedIn(salesPostPassiveServletPage, "principal=bburke");
        salesPostPassiveServletPage.logout();
        salesPostPassiveServletPage.navigateTo();
        if (forbiddenIfNotAuthenticated) {
            assertOnForbiddenPage();
        } else {
            waitUntilElement(By.xpath("//body")).text().contains("principal=null");
        }
        // Unauthorized user must be rejected on the passive servlet as well.
        assertForbiddenLogin(salesPostServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
        assertForbidden(salesPostPassiveServletPage, "principal=");
        salesPostPassiveServletPage.logout();
    }

    @Test
    public void salesPostSigTest() {
        testSuccessfulAndUnauthorizedLogin(salesPostSigServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void salesPostSigEmailTest() {
        testSuccessfulAndUnauthorizedLogin(salesPostSigEmailServletPage, testRealmSAMLPostLoginPage, "principal=bburke@redhat.com");
    }

    @Test
    public void salesPostSigPersistentTest() {
        salesPostSigPersistentServletPage.navigateTo();
        testRealmSAMLPostLoginPage.form().login(bburkeUser);
        // The rendered principal must be a generated "G-" identifier, never the username.
        waitUntilElement(By.xpath("//body")).text().not().contains("bburke");
        waitUntilElement(By.xpath("//body")).text().contains("principal=G-");
        salesPostSigPersistentServletPage.logout();
        checkLoggedOut(salesPostSigPersistentServletPage, testRealmSAMLPostLoginPage);
        assertForbiddenLogin(salesPostSigPersistentServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
        salesPostSigPersistentServletPage.logout();
        checkLoggedOut(salesPostSigPersistentServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void salesPostSigTransientTest() {
        salesPostSigTransientServletPage.navigateTo();
        testRealmSAMLPostLoginPage.form().login(bburkeUser);
        // Same expectation as the persistent variant: generated "G-" principal, not the username.
        waitUntilElement(By.xpath("//body")).text().not().contains("bburke");
        waitUntilElement(By.xpath("//body")).text().contains("principal=G-");
        salesPostSigTransientServletPage.logout();
        checkLoggedOut(salesPostSigTransientServletPage, testRealmSAMLPostLoginPage);
        assertForbiddenLogin(salesPostSigTransientServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
        salesPostSigTransientServletPage.logout();
        checkLoggedOut(salesPostSigTransientServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void idpInitiatedLogin() {
        samlidpInitiatedLoginPage.setAuthRealm(SAMLSERVLETDEMO);
        samlidpInitiatedLoginPage.setUrlName("employee2");
        samlidpInitiatedLoginPage.navigateTo();
        samlidpInitiatedLoginPage.form().login(bburkeUser);
        waitUntilElement(By.xpath("//body")).text().contains("principal=bburke");
        // IdP-initiated session must also carry over to an SP-initiated servlet.
        assertSuccessfullyLoggedIn(salesPostSigServletPage, "principal=bburke");
        employee2ServletPage.logout();
        checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void idpInitiatedUnauthorizedLoginTest() {
        samlidpInitiatedLoginPage.setAuthRealm(SAMLSERVLETDEMO);
        samlidpInitiatedLoginPage.setUrlName("employee2");
        samlidpInitiatedLoginPage.navigateTo();
        samlidpInitiatedLoginPage.form().login("unauthorized", "password");
        waitUntilElement(By.xpath("//body")).text().not().contains("bburke");
        assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
        assertForbidden(employee2ServletPage, "principal=");
        employee2ServletPage.logout();
        checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void testSavedPostRequest() {
        // The POST submitted before authentication must be replayed after login.
        inputPortalPage.navigateTo();
        assertCurrentUrlStartsWith(inputPortalPage);
        inputPortalPage.execute("hello");
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmLoginPage.form().login("bburke@redhat.com", "password");
        Assert.assertEquals(driver.getCurrentUrl(), inputPortalPage + "/secured/post");
        waitUntilElement(By.xpath("//body")).text().contains("parameter=hello");
        // test that user principal and KeycloakSecurityContext available
        driver.navigate().to(inputPortalPage + "/insecure");
        waitUntilElement(By.xpath("//body")).text().contains("Insecure Page");
        if (System.getProperty("insecure.user.principal.unsupported") == null) waitUntilElement(By.xpath("//body")).text().contains("UserPrincipal");
        // test logout
        inputPortalPage.logout();
        // test unsecured POST KEYCLOAK-901
        Client client = ClientBuilder.newClient();
        Form form = new Form();
        form.param("parameter", "hello");
        String text = client.target(inputPortalPage + "/unsecured").request().post(Entity.form(form), String.class);
        Assert.assertTrue(text.contains("parameter=hello"));
        client.close();
    }

    @Test
    public void testPostSimpleLoginLogoutIdpInitiatedRedirectTo() {
        samlidpInitiatedLoginPage.setAuthRealm(SAMLSERVLETDEMO);
        samlidpInitiatedLoginPage.setUrlName("sales-post2");
        samlidpInitiatedLoginPage.navigateTo();
        samlidpInitiatedLoginPage.form().login(bburkeUser);
        // The client's configured redirect must land on the "/foo" sub-path.
        assertCurrentUrlStartsWith(salesPost2ServletPage);
        assertTrue(driver.getCurrentUrl().endsWith("/foo"));
        waitUntilElement(By.xpath("//body")).text().contains("principal=bburke");
        salesPost2ServletPage.logout();
checkLoggedOut(salesPost2ServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void salesPostAssertionAndResponseSigTest() {
        testSuccessfulAndUnauthorizedLogin(salesPostAssertionAndResponseSigPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void testPostBadAssertionSignature() {
        badAssertionSalesPostSigPage.navigateTo();
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmSAMLPostLoginPage.form().login("bburke", "password");
        // Adapter must reject the tampered assertion signature and stay on the SP's /saml endpoint.
        waitUntilElement(By.xpath("//body")).text().contains("Error info: SamlAuthenticationError [reason=INVALID_SIGNATURE, status=null]");
        assertEquals(driver.getCurrentUrl(), badAssertionSalesPostSigPage + "/saml");
    }

    @Test
    public void testMissingAssertionSignature() {
        missingAssertionSigPage.navigateTo();
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmSAMLPostLoginPage.form().login("bburke", "password");
        // An assertion that should be signed but is not must be treated as an invalid signature.
        waitUntilElement(By.xpath("//body")).text().contains("Error info: SamlAuthenticationError [reason=INVALID_SIGNATURE, status=null]");
        assertEquals(driver.getCurrentUrl(), missingAssertionSigPage + "/saml");
    }

    @Test
    public void testErrorHandling() throws Exception {
        Client client = ClientBuilder.newClient();
        // make sure — warm-up GET against the SP before sending the crafted error response
        // (original comment was truncated; TODO confirm intent).
        Response response = client.target(employeeSigServletPage.toString()).request().get();
        response.close();
        // Build a SAML error response (STATUS_REQUEST_DENIED) and deliver it via redirect binding.
        SAML2ErrorResponseBuilder builder = new SAML2ErrorResponseBuilder()
                .destination(employeeSigServletPage.toString() + "/saml")
                .issuer("http://localhost:" + System.getProperty("auth.server.http.port", "8180") + "/realms/demo")
                .status(JBossSAMLURIConstants.STATUS_REQUEST_DENIED.get());
        BaseSAML2BindingBuilder binding = new BaseSAML2BindingBuilder()
                .relayState(null);
        Document document = builder.buildDocument();
        URI uri = binding.redirectBinding(document).generateURI(employeeSigServletPage.toString() + "/saml", false);
        response = client.target(uri).request().get();
        String errorPage = response.readEntity(String.class);
        response.close();
        // The adapter's error page must report the SAML status, not a null status.
        Assert.assertTrue(errorPage.contains("Error info: SamlAuthenticationError [reason=ERROR_STATUS"));
        Assert.assertFalse(errorPage.contains("status=null"));
        client.close();
    }

    @Test
    public void testRelayStateEncoding() throws Exception {
        // this test has a hardcoded SAMLRequest and we hack a SP face servlet to get the SAMLResponse so we can look
        // at the relay state
        employeeServletPage.navigateTo();
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmSAMLPostLoginPage.form().login("bburke", "password");
        assertCurrentUrlStartsWith(employeeServletPage);
        waitUntilElement(By.xpath("//body")).text().contains("Relay state: " + SamlSPFacade.RELAY_STATE);
        waitUntilElement(By.xpath("//body")).text().not().contains("SAML response: null");
    }

    @Test
    public void testAttributes() throws Exception {
        // Install user-attribute and group-membership mappers on the employee2 client,
        // then verify the servlet's /getAttributes output for two different users.
        ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "http://localhost:8081/employee2/");
        ProtocolMappersResource protocolMappersResource = clientResource.getProtocolMappers();
        Map<String, String> config = new LinkedHashMap<>();
        config.put("attribute.nameformat", "Basic");
        config.put("user.attribute", "topAttribute");
        config.put("attribute.name", "topAttribute");
        createProtocolMapper(protocolMappersResource, "topAttribute", "saml", "saml-user-attribute-mapper", config);
        config = new LinkedHashMap<>();
        config.put("attribute.nameformat", "Basic");
        config.put("user.attribute", "level2Attribute");
        config.put("attribute.name", "level2Attribute");
        createProtocolMapper(protocolMappersResource, "level2Attribute", "saml", "saml-user-attribute-mapper", config);
        config = new LinkedHashMap<>();
        config.put("attribute.nameformat", "Basic");
        config.put("single", "true");
        config.put("attribute.name", "group");
        createProtocolMapper(protocolMappersResource, "groups", "saml", "saml-group-membership-mapper", config);
        setRolesToCheck("manager,user");
        employee2ServletPage.navigateTo();
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmSAMLPostLoginPage.form().login("level2GroupUser", "password");
        driver.navigate().to(employee2ServletPage.toString() + "/getAttributes");
        waitUntilElement(By.xpath("//body")).text().contains("topAttribute: true");
        waitUntilElement(By.xpath("//body")).text().contains("level2Attribute: true");
        waitUntilElement(By.xpath("//body")).text().contains("attribute email: level2@redhat.com");
        waitUntilElement(By.xpath("//body")).text().not().contains("group: []");
        waitUntilElement(By.xpath("//body")).text().not().contains("group: null");
        waitUntilElement(By.xpath("//body")).text().contains("group: [level2]");
        employee2ServletPage.logout();
        checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
        setRolesToCheck("manager,employee,user");
        employee2ServletPage.navigateTo();
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmSAMLPostLoginPage.form().login(bburkeUser);
        driver.navigate().to(employee2ServletPage.toString() + "/getAttributes");
        waitUntilElement(By.xpath("//body")).text().contains("attribute email: bburke@redhat.com");
        waitUntilElement(By.xpath("//body")).text().contains("friendlyAttribute email: bburke@redhat.com");
        waitUntilElement(By.xpath("//body")).text().contains("phone: 617");
        waitUntilElement(By.xpath("//body")).text().contains("friendlyAttribute phone: null");
        employee2ServletPage.logout();
        checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
        // Hardcoded attribute, hardcoded role and role-rename mappers.
        config = new LinkedHashMap<>();
        config.put("attribute.value", "hard");
        config.put("attribute.nameformat", "Basic");
        config.put("attribute.name", "hardcoded-attribute");
        createProtocolMapper(protocolMappersResource, "hardcoded-attribute", "saml", "saml-hardcode-attribute-mapper", config);
        config = new LinkedHashMap<>();
        config.put("role", "hardcoded-role");
        createProtocolMapper(protocolMappersResource, "hardcoded-role", "saml", "saml-hardcode-role-mapper", config);
        config = new LinkedHashMap<>();
        config.put("new.role.name", "pee-on");
        config.put("role", "http://localhost:8081/employee/.employee");
        createProtocolMapper(protocolMappersResource, "renamed-employee-role", "saml", "saml-role-name-mapper", config);
        // Recreate the built-in role-list mapper to pack all roles into a single "memberOf" attribute.
        for (ProtocolMapperRepresentation mapper : clientResource.toRepresentation().getProtocolMappers()) {
            if (mapper.getName().equals("role-list")) {
                protocolMappersResource.delete(mapper.getId());
                mapper.setId(null);
                mapper.getConfig().put(RoleListMapper.SINGLE_ROLE_ATTRIBUTE, "true");
                mapper.getConfig().put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, "memberOf");
                protocolMappersResource.createMapper(mapper);
            }
        }
        setRolesToCheck("pee-on,el-jefe,manager,hardcoded-role");
        config = new LinkedHashMap<>();
        config.put("new.role.name", "el-jefe");
        config.put("role", "user");
        createProtocolMapper(protocolMappersResource, "renamed-role", "saml", "saml-role-name-mapper", config);
        employee2ServletPage.navigateTo();
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmSAMLPostLoginPage.form().login(bburkeUser);
        driver.navigate().to(employee2ServletPage.toString() + "/getAttributes");
        waitUntilElement(By.xpath("//body")).text().contains("hardcoded-attribute: hard");
        employee2ServletPage.checkRolesEndPoint(false);
        employee2ServletPage.logout();
        checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
    }

    @Test
    public void idpMetadataValidation() throws Exception {
        // The realm's published IdP descriptor must validate against the SAML metadata XSD.
        driver.navigate().to(authServerPage.toString() + "/realms/" + SAMLSERVLETDEMO + "/protocol/saml/descriptor");
        validateXMLWithSchema(driver.getPageSource(), "/adapter-test/keycloak-saml/metadata-schema/saml-schema-metadata-2.0.xsd");
    }

    @Test
    public void spMetadataValidation() throws Exception {
        // The SP descriptor generated by the admin endpoint must validate against the SAML metadata XSD.
        ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "http://localhost:8081/sales-post-sig/");
        ClientRepresentation representation = clientResource.toRepresentation();
        Client client = ClientBuilder.newClient();
        WebTarget target = client.target(authServerPage.toString() + "/admin/realms/" + SAMLSERVLETDEMO + "/clients/" + representation.getId() + "/installation/providers/saml-sp-descriptor");
        Response response = target.request().header(HttpHeaders.AUTHORIZATION, "Bearer " + adminClient.tokenManager().getAccessToken().getToken()).get();
        validateXMLWithSchema(response.readEntity(String.class), "/adapter-test/keycloak-saml/metadata-schema/saml-schema-metadata-2.0.xsd");
        response.close();
    }

    /**
     * Validates {@code xml} against the XSD found on the classpath at {@code schemaFileName};
     * fails the test (Assert.fail) when the document does not validate.
     */
    private void validateXMLWithSchema(String xml, String schemaFileName) throws SAXException, IOException {
        URL schemaFile = getClass().getResource(schemaFileName);
        Source xmlFile = new StreamSource(new ByteArrayInputStream(xml.getBytes()), xml);
        SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        Schema schema = schemaFactory.newSchema(schemaFile);
        Validator validator = schema.newValidator();
        try {
            validator.validate(xmlFile);
            System.out.println(xmlFile.getSystemId() + " is valid");
        } catch (SAXException e) {
            System.out.println(xmlFile.getSystemId() + " is NOT valid");
            System.out.println("Reason: " + e.getLocalizedMessage());
            Assert.fail();
        }
    }

    /** Creates one protocol mapper with the given name/protocol/type/config on the resource. */
    private void createProtocolMapper(ProtocolMappersResource resource, String name, String protocol, String protocolMapper, Map<String, String> config) {
        ProtocolMapperRepresentation representation = new ProtocolMapperRepresentation();
        representation.setName(name);
        representation.setProtocol(protocol);
        representation.setProtocolMapper(protocolMapper);
        representation.setConfig(config);
        resource.createMapper(representation);
    }

    /** Logs in, points the employee2 servlet at the comma-separated roles to check, then logs out. */
    private void setRolesToCheck(String roles) {
        employee2ServletPage.navigateTo();
        assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
        testRealmSAMLPostLoginPage.form().login(bburkeUser);
        driver.navigate().to(employee2ServletPage.toString() + "/setCheckRoles?roles=" + roles);
        employee2ServletPage.logout();
    }

    /** Container-specific 403 assertion: EAP6 renders an empty body, others show FORBIDDEN_TEXT. */
    private void assertOnForbiddenPage() {
        switch (System.getProperty("app.server")) {
            case "eap6":
                waitUntilElement(By.xpath("//body")).text().not().contains("principal=");
                String source = driver.getPageSource();
                assertTrue(source.isEmpty() || source.contains("<body></body>"));
                break;
            default:
                waitUntilElement(By.xpath("//body")).text().contains(FORBIDDEN_TEXT);
        }
    }
}
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.ant; import org.apache.tools.ant.*; import org.apache.tools.ant.types.*; import proguard.*; import proguard.util.ListUtil; import java.io.File; /** * This FileSet represents a class path entry (or a set of class path entries) * in Ant. * * @author Eric Lafortune */ public class ClassPathElement extends Path { private String filter; private String apkFilter; private String jarFilter; private String aarFilter; private String warFilter; private String earFilter; private String zipFilter; /** * @see Path#Path(Project) */ public ClassPathElement(Project project) { super(project); } /** * Adds the contents of this class path element to the given class path. * @param classPath the class path to be extended. * @param output specifies whether this is an output entry or not. */ public void appendClassPathEntriesTo(ClassPath classPath, boolean output) { File baseDir = getProject().getBaseDir(); String[] fileNames; if (isReference()) { // Get the referenced path or file set. 
Object referencedObject = getCheckedRef(DataType.class, DataType.class.getName()); if (referencedObject instanceof Path) { Path path = (Path)referencedObject; // Get the names of the files in the referenced path. fileNames = path.list(); } else if (referencedObject instanceof AbstractFileSet) { AbstractFileSet fileSet = (AbstractFileSet)referencedObject; // Get the names of the existing input files in the referenced file set. DirectoryScanner scanner = fileSet.getDirectoryScanner(getProject()); baseDir = scanner.getBasedir(); fileNames = scanner.getIncludedFiles(); } else { throw new BuildException("The refid attribute doesn't point to a <path> element or a <fileset> element"); } } else { // Get the names of the files in this path. fileNames = list(); } if (output) { if (fileNames.length != 1) { throw new BuildException("The <outjar> element must specify exactly one file or directory ["+fileNames.length+"]"); } } //else //{ // if (fileNames.length < 1) // { // throw new BuildException("The <injar> element must specify at least one file or directory"); // } //} for (int index = 0; index < fileNames.length; index++) { // Create a new class path entry, with the proper file name and // any filters. String fileName = fileNames[index]; File file = new File(fileName); ClassPathEntry entry = new ClassPathEntry(file.isAbsolute() ? file : new File(baseDir, fileName), output); entry.setFilter(ListUtil.commaSeparatedList(filter)); entry.setApkFilter(ListUtil.commaSeparatedList(apkFilter)); entry.setJarFilter(ListUtil.commaSeparatedList(jarFilter)); entry.setAarFilter(ListUtil.commaSeparatedList(aarFilter)); entry.setWarFilter(ListUtil.commaSeparatedList(warFilter)); entry.setEarFilter(ListUtil.commaSeparatedList(earFilter)); entry.setZipFilter(ListUtil.commaSeparatedList(zipFilter)); // Add it to the class path. classPath.add(entry); } } // Ant task attributes. /** * @deprecated Use {@link #setLocation(File)} instead. 
*/ public void setFile(File file) { setLocation(file); } /** * @deprecated Use {@link #setLocation(File)} instead. */ public void setDir(File file) { setLocation(file); } /** * @deprecated Use {@link #setLocation(File)} instead. */ public void setName(File file) { setLocation(file); } public void setFilter(String filter) { this.filter = filter; } public void setApkfilter(String apkFilter) { this.apkFilter = apkFilter; } public void setJarfilter(String jarFilter) { this.jarFilter = jarFilter; } public void setAarfilter(String aarFilter) { this.aarFilter = aarFilter; } public void setWarfilter(String warFilter) { this.warFilter = warFilter; } public void setEarfilter(String earFilter) { this.earFilter = earFilter; } public void setZipfilter(String zipFilter) { this.zipFilter = zipFilter; } }
/* ***** BEGIN LICENSE BLOCK ***** * JTransforms * Copyright (c) 2007 onward, Piotr Wendykier * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * ***** END LICENSE BLOCK ***** */ package org.jtransforms.fft; // @formatter:off import pl.edu.icm.jlargearrays.DoubleLargeArray; import pl.edu.icm.jlargearrays.FloatLargeArray; /** * * This is a set of utility methods for R/W access to data resulting from a call * to the Fourier transform of <em>real</em> data. 
Memory optimized methods, * namely * <ul> * <li>{@link DoubleFFT_2D#realForward(double[])}</li> * <li>{@link DoubleFFT_2D#realForward(DoubleLargeArray)}</li> * <li>{@link DoubleFFT_2D#realForward(double[][])}</li> * <li>{@link FloatFFT_2D#realForward(float[])}</li> * <li>{@link FloatFFT_2D#realForward(FloatLargeArray)}</li> * <li>{@link FloatFFT_2D#realForward(float[][])}</li> * </ul> * are implemented to handle this case specifically. However, packing of the * data in the data array is somewhat obscure. This class provides methods for * direct access to the data, without the burden of all necessary tests. * <h3>Example for Fourier Transform of real, double precision 1d data</h3> * * <pre> * DoubleFFT_2D fft = new DoubleFFT_2D(rows, columns); * double[] data = new double[2 * rows * columns]; * ... * fft.realForwardFull(data); * data[r1 * 2 * columns + c1] = val1; * val2 = data[r2 * 2 * columns + c2]; * </pre> * is equivalent to * <pre> * DoubleFFT_2D fft = new DoubleFFT_2D(rows, columns); * RealFFTUtils_2D unpacker = new RealFFTUtils_2D(rows, columns); * double[] data = new double[rows * columns]; * ... * fft.realForward(data); * unpacker.pack(val1, r1, c1, data); * val2 = unpacker.unpack(r2, c2, data, 0); * </pre> * Even (resp. odd) values of <code>c</code> correspond to the real (resp. * imaginary) part of the Fourier mode. * <h3>Example for Fourier Transform of real, double precision 2d data</h3> * * <pre> * DoubleFFT_2D fft = new DoubleFFT_2D(rows, columns); * double[][] data = new double[rows][2 * columns]; * ... * fft.realForwardFull(data); * data[r1][c1] = val1; * val2 = data[r2][c2]; * </pre> * is equivalent to * <pre> * DoubleFFT_2D fft = new DoubleFFT_2D(rows, columns); * RealFFTUtils_2D unpacker = new RealFFTUtils_2D(rows, columns); * double[][] data = new double[rows][columns]; * ... * fft.realForward(data); * unpacker.pack(val1, r1, c1, data); * val2 = unpacker.unpack(r2, c2, data, 0); * </pre> * Even (resp. 
odd) values of <code>c</code> correspond to the real (resp.
 * imaginary) part of the Fourier mode.
 *
 * @author S&eacute;bastien Brisard
 */
// @formatter:on
public class RealFFTUtils_2D {

    /**
     * The constant <code>int</code> value of 1.
     */
    private static final int ONE = 1;

    /**
     * The constant <code>int</code> value of 2.
     */
    private static final int TWO = 2;

    /**
     * The constant <code>int</code> value of 0.
     */
    private static final int ZERO = 0;

    /**
     * The constant <code>long</code> value of 1.
     */
    private static final long ONEL = 1;

    /**
     * The constant <code>long</code> value of 2.
     */
    private static final long TWOL = 2;

    /**
     * The constant <code>long</code> value of 0.
     */
    private static final long ZEROL = 0;

    /**
     * The size of the data in the second direction.
     */
    private final int columns;

    /**
     * The size of the data in the first direction.
     */
    private final int rows;

    /**
     * The size of the data in the second direction (long variant, used by
     * the large-array overloads).
     */
    private final long columnsl;

    /**
     * The size of the data in the first direction (long variant, used by
     * the large-array overloads).
     */
    private final long rowsl;

    /**
     * Creates a new instance of this class. The size of the underlying
     * {@link DoubleFFT_2D} or {@link FloatFFT_2D} must be specified.
     *
     * @param rows
     *            number of rows
     * @param columns
     *            number of columns
     */
    public RealFFTUtils_2D(final long rows, final long columns) {
        // Narrowing casts: the int fields are only meaningful when the
        // sizes fit in an int; the long fields keep the exact values.
        this.columns = (int) columns;
        this.rows = (int) rows;
        this.columnsl = columns;
        this.rowsl = rows;
    }

    /**
     *
     * Returns the 1d index of the specified 2d Fourier mode.
In other words, if
     * <code>packed</code> contains the transformed data following a call to
     * {@link DoubleFFT_2D#realForward(double[])} or
     * {@link FloatFFT_2D#realForward(float[])}, then the returned value
     * <code>index</code> gives access to the <code>[r][c]</code> Fourier mode
     * <ul>
     * <li>if <code>index == {@link Integer#MIN_VALUE}</code>, then the Fourier
     * mode is zero,</li>
     * <li>if <code>index &ge; 0</code>, then the Fourier mode is
     * <code>packed[index]</code>,</li>
     * <li>if <code>index &lt; 0 </code>, then the Fourier mode is
     * <code>-packed[-index]</code>,</li>
     * </ul>
     *
     * @param r
     *            the row index
     * @param c
     *            the column index
     *
     * @return the value of <code>index</code>
     */
    public int getIndex(final int r, final int c) {
        // Parity of c: even columns address the real part of a mode, odd
        // columns the imaginary part (see the class javadoc).
        final int cmod2 = c & ONE;
        // 2 * r, used to compare r against rows / 2 without a division.
        final int rmul2 = r << ONE;
        if (r != ZERO) {
            if (c <= ONE) {
                // First column pair (c == 0 or c == 1).
                if (rmul2 == rows) {
                    // r == rows / 2 (only possible when rows is even).
                    if (cmod2 == ONE) {
                        // Imaginary part of this mode is identically zero.
                        return Integer.MIN_VALUE;
                    }
                    return ((rows * columns) >> ONE);
                } else if (rmul2 < rows) {
                    // r < rows / 2: the mode is stored directly.
                    return columns * r + cmod2;
                } else if (cmod2 == ZERO) {
                    // r > rows / 2: real part is read from row (rows - r).
                    return columns * (rows - r);
                } else {
                    // r > rows / 2: imaginary part is the negated entry of
                    // row (rows - r); negative index signals the negation.
                    return -(columns * (rows - r) + ONE);
                }
            } else if ((c == columns) || (c == columns + ONE)) {
                // Last column pair (c == columns or c == columns + 1).
                if (rmul2 == rows) {
                    if (cmod2 == ONE) {
                        // Imaginary part is identically zero here as well.
                        return Integer.MIN_VALUE;
                    }
                    return ((rows * columns) >> ONE) + ONE;
                } else if (rmul2 < rows) {
                    if (cmod2 == ZERO) {
                        return columns * (rows - r) + ONE;
                    } else {
                        return -(columns * (rows - r));
                    }
                } else {
                    return columns * r + ONE - cmod2;
                }
            } else if (c < columns) {
                // Interior columns of a stored row: direct addressing.
                return columns * r + c;
            } else if (cmod2 == ZERO) {
                // c >= columns: real part recovered from the mirrored mode.
                return columns * (rows + TWO - r) - c;
            } else {
                // c >= columns: imaginary part is the negated mirrored entry.
                return -(columns * (rows + TWO - r) - c + TWO);
            }
        } else if ((c == ONE) || (c == columns + ONE)) {
            // Row 0: these two imaginary components are identically zero.
            return Integer.MIN_VALUE;
        } else if (c == columns) {
            return ONE;
        } else if (c < columns) {
            return c;
        } else if (cmod2 == ZERO) {
            return (columns << ONE) - c;
        } else {
            return -((columns << ONE) - c + TWO);
        }
    }

    /**
     *
     * Returns the 1d index of the specified 2d Fourier mode.
In other words, if
     * <code>packed</code> contains the transformed data following a call to
     * {@link DoubleFFT_2D#realForward(DoubleLargeArray)} or
     * {@link FloatFFT_2D#realForward(FloatLargeArray)}, then the returned value
     * <code>index</code> gives access to the <code>[r][c]</code> Fourier mode
     * <ul>
     * <li>if <code>index == {@link Long#MIN_VALUE}</code>, then the Fourier
     * mode is zero,</li>
     * <li>if <code>index &ge; 0</code>, then the Fourier mode is
     * <code>packed[index]</code>,</li>
     * <li>if <code>index &lt; 0</code>, then the Fourier mode is
     * <code>-packed[-index]</code>,</li>
     * </ul>
     *
     * @param r
     *            the row index
     * @param c
     *            the column index
     *
     * @return the value of <code>index</code>
     */
    public long getIndex(final long r, final long c) {
        // Long twin of getIndex(int, int); same branch structure, operating
        // on the long-sized fields (rowsl, columnsl).
        // Parity of c: even = real part, odd = imaginary part.
        final long cmod2 = c & ONEL;
        // 2 * r, compared against rowsl instead of rowsl / 2.
        final long rmul2 = r << ONEL;
        if (r != ZERO) {
            if (c <= ONEL) {
                // First column pair (c == 0 or c == 1).
                if (rmul2 == rowsl) {
                    // r == rowsl / 2 (only possible when rowsl is even).
                    if (cmod2 == ONEL) {
                        // Imaginary part of this mode is identically zero.
                        return Long.MIN_VALUE;
                    }
                    return ((rowsl * columnsl) >> ONEL);
                } else if (rmul2 < rowsl) {
                    // r < rowsl / 2: the mode is stored directly.
                    return columnsl * r + cmod2;
                } else if (cmod2 == ZEROL) {
                    // r > rowsl / 2: real part read from row (rowsl - r).
                    return columnsl * (rowsl - r);
                } else {
                    // r > rowsl / 2: negated entry; negative index signals it.
                    return -(columnsl * (rowsl - r) + ONEL);
                }
            } else if ((c == columnsl) || (c == columnsl + ONEL)) {
                // Last column pair (c == columnsl or c == columnsl + 1).
                if (rmul2 == rowsl) {
                    if (cmod2 == ONEL) {
                        // Imaginary part is identically zero here as well.
                        return Long.MIN_VALUE;
                    }
                    return ((rowsl * columnsl) >> ONEL) + ONEL;
                } else if (rmul2 < rowsl) {
                    if (cmod2 == ZEROL) {
                        return columnsl * (rowsl - r) + ONEL;
                    } else {
                        return -(columnsl * (rowsl - r));
                    }
                } else {
                    return columnsl * r + ONEL - cmod2;
                }
            } else if (c < columnsl) {
                // Interior columns of a stored row: direct addressing.
                return columnsl * r + c;
            } else if (cmod2 == ZEROL) {
                // c >= columnsl: real part recovered from the mirrored mode.
                return columnsl * (rowsl + TWOL - r) - c;
            } else {
                // c >= columnsl: imaginary part is the negated mirrored entry.
                return -(columnsl * (rowsl + TWOL - r) - c + TWOL);
            }
        } else if ((c == ONEL) || (c == columnsl + ONEL)) {
            // Row 0: these two imaginary components are identically zero.
            return Long.MIN_VALUE;
        } else if (c == columnsl) {
            return ONEL;
        } else if (c < columnsl) {
            return c;
        } else if (cmod2 == ZEROL) {
            return (columnsl << ONEL) - c;
        } else {
            return -((columnsl << ONEL) - c + TWOL);
        }
    }

    /**
     * Sets the specified Fourier mode of the transformed data.
The data array * results from a call to {@link DoubleFFT_2D#realForward(double[])}. * * @param val * the new value of the <code>[r][c]</code> Fourier mode * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> */ public void pack(final double val, final int r, final int c, final double[] packed, final int pos) { final int index = getIndex(r, c); if (index >= 0) { packed[pos + index] = val; } else if (index > Integer.MIN_VALUE) { packed[pos - index] = -val; } else { throw new IllegalArgumentException( String.format( "[%d][%d] component cannot be modified (always zero)", r, c)); } } /** * Sets the specified Fourier mode of the transformed data. The data array * results from a call to {@link DoubleFFT_2D#realForward(DoubleLargeArray)}. * * @param val * the new value of the <code>[r][c]</code> Fourier mode * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> */ public void pack(final double val, final long r, final long c, final DoubleLargeArray packed, final long pos) { final long index = getIndex(r, c); if (index >= 0) { packed.setDouble(pos + index, val); } else if (index > Long.MIN_VALUE) { packed.setDouble(pos - index, -val); } else { throw new IllegalArgumentException( String.format( "[%d][%d] component cannot be modified (always zero)", r, c)); } } /** * Sets the specified Fourier mode of the transformed data. The data array * results from a call to {@link DoubleFFT_2D#realForward(double[][])}. 
* * @param val * the new value of the <code>[r][c]</code> Fourier mode * @param r * the row index * @param c * the column index * @param packed * the transformed data */ public void pack(final double val, final int r, final int c, final double[][] packed) { final int index = getIndex(r, c); if (index >= 0) { packed[index / columns][index % columns] = val; } else if (index > Integer.MIN_VALUE) { packed[(-index) / columns][(-index) % columns] = -val; } else { throw new IllegalArgumentException( String.format( "[%d][%d] component cannot be modified (always zero)", r, c)); } } /** * Sets the specified Fourier mode of the transformed data. The data array * results from a call to {@link FloatFFT_2D#realForward(float[])}. * * @param val * the new value of the <code>[r][c]</code> Fourier mode * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> */ public void pack(final float val, final int r, final int c, final float[] packed, final int pos) { final int index = getIndex(r, c); if (index >= 0) { packed[pos + index] = val; } else if (index > Integer.MIN_VALUE) { packed[pos - index] = -val; } else { throw new IllegalArgumentException( String.format( "[%d][%d] component cannot be modified (always zero)", r, c)); } } /** * Sets the specified Fourier mode of the transformed data. The data array * results from a call to {@link FloatFFT_2D#realForward(FloatLargeArray)}. 
* * @param val * the new value of the <code>[r][c]</code> Fourier mode * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> */ public void pack(final float val, final long r, final long c, final FloatLargeArray packed, final long pos) { final long index = getIndex(r, c); if (index >= 0) { packed.setFloat(pos + index, val); } else if (index > Long.MIN_VALUE) { packed.setFloat(pos - index, -val); } else { throw new IllegalArgumentException( String.format( "[%d][%d] component cannot be modified (always zero)", r, c)); } } /** * Sets the specified Fourier mode of the transformed data. The data array * results from a call to {@link FloatFFT_2D#realForward(float[][])}. * * @param val * the new value of the <code>[r][c]</code> Fourier mode * @param r * the row index * @param c * the column index * @param packed * the transformed data */ public void pack(final float val, final int r, final int c, final float[][] packed) { final int index = getIndex(r, c); if (index >= 0) { packed[index / columns][index % columns] = val; } else if (index > Integer.MIN_VALUE) { packed[(-index) / columns][(-index) % columns] = -val; } else { throw new IllegalArgumentException( String.format( "[%d][%d] component cannot be modified (always zero)", r, c)); } } /** * Returns the specified Fourier mode of the transformed data. The data * array results from a call to {@link DoubleFFT_2D#realForward(double[])}. 
* * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> * * @return the value of the <code>[r][c]</code> Fourier mode */ public double unpack(final int r, final int c, final double[] packed, final int pos) { final int index = getIndex(r, c); if (index >= 0) { return packed[pos + index]; } else if (index > Integer.MIN_VALUE) { return -packed[pos - index]; } else { return ZERO; } } /** * Returns the specified Fourier mode of the transformed data. The data * array results from a call to {@link DoubleFFT_2D#realForward(DoubleLargeArray)}. * * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> * * @return the value of the <code>[r][c]</code> Fourier mode */ public double unpack(final long r, final long c, final DoubleLargeArray packed, final long pos) { final long index = getIndex(r, c); if (index >= 0) { return packed.getDouble(pos + index); } else if (index > Long.MIN_VALUE) { return -packed.getDouble(pos - index); } else { return ZEROL; } } /** * Returns the specified Fourier mode of the transformed data. The data * array results from a call to {@link DoubleFFT_2D#realForward(double[][])} * . * * @param r * the row index * @param c * the column index * @param packed * the transformed data * * @return the value of the <code>[r][c]</code> Fourier mode */ public double unpack(final int r, final int c, final double[][] packed) { final int index = getIndex(r, c); if (index >= 0) { return packed[index / columns][index % columns]; } else if (index > Integer.MIN_VALUE) { return -packed[(-index) / columns][(-index) % columns]; } else { return ZERO; } } /** * Returns the specified Fourier mode of the transformed data. The data * array results from a call to {@link FloatFFT_2D#realForward(float[])} * . 
* * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> * * @return the value of the <code>[r][c]</code> Fourier mode */ public float unpack(final int r, final int c, final float[] packed, final int pos) { final int index = getIndex(r, c); if (index >= 0) { return packed[pos + index]; } else if (index > Integer.MIN_VALUE) { return -packed[pos - index]; } else { return ZERO; } } /** * Returns the specified Fourier mode of the transformed data. The data * array results from a call to {@link FloatFFT_2D#realForward(FloatLargeArray)} * . * * @param r * the row index * @param c * the column index * @param packed * the transformed data * @param pos * index of the first element in array <code>packed</code> * * @return the value of the <code>[r][c]</code> Fourier mode */ public float unpack(final long r, final long c, final FloatLargeArray packed, final long pos) { final long index = getIndex(r, c); if (index >= 0) { return packed.getFloat(pos + index); } else if (index > Long.MIN_VALUE) { return -packed.getFloat(pos - index); } else { return ZEROL; } } /** * Returns the specified Fourier mode of the transformed data. The data * array results from a call to {@link FloatFFT_2D#realForward(float[][])} . * * @param r * the row index * @param c * the column index * @param packed * the transformed data * * @return the value of the <code>[r][c]</code> Fourier mode */ public float unpack(final int r, final int c, final float[][] packed) { final int index = getIndex(r, c); if (index >= 0) { return packed[index / columns][index % columns]; } else if (index > Integer.MIN_VALUE) { return -packed[(-index) / columns][(-index) % columns]; } else { return ZERO; } } }
package com.capitalone.dashboard.collector; import com.capitalone.dashboard.misc.HygieiaException; import com.capitalone.dashboard.model.ChangeOrder; import com.capitalone.dashboard.model.Cmdb; import com.capitalone.dashboard.model.HpsmCollector; import com.capitalone.dashboard.model.Incident; import com.capitalone.dashboard.repository.BaseCollectorRepository; import com.capitalone.dashboard.repository.ChangeOrderRepository; import com.capitalone.dashboard.repository.CmdbRepository; import com.capitalone.dashboard.repository.HpsmRepository; import com.capitalone.dashboard.repository.IncidentRepository; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.TaskScheduler; import org.springframework.stereotype.Component; import java.util.ArrayList; import java.util.List; /** * CollectorTask that fetches configuration item data from HPSM */ @Component public class HpsmCollectorTask extends CollectorTask<HpsmCollector> { private static final Log LOG = LogFactory.getLog(HpsmCollectorTask.class); private final HpsmRepository hpsmRepository; private final CmdbRepository cmdbRepository; private final ChangeOrderRepository changeOrderRepository; private final IncidentRepository incidentRepository; private final HpsmClient hpsmClient; private final HpsmSettings hpsmSettings; private static final String APP_ACTION_NAME = "Hpsm"; private static final String CHANGE_ACTION_NAME = "HpsmChange"; private static final String INCIDENT_ACTION_NAME = "HpsmIncident"; private String collectorAction; private static final String DEFAULT_COLLECTOR_ACTION_NAME = APP_ACTION_NAME; private static final String COLLECTOR_ACTION_PROPERTY_KEY="collector.action"; @Autowired public HpsmCollectorTask(TaskScheduler taskScheduler, HpsmSettings hpsmSettings, HpsmRepository hpsmRepository, CmdbRepository cmdbRepository, ChangeOrderRepository changeOrderRepository, 
IncidentRepository incidentRepository, HpsmClient hpsmClient) { super(taskScheduler, (System.getProperty(COLLECTOR_ACTION_PROPERTY_KEY) == null) ? DEFAULT_COLLECTOR_ACTION_NAME : System.getProperty(COLLECTOR_ACTION_PROPERTY_KEY)); collectorAction = (System.getProperty(COLLECTOR_ACTION_PROPERTY_KEY) == null) ? DEFAULT_COLLECTOR_ACTION_NAME : System.getProperty(COLLECTOR_ACTION_PROPERTY_KEY); this.hpsmSettings = hpsmSettings; this.hpsmRepository = hpsmRepository; this.cmdbRepository = cmdbRepository; this.changeOrderRepository = changeOrderRepository; this.incidentRepository = incidentRepository; this.hpsmClient = hpsmClient; } /** * Accessor method for the collector prototype object */ @Override public HpsmCollector getCollector() { return HpsmCollector.prototype(collectorAction); } @Override public BaseCollectorRepository<HpsmCollector> getCollectorRepository() { return hpsmRepository; } @Override public String getCron() { String cron = hpsmSettings.getCron(); if(collectorAction.equals(CHANGE_ACTION_NAME)) { cron = hpsmSettings.getChangeOrderCron(); } else if(collectorAction.equals(INCIDENT_ACTION_NAME)) { cron = hpsmSettings.getIncidentCron(); } return cron; } public String getCollectorAction() { return collectorAction; } public void setCollectorAction(String collectorAction) { this.collectorAction = collectorAction; } private void collectApps(HpsmCollector collector) throws HygieiaException{ List<Cmdb> cmdbList; List<String> configurationItemNameList = new ArrayList<>(); int updatedCount = 0; int insertCount = 0; int inValidCount; cmdbList = hpsmClient.getApps(); for(Cmdb cmdb: cmdbList){ String configItem = cmdb.getConfigurationItem(); Cmdb cmdbDbItem = cmdbRepository.findByConfigurationItem(configItem); configurationItemNameList.add(configItem); if(cmdbDbItem != null && !cmdb.equals(cmdbDbItem)){ cmdb.setId(cmdbDbItem.getId()); cmdb.setCollectorItemId(collector.getId()); cmdbRepository.save(cmdb); updatedCount++; }else if(cmdbDbItem == null){ 
cmdb.setCollectorItemId(collector.getId()); cmdbRepository.save(cmdb); insertCount++; } } inValidCount = cleanUpOldCmdbItems(configurationItemNameList); LOG.info("Inserted Cmdb Item Count: " + insertCount); LOG.info("Updated Cmdb Item Count: " + updatedCount); LOG.info("Invalid Cmdb Item Count: " + inValidCount); } private void collectChangeOrders(HpsmCollector collector) throws HygieiaException{ long lastExecuted = collector.getLastExecuted(); long changeCount = changeOrderRepository.count(); hpsmClient.setLastExecuted(lastExecuted); hpsmClient.setChangeCount(changeCount); List<ChangeOrder> changeList; int updatedCount = 0; int insertCount = 0; changeList = hpsmClient.getChangeOrders(); for (ChangeOrder changeOrder : changeList) { String changeId = changeOrder.getChangeID(); ChangeOrder changeDbItem = changeOrderRepository.findByChangeID(changeId); if (changeDbItem != null && !changeOrder.equals(changeDbItem)) { changeOrder.setId(changeDbItem.getId()); changeOrder.setCollectorItemId(collector.getId()); changeOrderRepository.save(changeOrder); updatedCount++; } else if (changeDbItem == null) { changeOrder.setCollectorItemId(collector.getId()); changeOrderRepository.save(changeOrder); insertCount++; } } LOG.info("Inserted ChangeOrder Item Count: " + insertCount); LOG.info("Updated ChangeOrder Item Count: " + updatedCount); } private void collectIncidents(HpsmCollector collector) throws HygieiaException { long lastExecuted = collector.getLastExecuted(); long incidentCount = incidentRepository.count(); List<Incident> incidentList; int updatedCount = 0; int insertCount = 0; hpsmClient.setLastExecuted(lastExecuted); hpsmClient.setIncidentCount(incidentCount); incidentList = hpsmClient.getIncidents(); for (Incident incident : incidentList) { String incidentId = incident.getIncidentID(); Incident incidentDbItem = incidentRepository.findByIncidentID(incidentId); if (incidentDbItem != null && !incident.equals(incidentDbItem)) { incident.setId(incidentDbItem.getId()); 
incident.setCollectorItemId(collector.getId()); incidentRepository.save(incident); updatedCount++; } else if (incidentDbItem == null) { incident.setCollectorItemId(collector.getId()); incidentRepository.save(incident); insertCount++; } } LOG.info("Inserted Incident Item Count: " + insertCount); LOG.info("Updated Incident Item Count: " + updatedCount); } @Override public void collect(HpsmCollector collector) { long start = System.currentTimeMillis(); logBanner("Starting..."); try { switch (collectorAction) { case APP_ACTION_NAME: log("Collecting Apps"); collectApps(collector); break; case CHANGE_ACTION_NAME: log("Collecting Changes"); collectChangeOrders(collector); break; case INCIDENT_ACTION_NAME: log("Collecting Incidents"); collectIncidents(collector); break; default: log("Unknown value passed to -D" + COLLECTOR_ACTION_PROPERTY_KEY + ": " + collectorAction); break; } }catch (HygieiaException he){ LOG.error(he); } log("Finished", start); } /** * Takes configurationItemNameList (list of all APP/component names) and List<Cmdb> from client and sets flag to false for old items in mongo * @param configurationItemNameList * @return return count of items invalidated */ private int cleanUpOldCmdbItems(List<String> configurationItemNameList) { int inValidCount = 0; for(Cmdb cmdb: cmdbRepository.findAllByValidConfigItem(true)){ String configItem = cmdb.getConfigurationItem(); if(configurationItemNameList != null && !configurationItemNameList.contains(configItem)){ cmdb.setValidConfigItem(false); cmdbRepository.save(cmdb); inValidCount++; } } return inValidCount; } }
package com.earthstormsoftware.motecontrol.api; import android.app.IntentService; import android.content.Intent; import android.content.Context; import android.graphics.Color; import com.earthstormsoftware.motecontrol.MoteControl; import com.earthstormsoftware.motecontrol.model.Mote; import okhttp3.OkHttpClient; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.converter.gson.GsonConverterFactory; public class MoteAPIService extends IntentService { private static final String ACTION_GET_STATE = "com.earthstormsoftware.motecontrol.api.action.GET_STATE"; private static final String ACTION_SET_STATE = "com.earthstormsoftware.motecontrol.api.action.SET_STATE"; private static final String ACTION_SET_COLOUR = "com.earthstormsoftware.motecontrol.api.action.SET_COLOUR"; private static final String EXTRA_MOTE = "com.earthstormsoftware.motecontrol.api.extra.MOTE"; private static final String EXTRA_STATE = "com.earthstormsoftware.motecontrol.api.extra.STATE"; private static final String EXTRA_COLOUR = "com.earthstormsoftware.motecontrol.api.extra.COLOUR"; public MoteAPIService() { super("MoteAPIService"); } /** * Starts the service to call the Mote API to get the current state. If * the service is already performing a task this action will be queued. */ public static void startActionGetState(Context context, Mote mote) { Intent intent = new Intent(context, MoteAPIService.class); intent.setAction(ACTION_GET_STATE); intent.putExtra(EXTRA_MOTE, mote); context.startService(intent); } /** * Starts the service to call the Mote API to set the current state. If * the service is already performing a task this action will be queued. 
 */
public static void startActionSetState(Context context, Mote mote, boolean newState) {
    // Package the request as an explicit Intent so IntentService queues it FIFO.
    Intent intent = new Intent(context, MoteAPIService.class);
    intent.setAction(ACTION_SET_STATE);
    intent.putExtra(EXTRA_MOTE, mote);
    intent.putExtra(EXTRA_STATE, newState);
    context.startService(intent);
}

/**
 * Starts the service to call the Mote API to change the current colour. If
 * the service is already performing a task this action will be queued.
 *
 * @param context   caller's context, used to address the service
 * @param mote      the target Mote (parcelled into the intent)
 * @param newColour colour as an Android ARGB int; converted to RRGGBB before the API call
 */
public static void startActionSetColour(Context context, Mote mote, int newColour) {
    Intent intent = new Intent(context, MoteAPIService.class);
    intent.setAction(ACTION_SET_COLOUR);
    intent.putExtra(EXTRA_MOTE, mote);
    intent.putExtra(EXTRA_COLOUR, newColour);
    context.startService(intent);
}

/**
 * Dispatches each queued intent (on the IntentService worker thread) to the
 * matching handler based on its action string. Unknown actions are ignored.
 */
@Override
protected void onHandleIntent(Intent intent) {
    if (intent != null) {
        final String action = intent.getAction();
        if (ACTION_GET_STATE.equals(action)) {
            final Mote mote = intent.getParcelableExtra(EXTRA_MOTE);
            handleActionGetState(mote);
        } else if (ACTION_SET_STATE.equals(action)) {
            final Mote mote = intent.getParcelableExtra(EXTRA_MOTE);
            // Default false: an intent missing EXTRA_STATE is treated as "turn off".
            final boolean newState = intent.getBooleanExtra(EXTRA_STATE, false);
            handleActionSetState(mote, newState);
        } else if (ACTION_SET_COLOUR.equals(action)) {
            final Mote mote = intent.getParcelableExtra(EXTRA_MOTE);
            final int newColour = intent.getIntExtra(EXTRA_COLOUR, 0);
            handleActionSetColour(mote, newColour);
        }
    }
}

/**
 * Fetches the mote's current status from the API; the result is delivered
 * asynchronously via handleResponse/handleFailure (broadcast to the app).
 */
private void handleActionGetState(Mote mote) {
    MoteAPIV0 moteAPI = getAPIInstance(mote);
    // This is where the API actually gets called.
    // Note: Using Enqueue means this is an asynchronous call, and not handled on the UI thread.
    // NOTE(review): enqueue() returns immediately, so onHandleIntent finishes and the
    // IntentService may stop before the callback fires — confirm the callbacks reliably
    // run, or consider the synchronous call.execute() here (we are already off the UI thread).
    final Call<MoteAPIResponseV0> call = moteAPI.getMoteStatus();
    call.enqueue(new Callback<MoteAPIResponseV0>() {
        @Override
        public void onResponse(Call<MoteAPIResponseV0> call, Response<MoteAPIResponseV0> response) {
            handleResponse(response);
        }

        @Override
        public void onFailure(Call<MoteAPIResponseV0> call, Throwable t) {
            handleFailure(call, t);
        }
    });
}

/**
 * Turns the mote on or off via the API; the result is delivered asynchronously
 * via handleResponse/handleFailure.
 */
private void handleActionSetState(Mote mote, boolean newState) {
    MoteAPIV0 moteAPI = getAPIInstance(mote);
    // Call different API methods for turning on and off
    Call<MoteAPIResponseV0> call;
    if (newState) {
        call = moteAPI.setMoteOn();
    } else {
        call = moteAPI.setMoteOff();
    }
    call.enqueue(new Callback<MoteAPIResponseV0>() {
        @Override
        public void onResponse(Call<MoteAPIResponseV0> call, Response<MoteAPIResponseV0> response) {
            handleResponse(response);
        }

        @Override
        public void onFailure(Call<MoteAPIResponseV0> call, Throwable t) {
            handleFailure(call, t);
        }
    });
}

/**
 * Sets the mote's colour via the API; the result is delivered asynchronously
 * via handleResponse/handleFailure.
 */
private void handleActionSetColour(Mote mote, int newColour) {
    // Initialise the API if required
    MoteAPIV0 moteAPI = getAPIInstance(mote);
    // Android stores colour as ints, but the API expects RGB values in the form RRGGBB, so we
    // need to convert before calling the API (alpha byte is masked off).
    String strColour = String.format("%06X", (0xFFFFFF & newColour));
    // This is where the API actually gets called.
    // Note: Using Enqueue means this is an asynchronous call, and not handled on the UI thread.
    final Call<MoteAPIResponseV0> call = moteAPI.setMoteColour(strColour);
    call.enqueue(new Callback<MoteAPIResponseV0>() {
        @Override
        public void onResponse(Call<MoteAPIResponseV0> call, Response<MoteAPIResponseV0> response) {
            handleResponse(response);
        }

        @Override
        public void onFailure(Call<MoteAPIResponseV0> call, Throwable t) {
            handleFailure(call, t);
        }
    });
}

// Common initialisation for Retrofit for each API call. Builds a fresh Retrofit
// client against the mote's base URI with Gson JSON conversion.
private MoteAPIV0 getAPIInstance(Mote mote) {
    // Uncomment to enable Retrofit logging
    //HttpLoggingInterceptor logging = new HttpLoggingInterceptor();
    //logging.setLevel(HttpLoggingInterceptor.Level.BODY);
    OkHttpClient.Builder httpClient = new OkHttpClient.Builder();
    //httpClient.addInterceptor(logging);
    Retrofit retrofit = new Retrofit.Builder()
            .baseUrl(mote.getUri())
            .addConverterFactory(GsonConverterFactory.create())
            .client(httpClient.build())
            .build();
    return retrofit.create(MoteAPIV0.class);
}

/*
 * This method handles 'good' responses from the API. A 'good' response is one where the HTTP
 * request completed, not that the call worked. For example, an HTTP 404 counts as a 'good'
 * response.
 *
 * Currently the API returns the same JSON object for every call, so a common method can be
 * used. If that changes, this approach will need to be reviewed.
 *
 * On success the broadcast carries "state" (boolean) and "colour" (int) extras; the
 * validation/error outcome always goes out in the "result" extra.
 */
private void handleResponse(Response<MoteAPIResponseV0> response) {
    // Create an intent for sending the response
    Intent i = new Intent(MoteControl.MOTE_API_RESPONSE);
    // Check that any response is what would be expected
    MoteAPIResponseType mrt = validateResponse(response);
    // If the response was validated, parse it into our local object
    if (mrt != MoteAPIResponseType.VALIDATION_ERROR) {
        // A 'successful' response is an HTTP 200 return code
        if (response.isSuccessful()) {
            // Update the local state
            // NOTE(review): response.body() can be null for some successful responses —
            // confirm the API always returns a body, otherwise this NPEs.
            MoteAPIResponseV0 mar = response.body();
            if (mar.getStatus() == 1) {
                i.putExtra("state", true);
            } else {
                i.putExtra("state", false);
            }
            // Colour string was already validated by validateResponse(), so this parse is safe.
            int colour = Color.parseColor("#" + mar.getColour());
            i.putExtra("colour", colour);
            // Otherwise the HTTP call failed in some way.
        } else {
            mrt = MoteAPIResponseType.API_ERROR;
        }
    }
    // Send a broadcast indicating the result of the API call.
    i.putExtra("result", mrt);
    MoteControl.getAppContext().sendBroadcast(i);
}

/*
 * This method handles 'failure' responses from the API, where the HTTP call was not able to
 * complete (i.e. there is no HTTP response code). This includes, for example, ConnectionRefused
 * and SocketTimeout errors. These errors could be pulled out in a more granular fashion by
 * checking the Exception that was thrown.
 */
private void handleFailure(Call<MoteAPIResponseV0> call, Throwable t) {
    // Everything collapses to IO_ERROR for now; broadcast carries only the result code.
    MoteAPIResponseType mrt = MoteAPIResponseType.IO_ERROR;
    Intent i = new Intent(MoteControl.MOTE_API_RESPONSE);
    i.putExtra("result", mrt);
    MoteControl.getAppContext().sendBroadcast(i);
}

// If the HTTP call completed in some way, we should check the response to ensure it is
// correctly formatted before trying to use it. Non-2xx responses are reported as OK here
// and classified as API_ERROR later by handleResponse().
private MoteAPIResponseType validateResponse(Response<MoteAPIResponseV0> response) {
    MoteAPIResponseType mart = MoteAPIResponseType.OK;
    if (response.isSuccessful()) {
        MoteAPIResponseV0 mar = response.body();
        // Status should be 1 or 0 for on and off
        if ((mar.getStatus() == 1) || (mar.getStatus() == 0)) {
            mart = MoteAPIResponseType.OK;
        } else {
            mart = MoteAPIResponseType.VALIDATION_ERROR;
        }
        // The colour string can be tested by running it through the Android Color parser. If
        // it is not a valid colour, an exception will be thrown. The parsed value itself is
        // deliberately unused — only the parse success matters here.
        try {
            int testColour = Color.parseColor("#" + mar.getColour());
        } catch (IllegalArgumentException iae) {
            mart = MoteAPIResponseType.VALIDATION_ERROR;
        }
    }
    return mart;
}
}
/* * Copyright 2007 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.common.reedsolomon; /** * <p>Represents a polynomial whose coefficients are elements of a GF. * Instances of this class are immutable.</p> * <p> * <p>Much credit is due to William Rucklidge since portions of this code are an indirect * port of his C++ Reed-Solomon implementation.</p> * * @author Sean Owen */ final class GenericGFPoly { private final GenericGF field; private final int[] coefficients; /** * @param field the {@link GenericGF} instance representing the field to use * to perform computations * @param coefficients coefficients as ints representing elements of GF(size), arranged * from most significant (highest-power term) coefficient to least significant * @throws IllegalArgumentException if argument is null or empty, * or if leading coefficient is 0 and this is not a * constant polynomial (that is, it is not the monomial "0") */ GenericGFPoly(GenericGF field, int[] coefficients) { if (coefficients.length == 0) { throw new IllegalArgumentException(); } this.field = field; int coefficientsLength = coefficients.length; if (coefficientsLength > 1 && coefficients[0] == 0) { // Leading term must be non-zero for anything except the constant polynomial "0" int firstNonZero = 1; while (firstNonZero < coefficientsLength && coefficients[firstNonZero] == 0) { firstNonZero++; } if (firstNonZero == coefficientsLength) { this.coefficients = new int[]{0}; } else { 
this.coefficients = new int[coefficientsLength - firstNonZero]; System.arraycopy(coefficients, firstNonZero, this.coefficients, 0, this.coefficients.length); } } else { this.coefficients = coefficients; } } int[] getCoefficients() { return coefficients; } /** * @return degree of this polynomial */ int getDegree() { return coefficients.length - 1; } /** * @return true iff this polynomial is the monomial "0" */ boolean isZero() { return coefficients[0] == 0; } /** * @return coefficient of x^degree term in this polynomial */ int getCoefficient(int degree) { return coefficients[coefficients.length - 1 - degree]; } /** * @return evaluation of this polynomial at a given point */ int evaluateAt(int a) { if (a == 0) { // Just return the x^0 coefficient return getCoefficient(0); } int size = coefficients.length; if (a == 1) { // Just the sum of the coefficients int result = 0; for (int coefficient : coefficients) { result = GenericGF.addOrSubtract(result, coefficient); } return result; } int result = coefficients[0]; for (int i = 1; i < size; i++) { result = GenericGF.addOrSubtract(field.multiply(a, result), coefficients[i]); } return result; } GenericGFPoly addOrSubtract(GenericGFPoly other) { if (!field.equals(other.field)) { throw new IllegalArgumentException("GenericGFPolys do not have same GenericGF field"); } if (isZero()) { return other; } if (other.isZero()) { return this; } int[] smallerCoefficients = this.coefficients; int[] largerCoefficients = other.coefficients; if (smallerCoefficients.length > largerCoefficients.length) { int[] temp = smallerCoefficients; smallerCoefficients = largerCoefficients; largerCoefficients = temp; } int[] sumDiff = new int[largerCoefficients.length]; int lengthDiff = largerCoefficients.length - smallerCoefficients.length; // Copy high-order terms only found in higher-degree polynomial's coefficients System.arraycopy(largerCoefficients, 0, sumDiff, 0, lengthDiff); for (int i = lengthDiff; i < largerCoefficients.length; i++) { 
sumDiff[i] = GenericGF.addOrSubtract(smallerCoefficients[i - lengthDiff], largerCoefficients[i]); } return new GenericGFPoly(field, sumDiff); } GenericGFPoly multiply(GenericGFPoly other) { if (!field.equals(other.field)) { throw new IllegalArgumentException("GenericGFPolys do not have same GenericGF field"); } if (isZero() || other.isZero()) { return field.getZero(); } int[] aCoefficients = this.coefficients; int aLength = aCoefficients.length; int[] bCoefficients = other.coefficients; int bLength = bCoefficients.length; int[] product = new int[aLength + bLength - 1]; for (int i = 0; i < aLength; i++) { int aCoeff = aCoefficients[i]; for (int j = 0; j < bLength; j++) { product[i + j] = GenericGF.addOrSubtract(product[i + j], field.multiply(aCoeff, bCoefficients[j])); } } return new GenericGFPoly(field, product); } GenericGFPoly multiply(int scalar) { if (scalar == 0) { return field.getZero(); } if (scalar == 1) { return this; } int size = coefficients.length; int[] product = new int[size]; for (int i = 0; i < size; i++) { product[i] = field.multiply(coefficients[i], scalar); } return new GenericGFPoly(field, product); } GenericGFPoly multiplyByMonomial(int degree, int coefficient) { if (degree < 0) { throw new IllegalArgumentException(); } if (coefficient == 0) { return field.getZero(); } int size = coefficients.length; int[] product = new int[size + degree]; for (int i = 0; i < size; i++) { product[i] = field.multiply(coefficients[i], coefficient); } return new GenericGFPoly(field, product); } GenericGFPoly[] divide(GenericGFPoly other) { if (!field.equals(other.field)) { throw new IllegalArgumentException("GenericGFPolys do not have same GenericGF field"); } if (other.isZero()) { throw new IllegalArgumentException("Divide by 0"); } GenericGFPoly quotient = field.getZero(); GenericGFPoly remainder = this; int denominatorLeadingTerm = other.getCoefficient(other.getDegree()); int inverseDenominatorLeadingTerm = field.inverse(denominatorLeadingTerm); while 
(remainder.getDegree() >= other.getDegree() && !remainder.isZero()) { int degreeDifference = remainder.getDegree() - other.getDegree(); int scale = field.multiply(remainder.getCoefficient(remainder.getDegree()), inverseDenominatorLeadingTerm); GenericGFPoly term = other.multiplyByMonomial(degreeDifference, scale); GenericGFPoly iterationQuotient = field.buildMonomial(degreeDifference, scale); quotient = quotient.addOrSubtract(iterationQuotient); remainder = remainder.addOrSubtract(term); } return new GenericGFPoly[]{quotient, remainder}; } @Override public String toString() { StringBuilder result = new StringBuilder(8 * getDegree()); for (int degree = getDegree(); degree >= 0; degree--) { int coefficient = getCoefficient(degree); if (coefficient != 0) { if (coefficient < 0) { result.append(" - "); coefficient = -coefficient; } else { if (result.length() > 0) { result.append(" + "); } } if (degree == 0 || coefficient != 1) { int alphaPower = field.log(coefficient); if (alphaPower == 0) { result.append('1'); } else if (alphaPower == 1) { result.append('a'); } else { result.append("a^"); result.append(alphaPower); } } if (degree != 0) { if (degree == 1) { result.append('x'); } else { result.append("x^"); result.append(degree); } } } } return result.toString(); } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package controller.alarma;

import controller.*;
import alarma.Alarma;
import alarma.Criticidad;
import alarma.EstadoAlarma;
import alarma.HistorialAlarma;
import alarma.Periodicidad;
import alarma.TipoAlarma;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import soporte.UtilFecha;

/**
 * Data-access object for the HistorialAlarma table.
 *
 * @author carranza.matias
 */
public class GestorHistorialAlarma {

    private ConexionDB conexion;
    private Connection conn;
    // NOTE: previously this shared list was appended to by every query method, so
    // repeated calls returned accumulated duplicates. Query methods now build fresh
    // local lists; the field is retained only for backward compatibility.
    private ArrayList list;

    public GestorHistorialAlarma() {
        conexion = ConexionDB.getInstance();
        list = new ArrayList();
    }

    /**
     * Maps the current row of {@code rs} to a new HistorialAlarma, including
     * re-loading its associated Criticidad by id.
     */
    private HistorialAlarma mapRow(ResultSet rs) throws SQLException {
        HistorialAlarma historialAlarma = new HistorialAlarma();
        historialAlarma.setNumero(rs.getInt("idHistorialAlarma"));
        historialAlarma.setIdAlarma(rs.getInt("idAlarma"));
        historialAlarma.setFecha(UtilFecha.convertiFecha(rs.getDate("fecha")));
        historialAlarma.setValor(rs.getDouble("valor"));
        historialAlarma.setAcciones(rs.getBoolean("acciones"));
        // Rebuild the Criticidad association from its foreign key.
        GestorCriticidadAlarma gestorCriticidad = new GestorCriticidadAlarma();
        Criticidad criticidad = (Criticidad) gestorCriticidad.getUno(rs.getInt("idCriticidad"));
        historialAlarma.setCriticidad(criticidad);
        return historialAlarma;
    }

    /**
     * @return every HistorialAlarma row, ordered by id; empty list on error.
     */
    public ArrayList getTodos() {
        ArrayList result = new ArrayList();
        try {
            conn = conexion.getConnection();
            String sql = "SELECT * FROM HistorialAlarma ORDER BY 1";
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ResultSet rs = ps.executeQuery();
                try {
                    while (rs.next()) {
                        result.add(mapRow(rs));
                    }
                } finally {
                    // Close in finally so an exception mid-iteration cannot leak the cursor.
                    rs.close();
                }
            } finally {
                ps.close();
            }
            // connPool.closeConnection(conn);
        } catch (Exception a) {
            a.printStackTrace();
            System.out.print("Error en conexion BD: GestorHistorialAlarma! (getTodos)");
        }
        return result;
    }

    /**
     * @return every HistorialAlarma row for the given alarm id, ordered by id;
     *         empty list on error.
     */
    public ArrayList getTodos(int idAlarma) {
        ArrayList result = new ArrayList();
        try {
            conn = conexion.getConnection();
            String sql = "SELECT * FROM HistorialAlarma where idAlarma = ? ORDER BY 1";
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ps.setInt(1, idAlarma);
                ResultSet rs = ps.executeQuery();
                try {
                    while (rs.next()) {
                        result.add(mapRow(rs));
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
            // connPool.closeConnection(conn);
        } catch (Exception a) {
            a.printStackTrace();
            System.out.print("Error en conexion BD: GestorHistorialAlarma! (getTodos)");
        }
        return result;
    }

    /**
     * @return the HistorialAlarma with the highest id, or an empty instance if
     *         the table is empty or an error occurs.
     */
    public Object getUltimo() {
        HistorialAlarma historialAlarma = new HistorialAlarma();
        try {
            conn = conexion.getConnection();
            String sql = "SELECT * FROM HistorialAlarma WHERE idHistorialAlarma = (select MAX(idHistorialAlarma) from HistorialAlarma)";
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ResultSet rs = ps.executeQuery();
                try {
                    while (rs.next()) {
                        historialAlarma = mapRow(rs);
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
            // connPool.closeConnection(conn);
        } catch (Exception a) {
            a.printStackTrace();
            System.out.print("Error en conexion BD: GestorHistorialAlarma !!! (getUltimo)");
        }
        return historialAlarma;
    }

    /**
     * @return the HistorialAlarma with the given id, or an empty instance if not
     *         found or on error.
     */
    public Object getUno(int idObjeto) {
        HistorialAlarma historialAlarma = new HistorialAlarma();
        try {
            conn = conexion.getConnection();
            String sql = "SELECT * FROM HistorialAlarma where idHistorialAlarma = ? ";
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ps.setInt(1, idObjeto);
                ResultSet rs = ps.executeQuery();
                try {
                    while (rs.next()) {
                        // FIX: no longer appended to the shared `list` field — that grew
                        // without bound and was never read by callers of this method.
                        historialAlarma = mapRow(rs);
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
            // connPool.closeConnection(conn);
        } catch (Exception a) {
            a.printStackTrace();
            System.out.print("Error en conexion BD: GestorHistorialAlarma! (getUno)");
        }
        return historialAlarma;
    }

    /**
     * @return the HistorialAlarma with the given id belonging to the given alarm,
     *         or an empty instance if not found or on error.
     */
    public Object getUno(int idObjeto, int idAlarma) {
        HistorialAlarma historialAlarma = new HistorialAlarma();
        try {
            conn = conexion.getConnection();
            String sql = "SELECT * FROM HistorialAlarma where idHistorialAlarma = ? and idAlarma = ?";
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ps.setInt(1, idObjeto);
                ps.setInt(2, idAlarma);
                ResultSet rs = ps.executeQuery();
                try {
                    while (rs.next()) {
                        historialAlarma = mapRow(rs);
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
            // connPool.closeConnection(conn);
        } catch (Exception a) {
            a.printStackTrace();
            System.out.print("Error en conexion BD: GestorHistorialAlarma! (getUno)");
        }
        return historialAlarma;
    }

    /**
     * Inserts one HistorialAlarma row.
     *
     * @param object a HistorialAlarma (cast; ClassCastException if not)
     * @return number of rows inserted (0 on error)
     */
    public int insertUno(Object object) {
        HistorialAlarma historialAlarma = (HistorialAlarma) object;
        int resultado = 0;
        try {
            conn = conexion.getConnection();
            String sql = "INSERT INTO HistorialAlarma (idHistorialAlarma, idAlarma, "
                    + " fecha, valor, idCriticidad, acciones) "
                    + " values (?,?,?,?,?,?)";
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ps.setInt(1, historialAlarma.getNumero());
                ps.setInt(2, historialAlarma.getIdAlarma());
                ps.setDate(3, UtilFecha.convertiFecha(historialAlarma.getFecha()));
                ps.setDouble(4, historialAlarma.getValor());
                ps.setInt(5, historialAlarma.getCriticidad().getNumero());
                ps.setBoolean(6, historialAlarma.isAcciones());
                resultado = ps.executeUpdate();
            } finally {
                ps.close();
            }
            // connPool.closeConnection(conn);
        } catch (Exception a) {
            a.printStackTrace();
            System.out.print("Error en conexion BD: GestorHistorialAlarma !!! (insertUno)");
        }
        return resultado;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.http4;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.Serializable;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.Map;

import javax.activation.DataHandler;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.InvalidPayloadException;
import org.apache.camel.Message;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.StreamCache;
import org.apache.camel.component.http4.helper.CamelFileDataSource;
import org.apache.camel.component.http4.helper.HttpHelper;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.GZIPHelper;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.MessageHelper;
import org.apache.camel.util.ObjectHelper;

/**
 * Binding between {@link HttpMessage} and {@link HttpServletResponse}.
 *
 * @version
 */
public class DefaultHttpBinding implements HttpBinding {

    private boolean useReaderForPayload;
    private HeaderFilterStrategy headerFilterStrategy = new HttpHeaderFilterStrategy();
    private HttpEndpoint endpoint;

    @Deprecated
    public DefaultHttpBinding() {
    }

    @Deprecated
    public DefaultHttpBinding(HeaderFilterStrategy headerFilterStrategy) {
        this.headerFilterStrategy = headerFilterStrategy;
    }

    public DefaultHttpBinding(HttpEndpoint endpoint) {
        this.endpoint = endpoint;
        this.headerFilterStrategy = endpoint.getHeaderFilterStrategy();
    }

    /**
     * Populates the Camel message from the servlet request: headers, request
     * parameters, HTTP method/query/URI/path/content-type, attachments, and —
     * for serialized-Java-object content — the deserialized body.
     */
    public void readRequest(HttpServletRequest request, HttpMessage message) {
        // lets force a parse of the body and headers
        message.getBody();
        // populate the headers from the request
        Map<String, Object> headers = message.getHeaders();

        // apply the headerFilterStrategy
        Enumeration names = request.getHeaderNames();
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            Object value = request.getHeader(name);
            // mapping the content-type
            if (name.toLowerCase().equals("content-type")) {
                name = Exchange.CONTENT_TYPE;
            }
            if (headerFilterStrategy != null
                    && !headerFilterStrategy.applyFilterToExternalHeaders(name, value, message.getExchange())) {
                headers.put(name, value);
            }
        }

        if (request.getCharacterEncoding() != null) {
            headers.put(Exchange.HTTP_CHARACTER_ENCODING, request.getCharacterEncoding());
            message.getExchange().setProperty(Exchange.CHARSET_NAME, request.getCharacterEncoding());
        }

        try {
            populateRequestParameters(request, message);
        } catch (Exception e) {
            throw new RuntimeCamelException("Cannot read request parameters due " + e.getMessage(), e);
        }

        Object body = message.getBody();
        // reset the stream cache if the body is the instance of StreamCache
        if (body instanceof StreamCache) {
            ((StreamCache) body).reset();
        }

        // store the method and query and other info in headers
        headers.put(Exchange.HTTP_METHOD, request.getMethod());
        headers.put(Exchange.HTTP_QUERY, request.getQueryString());
        headers.put(Exchange.HTTP_URL, request.getRequestURL());
        headers.put(Exchange.HTTP_URI, request.getRequestURI());
        headers.put(Exchange.HTTP_PATH, request.getPathInfo());
        headers.put(Exchange.CONTENT_TYPE, request.getContentType());

        // if content type is serialized java object, then de-serialize it to a Java object
        if (request.getContentType() != null
                && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(request.getContentType())) {
            try {
                InputStream is = endpoint.getCamelContext().getTypeConverter().mandatoryConvertTo(InputStream.class, body);
                Object object = HttpHelper.deserializeJavaObjectFromStream(is);
                if (object != null) {
                    message.setBody(object);
                }
            } catch (Exception e) {
                throw new RuntimeCamelException("Cannot deserialize body to Java object", e);
            }
        }

        populateAttachments(request, message);
    }

    /**
     * Copies servlet request parameters into the message headers (subject to the
     * header filter strategy); for url-encoded POSTs, also parses the body into
     * headers to retain compatibility with DefaultHttpBinding.
     */
    protected void populateRequestParameters(HttpServletRequest request, HttpMessage message) throws Exception {
        // we populate the http request parameters without checking the request method
        Map<String, Object> headers = message.getHeaders();
        Enumeration names = request.getParameterNames();
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            Object value = request.getParameter(name);
            if (headerFilterStrategy != null
                    && !headerFilterStrategy.applyFilterToExternalHeaders(name, value, message.getExchange())) {
                headers.put(name, value);
            }
        }

        if (request.getMethod().equals("POST") && request.getContentType() != null
                && request.getContentType().startsWith(HttpConstants.CONTENT_TYPE_WWW_FORM_URLENCODED)) {
            String charset = request.getCharacterEncoding();
            if (charset == null) {
                charset = "UTF-8";
            }
            // Push POST form params into the headers to retain compatibility with DefaultHttpBinding
            String body = message.getBody(String.class);
            for (String param : body.split("&")) {
                String[] pair = param.split("=", 2);
                if (pair.length == 2) {
                    String name = URLDecoder.decode(pair[0], charset);
                    String value = URLDecoder.decode(pair[1], charset);
                    if (headerFilterStrategy != null
                            && !headerFilterStrategy.applyFilterToExternalHeaders(name, value, message.getExchange())) {
                        headers.put(name, value);
                    }
                } else {
                    throw new IllegalArgumentException("Invalid parameter, expected to be a pair but was " + param);
                }
            }
        }
    }

    /**
     * Turns multipart file attributes on the request into message attachments
     * wrapped in {@link DataHandler}s.
     */
    protected void populateAttachments(HttpServletRequest request, HttpMessage message) {
        // check if there is multipart files, if so will put it into DataHandler
        Enumeration names = request.getAttributeNames();
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            Object object = request.getAttribute(name);
            if (object instanceof File) {
                String fileName = request.getParameter(name);
                message.addAttachment(fileName, new DataHandler(new CamelFileDataSource((File) object, fileName)));
            }
        }
    }

    /**
     * Writes the exchange result to the servlet response: exception, fault, or
     * normal OUT message (with protocol headers copied from IN).
     */
    public void writeResponse(Exchange exchange, HttpServletResponse response) throws IOException {
        if (exchange.isFailed()) {
            if (exchange.getException() != null) {
                doWriteExceptionResponse(exchange.getException(), response);
            } else {
                // it must be a fault, no need to check for the fault flag on the message
                doWriteFaultResponse(exchange.getOut(), response, exchange);
            }
        } else {
            // just copy the protocol relates header
            copyProtocolHeaders(exchange.getIn(), exchange.getOut());
            Message out = exchange.getOut();
            if (out != null) {
                doWriteResponse(out, response, exchange);
            }
        }
    }

    // Copies Content-Encoding from the request message and sets Transfer-Encoding:
    // chunked when the chunked option applies.
    private void copyProtocolHeaders(Message request, Message response) {
        if (request.getHeader(Exchange.CONTENT_ENCODING) != null) {
            String contentEncoding = request.getHeader(Exchange.CONTENT_ENCODING, String.class);
            response.setHeader(Exchange.CONTENT_ENCODING, contentEncoding);
        }
        if (checkChunked(response, response.getExchange())) {
            response.setHeader(Exchange.TRANSFER_ENCODING, "chunked");
        }
    }

    /**
     * Writes an exception as the response: serialized Java object when the
     * endpoint transfers exceptions, otherwise a plain-text stacktrace with
     * status 500.
     */
    public void doWriteExceptionResponse(Throwable exception, HttpServletResponse response) throws IOException {
        // 500 for internal server error
        response.setStatus(500);

        if (endpoint != null && endpoint.isTransferException()) {
            // transfer the exception as a serialized java object
            HttpHelper.writeObjectToServletResponse(response, exception);
        } else {
            // write stacktrace as plain text
            response.setContentType("text/plain");
            PrintWriter pw = response.getWriter();
            exception.printStackTrace(pw);
            pw.flush();
        }
    }

    public void doWriteFaultResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        doWriteResponse(message, response, exchange);
    }

    /**
     * Writes a message as the response: status code, content type, filtered
     * headers, then the body (gzipped or direct).
     */
    public void doWriteResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        // set the status code in the response. Default is 200.
        if (message.getHeader(Exchange.HTTP_RESPONSE_CODE) != null) {
            int code = message.getHeader(Exchange.HTTP_RESPONSE_CODE, Integer.class);
            response.setStatus(code);
        }
        // set the content type in the response.
        String contentType = MessageHelper.getContentType(message);
        if (MessageHelper.getContentType(message) != null) {
            response.setContentType(contentType);
        }

        // append headers
        for (String key : message.getHeaders().keySet()) {
            String value = message.getHeader(key, String.class);
            if (headerFilterStrategy != null
                    && !headerFilterStrategy.applyFilterToCamelHeaders(key, value, exchange)) {
                response.setHeader(key, value);
            }
        }

        // write the body.
        if (message.getBody() != null) {
            if (GZIPHelper.isGzip(message)) {
                doWriteGZIPResponse(message, response, exchange);
            } else {
                doWriteDirectResponse(message, response, exchange);
            }
        }
    }

    /**
     * Writes the body without gzip: as a serialized Java object when the content
     * type requests it, streamed when chunked applies, otherwise as a String.
     */
    protected void doWriteDirectResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        // if content type is serialized Java object, then serialize and write it to the response
        String contentType = message.getHeader(Exchange.CONTENT_TYPE, String.class);
        if (contentType != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(contentType)) {
            try {
                Object object = message.getMandatoryBody(Serializable.class);
                HttpHelper.writeObjectToServletResponse(response, object);
                // object is written so return
                return;
            } catch (InvalidPayloadException e) {
                throw new IOException(e);
            }
        }

        // other kind of content type
        InputStream is = null;
        if (checkChunked(message, exchange)) {
            is = message.getBody(InputStream.class);
        }
        if (is != null) {
            ServletOutputStream os = response.getOutputStream();
            try {
                // copy directly from input stream to output stream
                IOHelper.copy(is, os);
            } finally {
                IOHelper.close(os);
                IOHelper.close(is);
            }
        } else {
            // not convertable as a stream so try as a String
            String data = message.getBody(String.class);
            if (data != null) {
                // FIX: Content-Length must be the encoded BYTE count. The previous
                // data.length() is a char count and under-reports the length for any
                // multi-byte character in the chosen charset, truncating the response.
                String charset = response.getCharacterEncoding();
                byte[] dataBytes = charset != null ? data.getBytes(charset) : data.getBytes();
                // set content length before we write data
                response.setContentLength(dataBytes.length);
                response.getWriter().print(data);
                response.getWriter().flush();
            }
        }
    }

    /**
     * Decides whether the response should be chunked: explicit HTTP_CHUNKED
     * header wins, otherwise the endpoint's chunked option (default true).
     */
    protected boolean checkChunked(Message message, Exchange exchange) {
        boolean answer = true;
        if (message.getHeader(Exchange.HTTP_CHUNKED) == null) {
            // check the endpoint option
            Endpoint endpoint = exchange.getFromEndpoint();
            if (endpoint instanceof HttpEndpoint) {
                answer = ((HttpEndpoint) endpoint).isChunked();
            }
        } else {
            answer = message.getHeader(Exchange.HTTP_CHUNKED, boolean.class);
        }
        return answer;
    }

    /**
     * Gzip-compresses the mandatory byte[] body and writes it with the
     * compressed length as Content-Length.
     */
    protected void doWriteGZIPResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        byte[] bytes;
        try {
            bytes = message.getMandatoryBody(byte[].class);
        } catch (InvalidPayloadException e) {
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }

        byte[] data = GZIPHelper.compressGZIP(bytes);
        ServletOutputStream os = response.getOutputStream();
        try {
            response.setContentLength(data.length);
            os.write(data);
            os.flush();
        } finally {
            IOHelper.close(os);
        }
    }

    /**
     * Reads the request body: null for GET (no input stream), a Reader when
     * configured, otherwise the servlet input stream via HttpHelper.
     */
    public Object parseBody(HttpMessage httpMessage) throws IOException {
        // lets assume the body is a reader
        HttpServletRequest request = httpMessage.getRequest();
        // Need to handle the GET Method which has no inputStream
        if ("GET".equals(request.getMethod())) {
            return null;
        }
        if (isUseReaderForPayload()) {
            // use reader to read the response body
            return request.getReader();
        } else {
            // reade the response body from servlet request
            return HttpHelper.readResponseBodyFromServletRequest(request, httpMessage.getExchange());
        }
    }

    public boolean isUseReaderForPayload() {
        return useReaderForPayload;
    }

    public void setUseReaderForPayload(boolean useReaderForPayload) {
        this.useReaderForPayload = useReaderForPayload;
    }

    public HeaderFilterStrategy getHeaderFilterStrategy() {
        return headerFilterStrategy;
    }

    public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
        this.headerFilterStrategy = headerFilterStrategy;
    }
}
package it.michelelacorte.androidshortcuts; import android.annotation.TargetApi; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.pm.LauncherApps; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.content.pm.ShortcutInfo; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.drawable.Drawable; import android.os.Environment; import android.os.UserHandle; import android.support.annotation.RequiresApi; import android.support.v4.app.ActivityCompat; import android.support.v4.content.ContextCompat; import android.util.DisplayMetrics; import android.util.Log; import java.io.ByteArrayOutputStream; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutput; import java.io.ObjectOutputStream; import java.io.StreamCorruptedException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import it.michelelacorte.androidshortcuts.util.Utils; /** * Created by Michele on 10/01/2017. 
*/ /** * Remote Shortcuts class provide method to serialize and deserialize shortcuts for save/get shortcuts from different apps */ public class RemoteShortcuts { private static final String TAG = "RemoteShorctus"; public static boolean USE_SHORTCUTS_FROM_API_25 = false; /** * Save shortcuts on file * @param activity Activity * @param listOfShortcuts ArrayList<Shortcuts> */ public static void saveRemoteShortcuts(Activity activity, ArrayList<Shortcuts> listOfShortcuts){ String fileName = activity.getPackageName() + "/shortcut.shc"; ObjectOutput out = null; if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) { checkPermission(activity); } try { File file = new File(Environment.getExternalStorageDirectory() + "/Shortcuts/"+fileName); file.getParentFile().mkdirs(); file.createNewFile(); out = new ObjectOutputStream(new FileOutputStream(file, false)); for(Shortcuts shortcuts : listOfShortcuts){ if(shortcuts.getShortcutsText() != null) { out.writeUTF(shortcuts.getShortcutsText()); }if(shortcuts.getShortcutsImage() != 0) { Bitmap image = BitmapFactory.decodeResource(activity.getResources(), shortcuts.getShortcutsImage()); final ByteArrayOutputStream stream = new ByteArrayOutputStream(); image.compress(Bitmap.CompressFormat.PNG, 100, stream); final byte[] imageByteArray = stream.toByteArray(); out.writeInt(imageByteArray.length); out.write(imageByteArray); }else if (shortcuts.getShortcutsImageBitmap() != null){ final ByteArrayOutputStream stream = new ByteArrayOutputStream(); shortcuts.getShortcutsImageBitmap().compress(Bitmap.CompressFormat.PNG, 100, stream); final byte[] imageByteArray = stream.toByteArray(); out.writeInt(imageByteArray.length); out.write(imageByteArray); } if(shortcuts.getTargetPackage() != null && shortcuts.getTargetClass() != null){ out.writeUTF(shortcuts.getTargetPackage()); out.writeUTF(shortcuts.getTargetClass()); }else{ out.writeUTF(activity.getPackageName()); 
out.writeUTF(activity.getPackageName()+"."+activity.getLocalClassName()); } } out.close(); Log.d(TAG, "Shortcuts saved into: " + Environment.getExternalStorageDirectory() + "/Shortcuts/"+fileName); } catch (FileNotFoundException e) { Log.e(TAG, e.toString()); } catch (IOException e) { Log.e(TAG, e.toString()); } } /** * Get shortcuts from file * @param activity Activity * @return ArrayList<Shotrcuts> */ public static ArrayList<Shortcuts> getRemoteShortcuts(Activity activity, String packageName){ String fileName = packageName + "/shortcut.shc"; ObjectInputStream input; ArrayList<Shortcuts> listOfShortcuts = new ArrayList<>(); if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) { checkPermission(activity); } try { input = new ObjectInputStream(new FileInputStream(Environment.getExternalStorageDirectory() + "/Shortcuts/"+fileName)); try{ while(true) { String shortcutsText = input.readUTF(); final int length = input.readInt(); final byte[] imageByteArray = new byte[length]; input.readFully(imageByteArray); Bitmap shortcutsImage = BitmapFactory.decodeByteArray(imageByteArray, 0, length); String targetPackage = input.readUTF(); String targetClass = input.readUTF(); listOfShortcuts.add(new Shortcuts(shortcutsImage, shortcutsText, targetClass, targetPackage)); } }catch (EOFException e){} input.close(); } catch (StreamCorruptedException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } Log.d(TAG, "Shortcuts getted from: " + Environment.getExternalStorageDirectory() + "/Shortcuts/"+fileName); return listOfShortcuts; } /** * This method get shortcuts defined by App in Android 7.1 Nougat (API 25), returned shortcuts are sorted by rank * in according to Google Doc * @param activity Activity * @param targetPackageName String * @param uid int * @return ArrayList<Shortcuts> * @throws Exception */ @TargetApi(25) @RequiresApi(25) public static ArrayList<Shortcuts> 
getRemoteShortcutsOnAPI25(Activity activity, String targetPackageName, int uid) throws Exception { LauncherApps launcherApps = (LauncherApps) activity.getApplicationContext().getSystemService(Context.LAUNCHER_APPS_SERVICE); if (!launcherApps.hasShortcutHostPermission()) { Log.e(TAG, "Don't have permission, you may need set this app as default launcher!"); throw new Exception("Don't have permission, you may need set this app as default launcher!"); } PackageManager packageManager = activity.getPackageManager(); Intent mainIntent = new Intent(Intent.ACTION_MAIN, null); mainIntent.addCategory(Intent.CATEGORY_LAUNCHER); List<ResolveInfo> resolveInfoList; if (packageManager == null || (resolveInfoList = packageManager.queryIntentActivities(mainIntent, 0)) == null) { Log.e(TAG, "No Main and Launcher Activity!"); throw new Exception("No Main and Launcher Activity!"); } ArrayList<Shortcuts> shortcutsArrayList = new ArrayList<>(); int queryFlags = LauncherApps.ShortcutQuery.FLAG_MATCH_DYNAMIC | LauncherApps.ShortcutQuery.FLAG_MATCH_MANIFEST | LauncherApps.ShortcutQuery.FLAG_MATCH_PINNED; List<ShortcutInfo> shortcutInfoList = launcherApps.getShortcuts( new LauncherApps.ShortcutQuery().setPackage(targetPackageName).setQueryFlags(queryFlags), UserHandle.getUserHandleForUid(uid)); for (int j = 0; j < shortcutInfoList.size(); j++) { if (shortcutInfoList.get(j) != null) { try { //Get shortcuts text (short label) String shortcutsText = shortcutInfoList.get(j).getShortLabel().toString(); //Get packageName String packageName = shortcutInfoList.get(j).getActivity().getPackageName(); //Get className String className = shortcutInfoList.get(j).getActivity().getClassName(); //Get display metrics and get shortcuts drawable DisplayMetrics metrics = activity.getResources().getDisplayMetrics(); Drawable shortcutsImage = launcherApps.getShortcutIconDrawable(shortcutInfoList.get(j), metrics.densityDpi); Bitmap shortcutsImageBitmap = Utils.convertDrawableToBitmap(shortcutsImage); //Get image 
badge with density adjust Drawable shortcutsImageBadged = launcherApps.getShortcutBadgedIconDrawable(shortcutInfoList.get(j), metrics.densityDpi); Bitmap shortcutsImageBadgedBitmap = Utils.convertDrawableToBitmap(shortcutsImageBadged); //Get rank to order list int rank = shortcutInfoList.get(j).getRank(); //Initialize shortcuts shortcutsArrayList.add(new Shortcuts(shortcutsImageBitmap, shortcutsImageBadgedBitmap, shortcutsText, className, packageName, rank)); } catch (Exception e) { Log.e(TAG, e.toString()); } } } USE_SHORTCUTS_FROM_API_25 = true; //Order by rank (lowest to highest) Collections.sort(shortcutsArrayList, new Comparator<Shortcuts>() { @Override public int compare(Shortcuts shortcuts, Shortcuts shortcuts1) { return shortcuts1.getRank() - shortcuts.getRank(); } }); //return sorted arraylist return shortcutsArrayList; } /** * Check if user had permission * @param activity Activity */ private static void checkPermission(Activity activity) { int result = ContextCompat.checkSelfPermission(activity, android.Manifest.permission.WRITE_EXTERNAL_STORAGE); if (result != PackageManager.PERMISSION_GRANTED) { requestPermission(activity); } } /** * Make request permission * @param activity Activity */ private static void requestPermission(Activity activity) { if (ActivityCompat.shouldShowRequestPermissionRationale(activity, android.Manifest.permission.WRITE_EXTERNAL_STORAGE)) { Log.d(TAG, "Write External Storage permission allows us to do store shortcuts data. Please allow this permission in App Settings."); } else { ActivityCompat.requestPermissions(activity, new String[]{android.Manifest.permission.WRITE_EXTERNAL_STORAGE}, 111); Log.d(TAG, "Write External Storage permission allows us to do store shortcuts data."); } } }
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.xdebugger.impl.frame; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.ide.dnd.DnDEvent; import com.intellij.ide.dnd.DnDManager; import com.intellij.ide.dnd.DnDNativeTarget; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.CompositeDisposable; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.impl.ActionToolbarImpl; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.editor.ex.FocusChangeListener; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.ui.ComboBox; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.EmptyRunnable; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.registry.Registry; import com.intellij.ui.*; import com.intellij.ui.border.CustomLineBorder; import com.intellij.util.Alarm; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.components.BorderLayoutPanel; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.xdebugger.XDebugSession; import com.intellij.xdebugger.XDebuggerBundle; import com.intellij.xdebugger.XDebuggerManager; import com.intellij.xdebugger.XExpression; import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider; import com.intellij.xdebugger.frame.XStackFrame; import com.intellij.xdebugger.impl.XDebugSessionImpl; import com.intellij.xdebugger.impl.XDebuggerManagerImpl; 
import com.intellij.xdebugger.impl.XDebuggerUtilImpl;
import com.intellij.xdebugger.impl.XDebuggerWatchesManager;
import com.intellij.xdebugger.impl.actions.XDebuggerActions;
import com.intellij.xdebugger.impl.breakpoints.XExpressionImpl;
import com.intellij.xdebugger.impl.evaluate.DebuggerEvaluationStatisticsCollector;
import com.intellij.xdebugger.impl.evaluate.XDebuggerEvaluationDialog;
import com.intellij.xdebugger.impl.frame.actions.XToggleEvaluateExpressionFieldAction;
import com.intellij.xdebugger.impl.inline.InlineWatch;
import com.intellij.xdebugger.impl.inline.InlineWatchNode;
import com.intellij.xdebugger.impl.inline.InlineWatchesRootNode;
import com.intellij.xdebugger.impl.inline.XInlineWatchesView;
import com.intellij.xdebugger.impl.ui.*;
import com.intellij.xdebugger.impl.ui.tree.XDebuggerTree;
import com.intellij.xdebugger.impl.ui.tree.actions.XWatchTransferable;
import com.intellij.xdebugger.impl.ui.tree.nodes.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.event.*;
import java.util.List;
import java.util.*;

/**
 * Debugger "Watches" view: a tree of watch expressions (optionally merged into
 * the Variables view), an inline "evaluate expression" combo box on top, and
 * drag-and-drop support for adding watches from value nodes or plain text.
 */
public class XWatchesViewImpl extends XVariablesView implements DnDNativeTarget, XWatchesView, XInlineWatchesView {
  // Root of the watches subtree; recreated on each session/frame change.
  protected WatchesRootNode myRootNode;
  // Inline evaluate-expression field shown above the tree; null when the
  // editors provider disables the evaluate field.
  private XDebuggerExpressionComboBox myEvaluateComboBox;
  // Listeners/targets registered on the tree; released in dispose().
  private final CompositeDisposable myDisposables = new CompositeDisposable();
  private final boolean myWatchesInVariables;
  private final boolean inlineWatchesEnabled;

  public XWatchesViewImpl(@NotNull XDebugSessionImpl session, boolean watchesInVariables) {
    this(session, watchesInVariables, watchesInVariables);
  }

  protected XWatchesViewImpl(@NotNull XDebugSessionImpl session, boolean watchesInVariables, boolean vertical) {
    this(session, watchesInVariables, vertical, true);
  }

  /**
   * Main constructor: builds the root node, wires watch actions and shortcuts,
   * registers the DnD target and paste handler, and optionally adds the toolbar.
   */
  public XWatchesViewImpl(@NotNull XDebugSessionImpl session, boolean watchesInVariables, boolean vertical, boolean withToolbar) {
    super(session);
    myWatchesInVariables = watchesInVariables;
    inlineWatchesEnabled = Registry.is("debugger.watches.inline.enabled");
    XDebuggerTree tree = getTree();
    createNewRootNode(null);

    DebuggerUIUtil.registerActionOnComponent(XDebuggerActions.XNEW_WATCH, tree, myDisposables);
    DebuggerUIUtil.registerActionOnComponent(XDebuggerActions.XREMOVE_WATCH, tree, myDisposables);
    DebuggerUIUtil.registerActionOnComponent(XDebuggerActions.XCOPY_WATCH, tree, myDisposables);
    DebuggerUIUtil.registerActionOnComponent(XDebuggerActions.XEDIT_WATCH, tree, myDisposables);

    EmptyAction.registerWithShortcutSet(XDebuggerActions.XNEW_WATCH, CommonShortcuts.getNew(), tree);
    EmptyAction.registerWithShortcutSet(XDebuggerActions.XREMOVE_WATCH, CommonShortcuts.getDelete(), tree);

    DnDManager.getInstance().registerTarget(this, tree);

    // Paste handler: pasted XExpressions from the clipboard become new watches.
    new AnAction() {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        Object contents = CopyPasteManager.getInstance().getContents(XWatchTransferable.EXPRESSIONS_FLAVOR);
        if (contents instanceof List) {
          for (Object item : ((List)contents)){
            if (item instanceof XExpression) {
              addWatchExpression(((XExpression)item), -1, true);
            }
          }
        }
      }
    }.registerCustomShortcutSet(CommonShortcuts.getPaste(), tree, myDisposables);

    if (withToolbar) {
      ActionToolbarImpl toolbar = (ActionToolbarImpl)ActionManager.getInstance().createActionToolbar(
        ActionPlaces.DEBUGGER_TOOLBAR,
        DebuggerSessionTabBase.getCustomizedActionGroup(XDebuggerActions.WATCHES_TREE_TOOLBAR_GROUP),
        !vertical);
      // vertical layout -> toolbar on the left with a right border; horizontal -> on top with a bottom border
      toolbar.setBorder(new CustomLineBorder(CaptionPanel.CNT_ACTIVE_BORDER_COLOR, 0, 0, vertical ? 0 : 1, vertical ? 1 : 0));
      toolbar.setTargetComponent(tree);
      getPanel().add(toolbar.getComponent(), vertical ? BorderLayout.WEST : BorderLayout.NORTH);
    }

    if (!myWatchesInVariables) {
      getTree().getEmptyText().setText(XDebuggerBundle.message("debugger.no.watches"));
    }

    installEditListeners();

    if (!ApplicationManager.getApplication().isUnitTestMode()) {
      fixEditorNotReleasedFalsePositiveException(session);
    }
  }

  // Workaround for IDEA-273987, IDEA-278153.
  // Should be removed after IDEA-285001 is fixed
  private void fixEditorNotReleasedFalsePositiveException(@NotNull XDebugSessionImpl session) {
    Optional.ofNullable(myEvaluateComboBox)
      .map(XDebuggerExpressionComboBox::getEditorComponent)
      .map(component -> ObjectUtils.tryCast(component, EditorTextField.class))
      .ifPresent(field -> {
        var disposable = Disposer.newDisposable("XWatchesView Disposable");
        Disposer.register(this, () -> {
          // In case the project is closing this block is called
          // from the BaseContentCloseListener#disposeContent
          // and then removes editor with EditorComboBox#releaseLater.
          // The latter causes a false-positive exception (IDEA-273987) that editor is not released
          // when validation is running (see IDEA-285001).
          // Until IDEA-285001 is fixed this one is scheduled for next EDT call
          // to let Disposer.register(session.getProject(), disposable) dispose an editor
          // with correct way when project is closed.
          // If this one is triggered because XWatchesViewImpl is closed
          // then it's ok to release it later.
          ApplicationManager.getApplication().invokeLater(() -> Disposer.dispose(disposable));
        });
        // Dispose editor when project is closed with a custom disposable.
        Disposer.register(session.getProject(), disposable);
        // Blocks default behaviour on add/remove component disposal/
        field.setDisposedWith(disposable);
      });
  }

  /**
   * Adds the evaluate-expression field on top of the locals panel when it is
   * enabled; otherwise falls back to the superclass layout.
   */
  @Override
  protected JPanel createMainPanel(@NotNull JComponent localsPanelComponent) {
    var top = createTopPanel();
    if (top == null) {
      return super.createMainPanel(localsPanelComponent);
    }
    var layout = localsPanelComponent.getLayout();
    boolean canAddComponentToTheRightOfToolbar = layout instanceof BorderLayout;
    if (canAddComponentToTheRightOfToolbar) {
      // re-wrap the center component so the field sits beside the toolbar
      var panel = new BorderLayoutPanel()
        .addToCenter(((BorderLayout)layout).getLayoutComponent(BorderLayout.CENTER))
        .addToTop(top);
      localsPanelComponent.add(panel, BorderLayout.CENTER);
      return super.createMainPanel(localsPanelComponent);
    }
    else {
      return new BorderLayoutPanel()
        .addToCenter(localsPanelComponent)
        .addToTop(top);
    }
  }

  /**
   * Builds the evaluate-expression combo box with its embedded "add to watches"
   * toolbar, Enter handling and history popup statistics. Returns null when the
   * editors provider disables the field.
   */
  private @Nullable JComponent createTopPanel() {
    //if (Registry.is("debugger.new.tool.window.layout")) {
    XDebuggerTree tree = getTree();
    Ref<AnAction> addToWatchesActionRef = new Ref<>();
    XDebuggerEditorsProvider provider = tree.getEditorsProvider();
    if (!provider.isEvaluateExpressionFieldEnabled()) {
      return null;
    }
    myEvaluateComboBox = new XDebuggerExpressionComboBox(tree.getProject(), provider, "evaluateExpression", null, false, true) {
      @Override
      protected ComboBox<XExpression> createComboBox(CollectionComboBoxModel<XExpression> model, int width) {
        AnAction addToWatchesAction = new DumbAwareAction(ActionsBundle.actionText(XDebuggerActions.ADD_TO_WATCH), null, AllIcons.Debugger.AddToWatch) {
          @Override
          public void actionPerformed(@NotNull AnActionEvent e) {
            myEvaluateComboBox.saveTextInHistory();
            addWatchExpression(getExpression(), -1, false);
            DebuggerEvaluationStatisticsCollector.WATCH_FROM_INLINE_ADD.log(e);
          }

          @Override
          public void update(@NotNull AnActionEvent e) {
            // only enable when there is a non-empty expression to add
            e.getPresentation().setEnabled(!XDebuggerUtilImpl.isEmptyExpression(getExpression()));
          }
        };
        ActionToolbarImpl toolbar = (ActionToolbarImpl)ActionManager.getInstance()
          .createActionToolbar("DebuggerVariablesEvaluate", new DefaultActionGroup(addToWatchesAction), true);
        addToWatchesActionRef.set(addToWatchesAction);
        toolbar.setOpaque(false);
        toolbar.setReservePlaceAutoPopupIcon(false);
        toolbar.setTargetComponent(tree);
        XDebuggerEmbeddedComboBox<XExpression> comboBox = new XDebuggerEmbeddedComboBox<>(model, width);
        comboBox.setExtension(toolbar);
        return comboBox;
      }

      @Override
      protected void prepareEditor(EditorEx editor) {
        super.prepareEditor(editor);
        editor.setPlaceholder(XDebuggerBundle.message(
          "debugger.evaluate.expression.or.add.a.watch.hint",
          KeymapUtil.getShortcutText(new KeyboardShortcut(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), null)),
          KeymapUtil.getShortcutText(new KeyboardShortcut(XDebuggerEvaluationDialog.ADD_WATCH_KEYSTROKE, null))
        ));
        editor.addFocusListener(new FocusChangeListener() {
          // causes considered "real" user focus (not popup-driven focus moves)
          private final Set<FocusEvent.Cause> myCauses = Set.of(
            FocusEvent.Cause.UNKNOWN, FocusEvent.Cause.TRAVERSAL_FORWARD, FocusEvent.Cause.TRAVERSAL_BACKWARD
          );

          @Override
          public void focusGained(@NotNull Editor editor, @NotNull FocusEvent event) {
            if (myCauses.contains(event.getCause())) {
              boolean shouldBeIgnored = myEvaluateComboBox.getComboBox().isPopupVisible();
              if (!shouldBeIgnored) {
                DebuggerEvaluationStatisticsCollector.INPUT_FOCUS.log(getTree().getProject());
              }
            }
          }
        });
      }
    };
    final JComponent editorComponent = myEvaluateComboBox.getEditorComponent();
    editorComponent.getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT)
      .put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), "enterStroke");
    editorComponent.getActionMap().put("enterStroke", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        // This listener overrides one from BasicComboBoxUI$Actions
        // Close popup manually instead of default handler
        if (myEvaluateComboBox.getComboBox().isPopupVisible()) {
          myEvaluateComboBox.getComboBox().setPopupVisible(false);
        }
        else {
          addExpressionResultNode();
        }
      }
    });
    myEvaluateComboBox.getComboBox().addPopupMenuListener(new PopupMenuListenerAdapter() {
      // remembers the selection when the history popup opens, to log only real changes
      private int selectedIndexOnPopupOpen = -1;

      @Override
      public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
        selectedIndexOnPopupOpen = myEvaluateComboBox.getComboBox().getSelectedIndex();
        myEvaluateComboBox.requestFocusInEditor();
        DebuggerEvaluationStatisticsCollector.HISTORY_SHOW.log(getTree().getProject());
      }

      @Override
      public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
        if (myEvaluateComboBox.getComboBox().getSelectedIndex() != selectedIndexOnPopupOpen) {
          DebuggerEvaluationStatisticsCollector.HISTORY_CHOOSE.log(getTree().getProject());
        }
      }
    });
    addToWatchesActionRef.get()
      .registerCustomShortcutSet(new CustomShortcutSet(XDebuggerEvaluationDialog.ADD_WATCH_KEYSTROKE), editorComponent);
    JComponent component = myEvaluateComboBox.getComponent();
    //component.setBackground(tree.getBackground());
    component.setBorder(JBUI.Borders.customLine(JBColor.border(), 0, 0, 1, 0));
    if (!Registry.is("debugger.new.tool.window.layout")) {
      XToggleEvaluateExpressionFieldAction.markAsEvaluateExpressionField(component);
    }
    return component;
    //}
    //return null;
  }

  // Drop the transient evaluation-result node before any rebuild that is not a
  // pure settings change.
  @Override
  protected void beforeTreeBuild(@NotNull SessionEvent event) {
    if (event != SessionEvent.SETTINGS_CHANGED) {
      myRootNode.removeResultNode();
    }
  }

  // Evaluates the combo-box expression against the current frame and shows the
  // result as a transient node under the root.
  private void addExpressionResultNode() {
    XExpression expression = myEvaluateComboBox.getExpression();
    if (!XDebuggerUtilImpl.isEmptyExpression(expression)) {
      myEvaluateComboBox.saveTextInHistory();
      XDebugSession session = getSession(getTree());
      myRootNode.addResultNode(session != null ? session.getCurrentStackFrame() : null, expression);
      DebuggerEvaluationStatisticsCollector.INLINE_EVALUATE.log(getTree().getProject());
    }
  }

  @Override
  protected void buildTreeAndRestoreState(@NotNull XStackFrame stackFrame) {
    super.buildTreeAndRestoreState(stackFrame);
    if (myEvaluateComboBox != null) {
      // keep the evaluate field's code-completion context in sync with the frame
      myEvaluateComboBox.setSourcePosition(stackFrame.getSourcePosition());
    }
  }

  /**
   * Installs single-click-to-edit behavior on selected watch rows and
   * double-click-on-empty-space to add a new watch. The two alarms implement a
   * "quiet period" so that focus/selection changes do not trigger inline edit.
   */
  private void installEditListeners() {
    final XDebuggerTree watchTree = getTree();
    final Alarm quitePeriod = new Alarm();
    final Alarm editAlarm = new Alarm();
    final ClickListener mouseListener = new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent event, int clickCount) {
        // plain left click only: modifier keys mean selection gestures, not edit
        if (!SwingUtilities.isLeftMouseButton(event) ||
            ((event.getModifiers() & (InputEvent.SHIFT_MASK | InputEvent.ALT_MASK | InputEvent.CTRL_MASK | InputEvent.META_MASK)) !=0) ) {
          return false;
        }
        boolean sameRow = isAboveSelectedItem(event, watchTree, false);
        if (!sameRow || clickCount > 1) {
          editAlarm.cancelAllRequests();
          return false;
        }
        final AnAction editWatchAction = ActionManager.getInstance().getAction(XDebuggerActions.XEDIT_WATCH);
        Presentation presentation = editWatchAction.getTemplatePresentation().clone();
        DataContext context = DataManager.getInstance().getDataContext(watchTree);
        final AnActionEvent actionEvent = new AnActionEvent(null, context, "WATCH_TREE", presentation, ActionManager.getInstance(), 0);
        Runnable runnable = () -> editWatchAction.actionPerformed(actionEvent);
        // delay the edit so a double-click can cancel it
        if (editAlarm.isEmpty() && quitePeriod.isEmpty()) {
          editAlarm.addRequest(runnable, UIUtil.getMultiClickInterval());
        }
        else {
          editAlarm.cancelAllRequests();
        }
        return false;
      }
    };
    final ClickListener mouseEmptySpaceListener = new DoubleClickListener() {
      @Override
      protected boolean onDoubleClick(@NotNull MouseEvent event) {
        // double click below/beside the rows starts a brand-new watch
        if (!isAboveSelectedItem(event, watchTree, true)) {
          myRootNode.addNewWatch();
          return true;
        }
        return false;
      }
    };
    ListenerUtil.addClickListener(watchTree, mouseListener);
    ListenerUtil.addClickListener(watchTree, mouseEmptySpaceListener);

    final FocusListener focusListener = new FocusListener() {
      @Override
      public void focusGained(@NotNull FocusEvent e) {
        quitePeriod.addRequest(EmptyRunnable.getInstance(), UIUtil.getMultiClickInterval());
      }

      @Override
      public void focusLost(@NotNull FocusEvent e) {
        editAlarm.cancelAllRequests();
      }
    };
    ListenerUtil.addFocusListener(watchTree, focusListener);

    final TreeSelectionListener selectionListener = new TreeSelectionListener() {
      @Override
      public void valueChanged(@NotNull TreeSelectionEvent e) {
        quitePeriod.addRequest(EmptyRunnable.getInstance(), UIUtil.getMultiClickInterval());
      }
    };
    watchTree.addTreeSelectionListener(selectionListener);
    myDisposables.add(new Disposable() {
      @Override
      public void dispose() {
        ListenerUtil.removeClickListener(watchTree, mouseListener);
        ListenerUtil.removeClickListener(watchTree, mouseEmptySpaceListener);
        ListenerUtil.removeFocusListener(watchTree, focusListener);
        watchTree.removeTreeSelectionListener(selectionListener);
      }
    });
  }

  @Override
  public void dispose() {
    Disposer.dispose(myDisposables);
    DnDManager.getInstance().unregisterTarget(this, getTree());
    super.dispose();
  }

  // True when the mouse event lies over the lead selected row (optionally
  // ignoring the x offset of the row when fullWidth is set).
  private static boolean isAboveSelectedItem(MouseEvent event, XDebuggerTree watchTree, boolean fullWidth) {
    Rectangle bounds = watchTree.getRowBounds(watchTree.getLeadSelectionRow());
    if (bounds != null) {
      if (fullWidth) {
        bounds.x = 0;
      }
      bounds.width = watchTree.getWidth();
      if (bounds.contains(event.getPoint())) {
        return true;
      }
    }
    return false;
  }

  @Override
  public void addWatchExpression(@NotNull XExpression expression, int index, final boolean navigateToWatchNode) {
    addWatchExpression(expression, index, navigateToWatchNode, false);
  }

  /**
   * Adds a watch at the given index (-1 appends). With {@code noDuplicates} an
   * existing equal expression is selected instead of added again.
   */
  public void addWatchExpression(@NotNull XExpression expression, int index, final boolean navigateToWatchNode, boolean noDuplicates) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    XDebugSession session = getSession(getTree());
    boolean found = false;
    if (noDuplicates) {
      for (WatchNode child : myRootNode.getWatchChildren()) {
        if (child.getExpression().equals(expression)) {
          TreeUtil.selectNode(getTree(), child);
          found = true;
        }
      }
    }
    if (!found) {
      myRootNode.addWatchExpression(session != null ? session.getCurrentStackFrame() : null, expression, index, navigateToWatchNode);
      updateSessionData();
    }
    if (navigateToWatchNode && session != null) {
      XDebugSessionTab.showWatchesView((XDebugSessionImpl)session);
    }
  }

  public void computeWatches() {
    myRootNode.computeWatches();
  }

  // Root node factory: inline-watches-aware root when the registry flag is on.
  @Override
  protected XValueContainerNode doCreateNewRootNode(@Nullable XStackFrame stackFrame) {
    if (inlineWatchesEnabled) {
      myRootNode = new InlineWatchesRootNode(getTree(), this, getExpressions(), getInlineExpressions(), stackFrame, myWatchesInVariables);
    }
    else {
      myRootNode = new WatchesRootNode(getTree(), this, getExpressions(), stackFrame, myWatchesInVariables);
    }
    return myRootNode;
  }

  @NotNull
  private List<InlineWatch> getInlineExpressions() {
    return getWatchesManager().getInlineWatches();
  }

  private XDebuggerWatchesManager getWatchesManager() {
    return ((XDebuggerManagerImpl)XDebuggerManager.getInstance(getTree().getProject())).getWatchesManager();
  }

  @Override
  public void addInlineWatchExpression(@NotNull InlineWatch watch, int index, boolean navigateToWatchNode) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    XDebugSession session = getSession(getTree());
    ((InlineWatchesRootNode)myRootNode).addInlineWatchExpression(session != null ? session.getCurrentStackFrame() : null, watch, index, navigateToWatchNode);
    if (navigateToWatchNode && session != null) {
      XDebugSessionTab.showWatchesView((XDebugSessionImpl)session);
    }
  }

  @Override
  public void removeInlineWatches(Collection<InlineWatch> watches) {
    InlineWatchesRootNode rootNode = (InlineWatchesRootNode)myRootNode;
    @SuppressWarnings("unchecked")
    List<? extends XDebuggerTreeNode> nodesToRemove =
      (List<? extends XDebuggerTreeNode>)ContainerUtil.filter(rootNode.getInlineWatchChildren(), node -> watches.contains(node.getWatch()));
    if (!nodesToRemove.isEmpty()) {
      removeInlineNodes(nodesToRemove, false);
    }
  }

  /**
   * Removes the given inline watch nodes, re-selects a neighbor of the first
   * removed index, and optionally notifies the watches manager.
   */
  private void removeInlineNodes(List<? extends XDebuggerTreeNode> inlineWatches, boolean updateManager) {
    InlineWatchesRootNode rootNode = (InlineWatchesRootNode)myRootNode;
    List<? extends InlineWatchNode> inlineWatchChildren = rootNode.getInlineWatchChildren();
    // single-element array so the lambda below can update the minimum index
    final int[] minIndex = {Integer.MAX_VALUE};
    List<InlineWatchNode> toRemoveInlines = new ArrayList<>();
    inlineWatches.forEach((node) -> {
      int index = inlineWatchChildren.indexOf(node);
      if (index != -1) {
        toRemoveInlines.add((InlineWatchNode)node);
        minIndex[0] = Math.min(minIndex[0], index);
      }
    });
    rootNode.removeInlineChildren(toRemoveInlines);
    List<? extends InlineWatchNode> newChildren = rootNode.getInlineWatchChildren();
    if (!newChildren.isEmpty()) {
      InlineWatchNode node = newChildren.get(Math.min(minIndex[0], newChildren.size() - 1));
      TreeUtil.selectNode(getTree(), node);
    }
    if (updateManager) {
      getWatchesManager().inlineWatchesRemoved(ContainerUtil.map(toRemoveInlines, node -> node.getWatch()), this);
    }
  }

  @Override
  protected void addEmptyMessage(XValueContainerNode root) {
    // the empty text on the tree itself is used when watches are standalone
    if (myWatchesInVariables) {
      super.addEmptyMessage(root);
    }
  }

  // Watch expressions from the live session when available, otherwise from the
  // current tree root (e.g. when no session is attached).
  @NotNull
  protected List<XExpression> getExpressions() {
    XDebuggerTree tree = getTree();
    XDebugSession session = getSession(tree);
    List<XExpression> expressions;
    if (session != null) {
      expressions = ((XDebugSessionImpl)session).getSessionData().getWatchExpressions();
    }
    else {
      XDebuggerTreeNode root = tree.getRoot();
      expressions = root instanceof WatchesRootNode ? ((WatchesRootNode)root).getWatchExpressions() : Collections.emptyList();
    }
    return expressions;
  }

  @Nullable
  @Override
  public Object getData(@NotNull @NonNls String dataId) {
    if (XWatchesView.DATA_KEY.is(dataId)) {
      return this;
    }
    return super.getData(dataId);
  }

  /**
   * Removes both ordinary and inline watch nodes; after removal, selects the
   * node that took the place of the first removed one and persists the change.
   */
  @Override
  public void removeWatches(List<? extends XDebuggerTreeNode> nodes) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    List<? extends XDebuggerTreeNode> ordinaryWatches = ContainerUtil.filter(nodes, node -> !(node instanceof InlineWatchNode));
    List<? extends XDebuggerTreeNode> inlineWatches = ContainerUtil.filter(nodes, node -> node instanceof InlineWatchNode);
    if (!inlineWatches.isEmpty()) {
      removeInlineNodes(inlineWatches, true);
    }
    if (ordinaryWatches.isEmpty()) return;
    List<? extends WatchNode> children = myRootNode.getWatchChildren();
    int minIndex = Integer.MAX_VALUE;
    List<XDebuggerTreeNode> toRemove = new ArrayList<>();
    for (XDebuggerTreeNode node : ordinaryWatches) {
      @SuppressWarnings("SuspiciousMethodCalls")
      int index = children.indexOf(node);
      if (index != -1) {
        toRemove.add(node);
        minIndex = Math.min(minIndex, index);
      }
    }
    myRootNode.removeChildren(toRemove);
    List<? extends WatchNode> newChildren = myRootNode.getWatchChildren();
    if (!newChildren.isEmpty()) {
      WatchNode node = newChildren.get(Math.min(minIndex, newChildren.size() - 1));
      TreeUtil.selectNode(getTree(), node);
    }
    updateSessionData();
  }

  @Override
  public void removeAllWatches() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (inlineWatchesEnabled) {
      List<? extends InlineWatchNode> children = ((InlineWatchesRootNode)myRootNode).getInlineWatchChildren();
      if (!children.isEmpty()) {
        //noinspection unchecked
        removeInlineNodes((List<? extends XDebuggerTreeNode>)children, true);
      }
    }
    myRootNode.removeAllChildren();
    updateSessionData();
  }

  public void moveWatchUp(WatchNode node) {
    myRootNode.moveUp(node);
    updateSessionData();
  }

  public void moveWatchDown(WatchNode node) {
    myRootNode.moveDown(node);
    updateSessionData();
  }

  // Persists the current watch expressions, either on the live session or on
  // the session data available through the tree's data context.
  public void updateSessionData() {
    XDebugSession session = getSession(getTree());
    if (session != null) {
      ((XDebugSessionImpl)session).setWatchExpressions(myRootNode.getWatchExpressions());
    }
    else {
      XDebugSessionData data = getData(XDebugSessionData.DATA_KEY, getTree());
      if (data != null) {
        data.setWatchExpressions(myRootNode.getWatchExpressions());
      }
    }
  }

  // DnD: value nodes or plain text can be dropped onto the view to create watches.
  @Override
  public boolean update(final DnDEvent aEvent) {
    Object object = aEvent.getAttachedObject();
    boolean possible = false;
    if (object instanceof XValueNodeImpl[]) {
      possible = true;
      // do not add new watch if node is dragged to itself
      if (((XValueNodeImpl[])object).length == 1) {
        Point point = aEvent.getPoint();
        XDebuggerTree tree = getTree();
        TreePath path = tree.getClosestPathForLocation(point.x, point.y);
        if (path != null && path.getLastPathComponent() == ((XValueNodeImpl[])object)[0]) {
          // the same item is under pointer, filter out place below the tree
          Rectangle pathBounds = tree.getPathBounds(path);
          possible = pathBounds != null && pathBounds.y + pathBounds.height < point.y;
        }
      }
    }
    else if (object instanceof EventInfo) {
      possible = ((EventInfo)object).getTextForFlavor(DataFlavor.stringFlavor) != null;
    }
    aEvent.setDropPossible(possible, XDebuggerBundle.message("xdebugger.drop.text.add.to.watches"));
    return true;
  }

  @Override
  public void drop(DnDEvent aEvent) {
    Object object = aEvent.getAttachedObject();
    if (object instanceof XValueNodeImpl[]) {
      for (XValueNodeImpl node : (XValueNodeImpl[])object) {
        DebuggerUIUtil.addToWatches(this, node);
      }
    }
    else if (object instanceof EventInfo) {
      String text = ((EventInfo)object).getTextForFlavor(DataFlavor.stringFlavor);
      if (text != null) {
        addWatchExpression(XExpressionImpl.fromText(text), -1, false);
      }
    }
  }
}
/* * Seldon -- open source prediction engine * ======================================= * * Copyright 2011-2015 Seldon Technologies Ltd and Rummble Ltd (http://www.seldon.io/) * * ******************************************************************************************** * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ******************************************************************************************** */ package io.seldon.recommendation; import io.seldon.api.logging.CtrLogger; import io.seldon.memcache.MemCacheKeys; import io.seldon.memcache.MemCachePeer; import io.seldon.util.CollectionTools; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.apache.log4j.Logger; public class RecommendationUtils { private static final String REMOVE_IGNORED_RECS_OPTION_NAME = "io.seldon.algorithm.filter.removeignoredrecs"; private static Logger logger = Logger.getLogger(RecommendationUtils.class.getName()); private static final int MEMCACHE_EXCLUSIONS_EXPIRE_SECS = 1800; private static final int RECENT_RECS_EXPIRE_SECS = 1800; public static List<Long> getDiverseRecommendations(int numRecommendationsAsked,List<Long> recs,String client,String clientUserId,Set<Integer> dimensions) { List<Long> recsFinal = new ArrayList<>(recs.subList(0, Math.min(numRecommendationsAsked,recs.size()))); String rrkey = 
MemCacheKeys.getRecentRecsForUser(client, clientUserId, dimensions); Set<Integer> lastRecs = (Set<Integer>) MemCachePeer.get(rrkey); int hashCode = recsFinal.hashCode(); if (lastRecs != null) // only diversify recs if we have already shown recs previously recently { if (lastRecs.contains(hashCode)) { if (logger.isDebugEnabled()) logger.debug("Trying to diversity recs for user "+clientUserId+" client"+client+" #recs "+recs.size()); List<Long> shuffled = new ArrayList<>(recs); Collections.shuffle(shuffled); //shuffle shuffled = shuffled.subList(0, Math.min(numRecommendationsAsked,recs.size())); //limit to size of recs asked for recsFinal = new ArrayList<>(); // add back in original order for(Long r : recs) if (shuffled.contains(r)) recsFinal.add(r); hashCode = recsFinal.hashCode(); } else if (logger.isDebugEnabled()) logger.debug("Will not diversity recs for user "+clientUserId+" as hashcode "+hashCode+" not in "+ CollectionTools.join(lastRecs, ",")); } else if (logger.isDebugEnabled()) { logger.debug("Will not diversity recs for user "+clientUserId+" dimension as lasRecs is null"); } if (lastRecs == null) lastRecs = new HashSet<>(); lastRecs.add(hashCode); MemCachePeer.put(rrkey, lastRecs,RECENT_RECS_EXPIRE_SECS); return recsFinal; } /** * Create a new transient recommendations counter for the user by incrementing the current one. 
* @param client * @param userId * @param currentUUID * @param recs * @param strat *@param recTag @return */ public static String cacheRecommendationsAndCreateNewUUID(String client, String userId, Set<Integer> dimensions, String currentUUID, List<Long> recs, String algKey, Long currentItemId, int numRecentActions, ClientStrategy strat, String recTag) { String counterKey = MemCacheKeys.getRecommendationListUserCounter(client, dimensions, userId); Integer userRecCounter = (Integer) MemCachePeer.get(counterKey); if (userRecCounter == null) userRecCounter = 0; try { userRecCounter++; String recsList = CollectionTools.join(recs, ":"); String abTestingKey = strat.getName(userId, recTag); // TODO ab testing and recTag // if (algorithm != null) // abTestingKey = algorithm.getAbTestingKey(); CtrLogger.log(false,client, algKey, -1, userId,""+userRecCounter,currentItemId,numRecentActions,recsList,abTestingKey,recTag); MemCachePeer.put(MemCacheKeys.getRecommendationListUUID(client,userId,userRecCounter, recTag),new LastRecommendationBean(algKey, recs),MEMCACHE_EXCLUSIONS_EXPIRE_SECS); MemCachePeer.put(counterKey, userRecCounter,MEMCACHE_EXCLUSIONS_EXPIRE_SECS); } catch(NumberFormatException e) { logger.error("Can decode user UUID as integer: "+currentUUID); } return ""+userRecCounter; } public static <T extends Comparable<T>> Map<T,Double> normaliseScores(Map<T,Double> scores,int numRecommendations) { //limit map to recommendation size scores = CollectionTools.sortMapAndLimit(scores, numRecommendations); //Normalise counts double sum = 0; for(Map.Entry<T, Double> e : scores.entrySet()) sum = sum + e.getValue(); if (sum > 0) { for(Map.Entry<T, Double> e : scores.entrySet()) e.setValue(e.getValue()/sum); return scores; } else { logger.debug("Zero sum in counts - returning empty score map"); return new HashMap<>(); } } public static <T extends Comparable<T>> Map<T,Double> rescaleScoresToOne(Map<T,Double> scores,int numRecommendations) { //limit map to recommendation size scores = 
CollectionTools.sortMapAndLimit(scores, numRecommendations); //Normalise counts double max = 0; for(Map.Entry<T, Double> e : scores.entrySet()) if (e.getValue() > max) max = e.getValue(); if (max > 0) { for(Map.Entry<T, Double> e : scores.entrySet()) e.setValue(e.getValue()/max); return scores; } else { logger.debug("Zero sum in counts - returning empty score map"); return new HashMap<>(); } } public static class ValueComparator implements Comparator<Long> { Map<Long, Double> base; public ValueComparator(Map<Long, Double> base) { this.base = base; } // Note: this comparator imposes orderings that are inconsistent with equals. public int compare(Long a, Long b) { if (base.get(a) >= base.get(b)) { return -1; } else { return 1; } // returning 0 would merge keys } } public static Map<Long,Double> getTopK(Map<Long,Double> map,int k) { ValueComparator bvc = new ValueComparator(map); TreeMap<Long,Double> sorted_map = new TreeMap<Long,Double>(bvc); sorted_map.putAll(map); int i = 0; Map<Long,Double> r = new HashMap<>(k); double max =0; for(Map.Entry<Long, Double> e : sorted_map.entrySet()) { if (++i > k) break; else { if (i == 1) max = e.getValue(); r.put(e.getKey(), e.getValue()/max); } } return r; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.datanode;

import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.doReturn;

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.HdfsBlockLocation;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.hdfs.BlockReaderTestUtil;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.LogVerificationAppender;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetCache;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetCache.PageRounder;
import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
import org.apache.hadoop.hdfs.server.namenode.FSImage;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.protocol.BlockIdCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.HeartbeatResponse;
import org.apache.hadoop.hdfs.server.protocol.NNHAStatusHeartbeat;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.io.nativeio.NativeIO.POSIX.CacheManipulator;
import org.apache.hadoop.io.nativeio.NativeIO.POSIX.NoMlockCacheManipulator;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.MetricsAsserts;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;

import com.google.common.base.Supplier;
import com.google.common.primitives.Ints;

/**
 * Tests the DataNode's in-memory block caching (FsDatasetCache): caching and
 * uncaching driven by mocked NameNode heartbeat responses, capacity limits,
 * mlock failure/retry behaviour, page rounding and metrics.
 */
public class TestFsDatasetCache {
  private static final Log LOG = LogFactory.getLog(TestFsDatasetCache.class);

  // Most Linux installs allow a default of 64KB locked memory
  static final long CACHE_CAPACITY = 64 * 1024;
  // mlock always locks the entire page. So we don't need to deal with this
  // rounding, use the OS page size for the block size.
  private static final long PAGE_SIZE =
      NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize();
  private static final long BLOCK_SIZE = PAGE_SIZE;

  // Shared mini-cluster state, rebuilt in setUp() for every test.
  private static Configuration conf;
  private static MiniDFSCluster cluster = null;
  private static FileSystem fs;
  private static NameNode nn;
  private static FSImage fsImage;
  private static DataNode dn;
  private static FsDatasetSpi<?> fsd;
  // Mockito spy on the DN->NN translator, used to inject heartbeat responses.
  private static DatanodeProtocolClientSideTranslatorPB spyNN;
  private static final PageRounder rounder = new PageRounder();
  // Saved so tearDown() can restore the JVM-global cache manipulator.
  private static CacheManipulator prevCacheManipulator;

  static {
    LogManager.getLogger(FsDatasetCache.class).setLevel(Level.DEBUG);
  }

  // Starts a single-DataNode mini-cluster with a small locked-memory limit and
  // replaces mlock with a no-op so tests run without real locked memory.
  @Before
  public void setUp() throws Exception {
    conf = new HdfsConfiguration();
    conf.setLong(
        DFSConfigKeys.DFS_NAMENODE_PATH_BASED_CACHE_REFRESH_INTERVAL_MS, 100);
    conf.setLong(DFSConfigKeys.DFS_CACHEREPORT_INTERVAL_MSEC_KEY, 500);
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
    conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
        CACHE_CAPACITY);
    conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);

    prevCacheManipulator = NativeIO.POSIX.getCacheManipulator();
    NativeIO.POSIX.setCacheManipulator(new NoMlockCacheManipulator());

    cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(1).build();
    cluster.waitActive();

    fs = cluster.getFileSystem();
    nn = cluster.getNameNode();
    fsImage = nn.getFSImage();
    dn = cluster.getDataNodes().get(0);
    fsd = dn.getFSDataset();

    spyNN = DataNodeTestUtils.spyOnBposToNN(dn, nn);
  }

  @After
  public void tearDown() throws Exception {
    // Verify that each test uncached whatever it cached. This cleanup is
    // required so that file descriptors are not leaked across tests.
    DFSTestUtil.verifyExpectedCacheUsage(0, 0, fsd);
    if (fs != null) {
      fs.close();
    }
    if (cluster != null) {
      cluster.shutdown();
    }
    // Restore the original CacheManipulator
    NativeIO.POSIX.setCacheManipulator(prevCacheManipulator);
  }

  // Stubs the spied NN so the next DN heartbeat receives the given commands.
  private static void setHeartbeatResponse(DatanodeCommand[] cmds)
      throws IOException {
    NNHAStatusHeartbeat ha = new NNHAStatusHeartbeat(HAServiceState.ACTIVE,
        fsImage.getLastAppliedOrWrittenTxId());
    HeartbeatResponse response = new HeartbeatResponse(cmds, ha, null);
    doReturn(response).when(spyNN).sendHeartbeat(
        (DatanodeRegistration) any(),
        (StorageReport[]) any(), anyLong(), anyLong(),
        anyInt(), anyInt(), anyInt());
  }

  // Convenience wrappers building DNA_CACHE / DNA_UNCACHE command arrays.
  private static DatanodeCommand[] cacheBlock(HdfsBlockLocation loc) {
    return cacheBlocks(new HdfsBlockLocation[] {loc});
  }

  private static DatanodeCommand[] cacheBlocks(HdfsBlockLocation[] locs) {
    return new DatanodeCommand[] {
        getResponse(locs, DatanodeProtocol.DNA_CACHE)
    };
  }

  private static DatanodeCommand[] uncacheBlock(HdfsBlockLocation loc) {
    return uncacheBlocks(new HdfsBlockLocation[] {loc});
  }

  private static DatanodeCommand[] uncacheBlocks(HdfsBlockLocation[] locs) {
    return new DatanodeCommand[] {
        getResponse(locs, DatanodeProtocol.DNA_UNCACHE)
    };
  }

  /**
   * Creates a cache or uncache DatanodeCommand from an array of locations
   */
  private static DatanodeCommand getResponse(HdfsBlockLocation[] locs,
      int action) {
    String bpid = locs[0].getLocatedBlock().getBlock().getBlockPoolId();
    long[] blocks = new long[locs.length];
    for (int i=0; i<locs.length; i++) {
      blocks[i] = locs[i].getLocatedBlock().getBlock().getBlockId();
    }
    return new BlockIdCommand(action, bpid, blocks);
  }

  // Reads each block's on-disk size straight from the dataset's block files.
  private static long[] getBlockSizes(HdfsBlockLocation[] locs)
      throws Exception {
    long[] sizes = new long[locs.length];
    for (int i=0; i<locs.length; i++) {
      HdfsBlockLocation loc = locs[i];
      String bpid = loc.getLocatedBlock().getBlock().getBlockPoolId();
      Block block = loc.getLocatedBlock().getBlock().getLocalBlock();
      ExtendedBlock extBlock = new ExtendedBlock(bpid, block);
      FileInputStream blockInputStream = null;
      FileChannel blockChannel = null;
      try {
        blockInputStream =
            (FileInputStream)fsd.getBlockInputStream(extBlock, 0);
        blockChannel = blockInputStream.getChannel();
        sizes[i] = blockChannel.size();
      } finally {
        IOUtils.cleanup(LOG, blockChannel, blockInputStream);
      }
    }
    return sizes;
  }

  // Core scenario (not a @Test itself): caches then uncaches 5 blocks one at a
  // time, verifying cache usage and the BlocksCached/BlocksUncached metrics.
  private void testCacheAndUncacheBlock() throws Exception {
    LOG.info("beginning testCacheAndUncacheBlock");
    final int NUM_BLOCKS = 5;

    DFSTestUtil.verifyExpectedCacheUsage(0, 0, fsd);
    assertEquals(0, fsd.getNumBlocksCached());

    // Write a test file
    final Path testFile = new Path("/testCacheBlock");
    final long testFileLen = BLOCK_SIZE*NUM_BLOCKS;
    DFSTestUtil.createFile(fs, testFile, testFileLen, (short)1, 0xABBAl);

    // Get the details of the written file
    HdfsBlockLocation[] locs =
        (HdfsBlockLocation[])fs.getFileBlockLocations(testFile, 0, testFileLen);
    assertEquals("Unexpected number of blocks", NUM_BLOCKS, locs.length);
    final long[] blockSizes = getBlockSizes(locs);

    // Check initial state
    final long cacheCapacity = fsd.getCacheCapacity();
    long cacheUsed = fsd.getCacheUsed();
    long current = 0;
    assertEquals("Unexpected cache capacity", CACHE_CAPACITY, cacheCapacity);
    assertEquals("Unexpected amount of cache used", current, cacheUsed);

    MetricsRecordBuilder dnMetrics;
    long numCacheCommands = 0;
    long numUncacheCommands = 0;

    // Cache each block in succession, checking each time
    for (int i=0; i<NUM_BLOCKS; i++) {
      setHeartbeatResponse(cacheBlock(locs[i]));
      current = DFSTestUtil.verifyExpectedCacheUsage(
          current + blockSizes[i], i + 1, fsd);
      dnMetrics = getMetrics(dn.getMetrics().name());
      long cmds = MetricsAsserts.getLongCounter("BlocksCached", dnMetrics);
      assertTrue("Expected more cache requests from the NN ("
          + cmds + " <= " + numCacheCommands + ")",
          cmds > numCacheCommands);
      numCacheCommands = cmds;
    }

    // Uncache each block in succession, again checking each time
    for (int i=0; i<NUM_BLOCKS; i++) {
      setHeartbeatResponse(uncacheBlock(locs[i]));
      current = DFSTestUtil.
          verifyExpectedCacheUsage(current - blockSizes[i], NUM_BLOCKS - 1 - i, fsd);
      dnMetrics = getMetrics(dn.getMetrics().name());
      long cmds = MetricsAsserts.getLongCounter("BlocksUncached", dnMetrics);
      assertTrue("Expected more uncache requests from the NN",
          cmds > numUncacheCommands);
      numUncacheCommands = cmds;
    }
    LOG.info("finishing testCacheAndUncacheBlock");
  }

  // Happy-path run of the core cache/uncache scenario.
  @Test(timeout=600000)
  public void testCacheAndUncacheBlockSimple() throws Exception {
    testCacheAndUncacheBlock();
  }

  /**
   * Run testCacheAndUncacheBlock with some failures injected into the mlock
   * call. This tests the ability of the NameNode to resend commands.
   */
  @Test(timeout=600000)
  public void testCacheAndUncacheBlockWithRetries() throws Exception {
    // We don't have to save the previous cacheManipulator
    // because it will be reinstalled by the @After function.
    NativeIO.POSIX.setCacheManipulator(new NoMlockCacheManipulator() {
      // Identifiers whose first mlock attempt has already been failed.
      private final Set<String> seenIdentifiers = new HashSet<String>();

      @Override
      public void mlock(String identifier,
          ByteBuffer mmap, long length) throws IOException {
        if (seenIdentifiers.contains(identifier)) {
          // mlock succeeds the second time.
          LOG.info("mlocking " + identifier);
          return;
        }
        seenIdentifiers.add(identifier);
        throw new IOException("injecting IOException during mlock of " +
            identifier);
      }
    });
    testCacheAndUncacheBlock();
  }

  // Verifies that caching past the locked-memory limit is rejected, logged,
  // and reported via the failed-to-cache metric.
  @Test(timeout=600000)
  public void testFilesExceedMaxLockedMemory() throws Exception {
    LOG.info("beginning testFilesExceedMaxLockedMemory");

    // Create some test files that will exceed total cache capacity
    final int numFiles = 5;
    final long fileSize = CACHE_CAPACITY / (numFiles-1);

    final Path[] testFiles = new Path[numFiles];
    final HdfsBlockLocation[][] fileLocs = new HdfsBlockLocation[numFiles][];
    final long[] fileSizes = new long[numFiles];
    for (int i=0; i<numFiles; i++) {
      testFiles[i] = new Path("/testFilesExceedMaxLockedMemory-" + i);
      DFSTestUtil.createFile(fs, testFiles[i], fileSize, (short)1, 0xDFAl);
      fileLocs[i] = (HdfsBlockLocation[])fs.getFileBlockLocations(
          testFiles[i], 0, fileSize);
      // Get the file size (sum of blocks)
      long[] sizes = getBlockSizes(fileLocs[i]);
      for (int j=0; j<sizes.length; j++) {
        fileSizes[i] += sizes[j];
      }
    }

    // Cache the first n-1 files
    long total = 0;
    DFSTestUtil.verifyExpectedCacheUsage(0, 0, fsd);
    for (int i=0; i<numFiles-1; i++) {
      setHeartbeatResponse(cacheBlocks(fileLocs[i]));
      total = DFSTestUtil.verifyExpectedCacheUsage(
          rounder.round(total + fileSizes[i]), 4 * (i + 1), fsd);
    }

    // nth file should hit a capacity exception
    final LogVerificationAppender appender = new LogVerificationAppender();
    final Logger logger = Logger.getRootLogger();
    logger.addAppender(appender);
    setHeartbeatResponse(cacheBlocks(fileLocs[numFiles-1]));
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        int lines = appender.countLinesWithMessage(
            "more bytes in the cache: " +
            DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY);
        return lines > 0;
      }
    }, 500, 30000);
    // Also check the metrics for the failure
    assertTrue("Expected more than 0 failed cache attempts",
        fsd.getNumBlocksFailedToCache() > 0);

    // Uncache the n-1 files
    // NOTE(review): 16 = 4 blocks per file * 4 cached files — tied to the
    // constants above; confirm if numFiles/fileSize ever change.
    int curCachedBlocks = 16;
    for (int i=0; i<numFiles-1; i++) {
      setHeartbeatResponse(uncacheBlocks(fileLocs[i]));
      long uncachedBytes = rounder.round(fileSizes[i]);
      total -= uncachedBytes;
      curCachedBlocks -= uncachedBytes / BLOCK_SIZE;
      DFSTestUtil.verifyExpectedCacheUsage(total, curCachedBlocks, fsd);
    }
    LOG.info("finishing testFilesExceedMaxLockedMemory");
  }

  // Verifies that usedBytes rises while slow (3s) mlocks are still in flight,
  // and that uncache commands cleanly cancel the unfinished caching jobs.
  @Test(timeout=600000)
  public void testUncachingBlocksBeforeCachingFinishes() throws Exception {
    LOG.info("beginning testUncachingBlocksBeforeCachingFinishes");
    final int NUM_BLOCKS = 5;

    DFSTestUtil.verifyExpectedCacheUsage(0, 0, fsd);

    // Write a test file
    final Path testFile = new Path("/testCacheBlock");
    final long testFileLen = BLOCK_SIZE*NUM_BLOCKS;
    DFSTestUtil.createFile(fs, testFile, testFileLen, (short)1, 0xABBAl);

    // Get the details of the written file
    HdfsBlockLocation[] locs =
        (HdfsBlockLocation[])fs.getFileBlockLocations(testFile, 0, testFileLen);
    assertEquals("Unexpected number of blocks", NUM_BLOCKS, locs.length);
    final long[] blockSizes = getBlockSizes(locs);

    // Check initial state
    final long cacheCapacity = fsd.getCacheCapacity();
    long cacheUsed = fsd.getCacheUsed();
    long current = 0;
    assertEquals("Unexpected cache capacity", CACHE_CAPACITY, cacheCapacity);
    assertEquals("Unexpected amount of cache used", current, cacheUsed);

    NativeIO.POSIX.setCacheManipulator(new NoMlockCacheManipulator() {
      @Override
      public void mlock(String identifier,
          ByteBuffer mmap, long length) throws IOException {
        LOG.info("An mlock operation is starting on " + identifier);
        try {
          Thread.sleep(3000);
        } catch (InterruptedException e) {
          Assert.fail();
        }
      }
    });
    // Starting caching each block in succession. The usedBytes amount
    // should increase, even though caching doesn't complete on any of them.
    for (int i=0; i<NUM_BLOCKS; i++) {
      setHeartbeatResponse(cacheBlock(locs[i]));
      current = DFSTestUtil.verifyExpectedCacheUsage(
          current + blockSizes[i], i + 1, fsd);
    }

    setHeartbeatResponse(new DatanodeCommand[] {
      getResponse(locs, DatanodeProtocol.DNA_UNCACHE)
    });

    // wait until all caching jobs are finished cancelling.
    current = DFSTestUtil.verifyExpectedCacheUsage(0, 0, fsd);
    LOG.info("finishing testUncachingBlocksBeforeCachingFinishes");
  }

  // Uncaching a never-cached block must bump the failed-to-uncache metric
  // rather than crash the DataNode.
  @Test(timeout=60000)
  public void testUncacheUnknownBlock() throws Exception {
    // Create a file
    Path fileName = new Path("/testUncacheUnknownBlock");
    int fileLen = 4096;
    DFSTestUtil.createFile(fs, fileName, fileLen, (short)1, 0xFDFD);
    HdfsBlockLocation[] locs = (HdfsBlockLocation[])fs.getFileBlockLocations(
        fileName, 0, fileLen);

    // Try to uncache it without caching it first
    setHeartbeatResponse(uncacheBlocks(locs));

    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        return fsd.getNumBlocksFailedToUncache() > 0;
      }
    }, 100, 10000);
  }

  // Blocks smaller than a page must still account a whole page of cache usage.
  @Test(timeout=60000)
  public void testPageRounder() throws Exception {
    // Write a small file
    Path fileName = new Path("/testPageRounder");
    final int smallBlocks = 512; // This should be smaller than the page size
    assertTrue("Page size should be greater than smallBlocks!",
        PAGE_SIZE > smallBlocks);
    final int numBlocks = 5;
    final int fileLen = smallBlocks * numBlocks;
    FSDataOutputStream out =
        fs.create(fileName, false, 4096, (short)1, smallBlocks);
    out.write(new byte[fileLen]);
    out.close();
    HdfsBlockLocation[] locs = (HdfsBlockLocation[])fs.getFileBlockLocations(
        fileName, 0, fileLen);
    // Cache the file and check the sizes match the page size
    setHeartbeatResponse(cacheBlocks(locs));
    DFSTestUtil.verifyExpectedCacheUsage(PAGE_SIZE * numBlocks, numBlocks, fsd);
    // Uncache and check that it decrements by the page size too
    setHeartbeatResponse(uncacheBlocks(locs));
    DFSTestUtil.verifyExpectedCacheUsage(0, 0, fsd);
  }

  // After a cache/uncache cycle the NN must stop resending commands: the
  // BlocksCached/BlocksUncached counters stay at exactly 1.
  @Test(timeout=60000)
  public void testUncacheQuiesces() throws Exception {
    // Create a file
    Path fileName = new Path("/testUncacheQuiesces");
    int fileLen = 4096;
    DFSTestUtil.createFile(fs, fileName, fileLen, (short)1, 0xFDFD);
    // Cache it
    DistributedFileSystem dfs = cluster.getFileSystem();
    dfs.addCachePool(new CachePoolInfo("pool"));
    dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
        .setPool("pool").setPath(fileName).setReplication((short)3).build());
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        MetricsRecordBuilder dnMetrics = getMetrics(dn.getMetrics().name());
        long blocksCached =
            MetricsAsserts.getLongCounter("BlocksCached", dnMetrics);
        return blocksCached > 0;
      }
    }, 1000, 30000);
    // Uncache it
    dfs.removeCacheDirective(1);
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        MetricsRecordBuilder dnMetrics = getMetrics(dn.getMetrics().name());
        long blocksUncached =
            MetricsAsserts.getLongCounter("BlocksUncached", dnMetrics);
        return blocksUncached > 0;
      }
    }, 1000, 30000);
    // Make sure that no additional messages were sent
    Thread.sleep(10000);
    MetricsRecordBuilder dnMetrics = getMetrics(dn.getMetrics().name());
    MetricsAsserts.assertCounter("BlocksCached", 1l, dnMetrics);
    MetricsAsserts.assertCounter("BlocksUncached", 1l, dnMetrics);
  }

  // Regression test for HDFS-6107: a directive blocked by a full cache must
  // succeed once the cache-filling directive is removed.
  @Test(timeout=60000)
  public void testReCacheAfterUncache() throws Exception {
    final int TOTAL_BLOCKS_PER_CACHE =
        Ints.checkedCast(CACHE_CAPACITY / BLOCK_SIZE);
    BlockReaderTestUtil.enableHdfsCachingTracing();
    Assert.assertEquals(0, CACHE_CAPACITY % BLOCK_SIZE);

    // Create a small file
    final Path SMALL_FILE = new Path("/smallFile");
    DFSTestUtil.createFile(fs, SMALL_FILE, BLOCK_SIZE, (short)1, 0xcafe);

    // Create a file that will take up the whole cache
    final Path BIG_FILE = new Path("/bigFile");
    DFSTestUtil.createFile(fs, BIG_FILE,
        TOTAL_BLOCKS_PER_CACHE * BLOCK_SIZE, (short)1, 0xbeef);
    final DistributedFileSystem dfs = cluster.getFileSystem();
    dfs.addCachePool(new CachePoolInfo("pool"));
    final long bigCacheDirectiveId =
        dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
            .setPool("pool").setPath(BIG_FILE).setReplication((short)1).build());
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        MetricsRecordBuilder dnMetrics = getMetrics(dn.getMetrics().name());
        long blocksCached =
            MetricsAsserts.getLongCounter("BlocksCached", dnMetrics);
        if (blocksCached != TOTAL_BLOCKS_PER_CACHE) {
          LOG.info("waiting for " + TOTAL_BLOCKS_PER_CACHE + " to " +
              "be cached. Right now only " + blocksCached + " blocks are cached.");
          return false;
        }
        LOG.info(TOTAL_BLOCKS_PER_CACHE + " blocks are now cached.");
        return true;
      }
    }, 1000, 30000);

    // Try to cache a smaller file. It should fail.
    final long shortCacheDirectiveId =
        dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
            .setPool("pool").setPath(SMALL_FILE).setReplication((short)1).build());
    Thread.sleep(10000);
    MetricsRecordBuilder dnMetrics = getMetrics(dn.getMetrics().name());
    Assert.assertEquals(TOTAL_BLOCKS_PER_CACHE,
        MetricsAsserts.getLongCounter("BlocksCached", dnMetrics));

    // Uncache the big file and verify that the small file can now be
    // cached (regression test for HDFS-6107)
    dfs.removeCacheDirective(bigCacheDirectiveId);
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        RemoteIterator<CacheDirectiveEntry> iter;
        try {
          iter = dfs.listCacheDirectives(
              new CacheDirectiveInfo.Builder().build());
          CacheDirectiveEntry entry;
          do {
            entry = iter.next();
          } while (entry.getInfo().getId() != shortCacheDirectiveId);
          if (entry.getStats().getFilesCached() != 1) {
            LOG.info("waiting for directive " + shortCacheDirectiveId +
                " to be cached. stats = " + entry.getStats());
            return false;
          }
          LOG.info("directive " + shortCacheDirectiveId + " has been cached.");
        } catch (IOException e) {
          Assert.fail("unexpected exception" + e.toString());
        }
        return true;
      }
    }, 1000, 30000);

    dfs.removeCacheDirective(shortCacheDirectiveId);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.backup.impl; import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.BACKUP_ATTEMPTS_PAUSE_MS_KEY; import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.BACKUP_MAX_ATTEMPTS_KEY; import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.DEFAULT_BACKUP_ATTEMPTS_PAUSE_MS; import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.DEFAULT_BACKUP_MAX_ATTEMPTS; import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.JOB_NAME_CONF_KEY; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.backup.BackupCopyJob; import org.apache.hadoop.hbase.backup.BackupInfo; import org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase; import org.apache.hadoop.hbase.backup.BackupInfo.BackupState; import org.apache.hadoop.hbase.backup.BackupRequest; import org.apache.hadoop.hbase.backup.BackupRestoreFactory; import org.apache.hadoop.hbase.backup.BackupType; import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager; import org.apache.hadoop.hbase.backup.util.BackupUtils; import 
org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Full table backup implementation: snapshots every table in the backup request and
 * exports each snapshot to the backup destination via the configured {@link BackupCopyJob}.
 */
@InterfaceAudience.Private
public class FullTableBackupClient extends TableBackupClient {
  private static final Logger LOG = LoggerFactory.getLogger(FullTableBackupClient.class);

  public FullTableBackupClient() {
  }

  public FullTableBackupClient(final Connection conn, final String backupId, BackupRequest request)
      throws IOException {
    super(conn, backupId, request);
  }

  /**
   * Do snapshot copy: exports every table snapshot recorded in {@code backupInfo} to the
   * per-table backup directory. ExportSnapshot supports only a single snapshot per invocation,
   * so we loop over the tables.
   * @param backupInfo backup info carrying the snapshot names and destination directories
   * @throws Exception if any snapshot export finishes with a non-zero return code
   */
  protected void snapshotCopy(BackupInfo backupInfo) throws Exception {
    LOG.info("Snapshot copy is starting.");

    // set overall backup phase: snapshot_copy
    backupInfo.setPhase(BackupPhase.SNAPSHOTCOPY);

    // call ExportSnapshot to copy files based on hbase snapshot for backup
    // ExportSnapshot only support single snapshot export, need loop for multiple tables case
    BackupCopyJob copyService = BackupRestoreFactory.getBackupCopyJob(conf);

    // number of snapshots matches number of tables
    int numOfSnapshots = backupInfo.getSnapshotNames().size();
    LOG.debug("There are " + numOfSnapshots + " snapshots to be copied.");

    for (TableName table : backupInfo.getTables()) {
      // Currently we simply set the sub copy tasks by counting the table snapshot number, we can
      // calculate the real files' size for the percentage in the future.
      // backupCopier.setSubTaskPercntgInWholeTask(1f / numOfSnapshots);
      String[] args = new String[4];
      args[0] = "-snapshot";
      args[1] = backupInfo.getSnapshotName(table);
      args[2] = "-copy-to";
      args[3] = backupInfo.getTableBackupDir(table);

      String jobname = "Full-Backup_" + backupInfo.getBackupId() + "_" + table.getNameAsString();
      if (LOG.isDebugEnabled()) {
        LOG.debug("Setting snapshot copy job name to : " + jobname);
      }
      conf.set(JOB_NAME_CONF_KEY, jobname);

      LOG.debug("Copy snapshot " + args[1] + " to " + args[3]);
      int res = copyService.copy(backupInfo, backupManager, conf, BackupType.FULL, args);

      // if one snapshot export failed, do not continue for remained snapshots
      if (res != 0) {
        LOG.error("Exporting Snapshot " + args[1] + " failed with return code: " + res + ".");
        throw new IOException("Failed of exporting snapshot " + args[1] + " to " + args[3]
            + " with reason code " + res);
      }
      conf.unset(JOB_NAME_CONF_KEY);
      LOG.info("Snapshot copy " + args[1] + " finished.");
    }
  }

  /**
   * Backup request execution: marks the backup as started, rolls the WALs, snapshots each
   * table, exports the snapshots, and records the log timestamps and start code in the backup
   * system table. On any failure the backup is marked failed before the exception is rethrown.
   * @throws IOException if the execution of the backup fails
   */
  @Override
  public void execute() throws IOException {
    try (Admin admin = conn.getAdmin()) {
      // Begin BACKUP
      beginBackup(backupManager, backupInfo);

      // do snapshot for full table backup
      String savedStartCode = backupManager.readBackupStartCode();
      boolean firstBackup = savedStartCode == null || Long.parseLong(savedStartCode) == 0L;
      if (firstBackup) {
        // This is our first backup. Let's put some marker to system table so that we can hold the
        // logs while we do the backup.
        backupManager.writeBackupStartCode(0L);
      }
      // We roll log here before we do the snapshot. It is possible there is duplicate data
      // in the log that is already in the snapshot. But if we do it after the snapshot, we
      // could have data loss.
      // A better approach is to do the roll log on each RS in the same global procedure as
      // the snapshot.
      LOG.info("Execute roll log procedure for full backup ...");

      Map<String, String> props = new HashMap<>();
      props.put("backupRoot", backupInfo.getBackupRootDir());
      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);

      newTimestamps = backupManager.readRegionServerLastLogRollResult();
      if (firstBackup) {
        // Updates registered log files
        // We record ALL old WAL files as registered, because
        // this is a first full backup in the system and these
        // files are not needed for next incremental backup
        List<String> logFiles = BackupUtils.getWALFilesOlderThan(conf, newTimestamps);
        backupManager.recordWALFiles(logFiles);
      }

      // SNAPSHOT_TABLES:
      backupInfo.setPhase(BackupPhase.SNAPSHOT);
      for (TableName tableName : tableList) {
        String snapshotName = "snapshot_" + Long.toString(EnvironmentEdgeManager.currentTime())
            + "_" + tableName.getNamespaceAsString() + "_" + tableName.getQualifierAsString();

        snapshotTable(admin, tableName, snapshotName);
        backupInfo.setSnapshotName(tableName, snapshotName);
      }

      // SNAPSHOT_COPY:
      // do snapshot copy
      LOG.debug("snapshot copy for " + backupId);
      snapshotCopy(backupInfo);

      // Updates incremental backup table set
      backupManager.addIncrementalBackupTableSet(backupInfo.getTables());

      // BACKUP_COMPLETE:
      // set overall backup status: complete. Here we make sure to complete the backup.
      // After this checkpoint, even if entering cancel process, will let the backup finished
      backupInfo.setState(BackupState.COMPLETE);
      // The table list in backupInfo is good for both full backup and incremental backup.
      // For incremental backup, it contains the incremental backup table set.
      backupManager.writeRegionServerLogTimestamp(backupInfo.getTables(), newTimestamps);

      HashMap<TableName, HashMap<String, Long>> newTableSetTimestampMap =
          backupManager.readLogTimestampMap();

      Long newStartCode =
          BackupUtils.getMinValue(BackupUtils.getRSLogTimestampMins(newTableSetTimestampMap));
      backupManager.writeBackupStartCode(newStartCode);

      // backup complete
      completeBackup(conn, backupInfo, backupManager, BackupType.FULL, conf);
    } catch (Exception e) {
      failBackup(conn, backupInfo, backupManager, e, "Unexpected BackupException : ",
        BackupType.FULL, conf);
      throw new IOException(e);
    }
  }

  /**
   * Takes a snapshot of the given table, retrying up to the configured maximum number of
   * attempts with a fixed pause between attempts.
   * @param admin the cluster admin used to issue the snapshot
   * @param tableName table to snapshot
   * @param snapshotName name to give the snapshot
   * @throws IOException if all attempts fail; the last snapshot failure is chained as the cause
   */
  protected void snapshotTable(Admin admin, TableName tableName, String snapshotName)
      throws IOException {
    int maxAttempts = conf.getInt(BACKUP_MAX_ATTEMPTS_KEY, DEFAULT_BACKUP_MAX_ATTEMPTS);
    int pause = conf.getInt(BACKUP_ATTEMPTS_PAUSE_MS_KEY, DEFAULT_BACKUP_ATTEMPTS_PAUSE_MS);
    int attempts = 0;
    // Keep the most recent failure so it can be rethrown as the cause instead of being lost.
    IOException lastFailure = null;

    while (attempts++ < maxAttempts) {
      try {
        admin.snapshot(snapshotName, tableName);
        return;
      } catch (IOException ee) {
        lastFailure = ee;
        LOG.warn("Snapshot attempt " + attempts + " failed for table " + tableName
            + ", sleeping for " + pause + "ms", ee);
        if (attempts < maxAttempts) {
          try {
            Thread.sleep(pause);
          } catch (InterruptedException e) {
            // Restore the interrupt flag and stop retrying instead of swallowing the interrupt.
            Thread.currentThread().interrupt();
            break;
          }
        }
      }
    }
    // BUGFIX: previously the last exception was discarded; chain it so callers see the root cause.
    throw new IOException("Failed to snapshot table " + tableName, lastFailure);
  }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/networksecurity/v1beta1/tls.proto package com.google.cloud.networksecurity.v1beta1; /** * * * <pre> * Specification of ValidationCA. Defines the mechanism to obtain the * Certificate Authority certificate to validate the peer certificate. * </pre> * * Protobuf type {@code google.cloud.networksecurity.v1beta1.ValidationCA} */ public final class ValidationCA extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.networksecurity.v1beta1.ValidationCA) ValidationCAOrBuilder { private static final long serialVersionUID = 0L; // Use ValidationCA.newBuilder() to construct. 
private ValidationCA(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ValidationCA() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ValidationCA(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ValidationCA( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 18: { com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.Builder subBuilder = null; if (typeCase_ == 2) { subBuilder = ((com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_).toBuilder(); } type_ = input.readMessage( com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom((com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_); type_ = subBuilder.buildPartial(); } typeCase_ = 2; break; } case 26: { com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.Builder subBuilder = null; if (typeCase_ == 3) { subBuilder = ((com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_) .toBuilder(); } type_ = input.readMessage( com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom( (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_); type_ = subBuilder.buildPartial(); } typeCase_ = 3; break; } default: { if (!parseUnknownField(input, unknownFields, 
extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networksecurity.v1beta1.TlsProto .internal_static_google_cloud_networksecurity_v1beta1_ValidationCA_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networksecurity.v1beta1.TlsProto .internal_static_google_cloud_networksecurity_v1beta1_ValidationCA_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networksecurity.v1beta1.ValidationCA.class, com.google.cloud.networksecurity.v1beta1.ValidationCA.Builder.class); } private int typeCase_ = 0; private java.lang.Object type_; public enum TypeCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GRPC_ENDPOINT(2), CERTIFICATE_PROVIDER_INSTANCE(3), TYPE_NOT_SET(0); private final int value; private TypeCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static TypeCase valueOf(int value) { return forNumber(value); } public static TypeCase forNumber(int value) { switch (value) { case 2: return GRPC_ENDPOINT; case 3: return CERTIFICATE_PROVIDER_INSTANCE; case 0: return TYPE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public TypeCase getTypeCase() { return TypeCase.forNumber(typeCase_); } public static final int GRPC_ENDPOINT_FIELD_NUMBER = 2; /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> * * @return Whether the grpcEndpoint field is set. */ @java.lang.Override public boolean hasGrpcEndpoint() { return typeCase_ == 2; } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> * * @return The grpcEndpoint. */ @java.lang.Override public com.google.cloud.networksecurity.v1beta1.GrpcEndpoint getGrpcEndpoint() { if (typeCase_ == 2) { return (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_; } return com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.getDefaultInstance(); } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. 
* </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ @java.lang.Override public com.google.cloud.networksecurity.v1beta1.GrpcEndpointOrBuilder getGrpcEndpointOrBuilder() { if (typeCase_ == 2) { return (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_; } return com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.getDefaultInstance(); } public static final int CERTIFICATE_PROVIDER_INSTANCE_FIELD_NUMBER = 3; /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. * </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> * * @return Whether the certificateProviderInstance field is set. */ @java.lang.Override public boolean hasCertificateProviderInstance() { return typeCase_ == 3; } /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. * </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> * * @return The certificateProviderInstance. */ @java.lang.Override public com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance getCertificateProviderInstance() { if (typeCase_ == 3) { return (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_; } return com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance .getDefaultInstance(); } /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. 
* </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> */ @java.lang.Override public com.google.cloud.networksecurity.v1beta1.CertificateProviderInstanceOrBuilder getCertificateProviderInstanceOrBuilder() { if (typeCase_ == 3) { return (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_; } return com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance .getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (typeCase_ == 2) { output.writeMessage(2, (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_); } if (typeCase_ == 3) { output.writeMessage( 3, (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (typeCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_); } if (typeCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 3, (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.networksecurity.v1beta1.ValidationCA)) { return super.equals(obj); } com.google.cloud.networksecurity.v1beta1.ValidationCA other = (com.google.cloud.networksecurity.v1beta1.ValidationCA) 
obj; if (!getTypeCase().equals(other.getTypeCase())) return false; switch (typeCase_) { case 2: if (!getGrpcEndpoint().equals(other.getGrpcEndpoint())) return false; break; case 3: if (!getCertificateProviderInstance().equals(other.getCertificateProviderInstance())) return false; break; case 0: default: } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (typeCase_) { case 2: hash = (37 * hash) + GRPC_ENDPOINT_FIELD_NUMBER; hash = (53 * hash) + getGrpcEndpoint().hashCode(); break; case 3: hash = (37 * hash) + CERTIFICATE_PROVIDER_INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getCertificateProviderInstance().hashCode(); break; case 0: default: } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom(byte[] 
data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networksecurity.v1beta1.ValidationCA parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } 
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.networksecurity.v1beta1.ValidationCA prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Specification of ValidationCA. Defines the mechanism to obtain the * Certificate Authority certificate to validate the peer certificate. * </pre> * * Protobuf type {@code google.cloud.networksecurity.v1beta1.ValidationCA} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.networksecurity.v1beta1.ValidationCA) com.google.cloud.networksecurity.v1beta1.ValidationCAOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networksecurity.v1beta1.TlsProto .internal_static_google_cloud_networksecurity_v1beta1_ValidationCA_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networksecurity.v1beta1.TlsProto .internal_static_google_cloud_networksecurity_v1beta1_ValidationCA_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networksecurity.v1beta1.ValidationCA.class, com.google.cloud.networksecurity.v1beta1.ValidationCA.Builder.class); } // Construct using com.google.cloud.networksecurity.v1beta1.ValidationCA.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); typeCase_ = 0; type_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.networksecurity.v1beta1.TlsProto .internal_static_google_cloud_networksecurity_v1beta1_ValidationCA_descriptor; } @java.lang.Override public com.google.cloud.networksecurity.v1beta1.ValidationCA getDefaultInstanceForType() { return com.google.cloud.networksecurity.v1beta1.ValidationCA.getDefaultInstance(); } @java.lang.Override public com.google.cloud.networksecurity.v1beta1.ValidationCA build() { com.google.cloud.networksecurity.v1beta1.ValidationCA result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.networksecurity.v1beta1.ValidationCA buildPartial() { com.google.cloud.networksecurity.v1beta1.ValidationCA result = new com.google.cloud.networksecurity.v1beta1.ValidationCA(this); if (typeCase_ == 2) { if (grpcEndpointBuilder_ == null) { result.type_ = type_; } else { result.type_ = grpcEndpointBuilder_.build(); } } if (typeCase_ == 3) { if (certificateProviderInstanceBuilder_ == null) { result.type_ = type_; } else { result.type_ = certificateProviderInstanceBuilder_.build(); } } result.typeCase_ = typeCase_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.networksecurity.v1beta1.ValidationCA) { return mergeFrom((com.google.cloud.networksecurity.v1beta1.ValidationCA) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.networksecurity.v1beta1.ValidationCA other) { if (other == com.google.cloud.networksecurity.v1beta1.ValidationCA.getDefaultInstance()) return this; switch (other.getTypeCase()) { case GRPC_ENDPOINT: { mergeGrpcEndpoint(other.getGrpcEndpoint()); break; } case CERTIFICATE_PROVIDER_INSTANCE: { mergeCertificateProviderInstance(other.getCertificateProviderInstance()); break; } case TYPE_NOT_SET: { break; } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.networksecurity.v1beta1.ValidationCA parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.networksecurity.v1beta1.ValidationCA) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { 
mergeFrom(parsedMessage); } } return this; } private int typeCase_ = 0; private java.lang.Object type_; public TypeCase getTypeCase() { return TypeCase.forNumber(typeCase_); } public Builder clearType() { typeCase_ = 0; type_ = null; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networksecurity.v1beta1.GrpcEndpoint, com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.Builder, com.google.cloud.networksecurity.v1beta1.GrpcEndpointOrBuilder> grpcEndpointBuilder_; /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> * * @return Whether the grpcEndpoint field is set. */ @java.lang.Override public boolean hasGrpcEndpoint() { return typeCase_ == 2; } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> * * @return The grpcEndpoint. */ @java.lang.Override public com.google.cloud.networksecurity.v1beta1.GrpcEndpoint getGrpcEndpoint() { if (grpcEndpointBuilder_ == null) { if (typeCase_ == 2) { return (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_; } return com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.getDefaultInstance(); } else { if (typeCase_ == 2) { return grpcEndpointBuilder_.getMessage(); } return com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.getDefaultInstance(); } } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. 
* </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ public Builder setGrpcEndpoint(com.google.cloud.networksecurity.v1beta1.GrpcEndpoint value) { if (grpcEndpointBuilder_ == null) { if (value == null) { throw new NullPointerException(); } type_ = value; onChanged(); } else { grpcEndpointBuilder_.setMessage(value); } typeCase_ = 2; return this; } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ public Builder setGrpcEndpoint( com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.Builder builderForValue) { if (grpcEndpointBuilder_ == null) { type_ = builderForValue.build(); onChanged(); } else { grpcEndpointBuilder_.setMessage(builderForValue.build()); } typeCase_ = 2; return this; } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ public Builder mergeGrpcEndpoint(com.google.cloud.networksecurity.v1beta1.GrpcEndpoint value) { if (grpcEndpointBuilder_ == null) { if (typeCase_ == 2 && type_ != com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.getDefaultInstance()) { type_ = com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.newBuilder( (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_) .mergeFrom(value) .buildPartial(); } else { type_ = value; } onChanged(); } else { if (typeCase_ == 2) { grpcEndpointBuilder_.mergeFrom(value); } grpcEndpointBuilder_.setMessage(value); } typeCase_ = 2; return this; } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. 
* </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ public Builder clearGrpcEndpoint() { if (grpcEndpointBuilder_ == null) { if (typeCase_ == 2) { typeCase_ = 0; type_ = null; onChanged(); } } else { if (typeCase_ == 2) { typeCase_ = 0; type_ = null; } grpcEndpointBuilder_.clear(); } return this; } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ public com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.Builder getGrpcEndpointBuilder() { return getGrpcEndpointFieldBuilder().getBuilder(); } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. * </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ @java.lang.Override public com.google.cloud.networksecurity.v1beta1.GrpcEndpointOrBuilder getGrpcEndpointOrBuilder() { if ((typeCase_ == 2) && (grpcEndpointBuilder_ != null)) { return grpcEndpointBuilder_.getMessageOrBuilder(); } else { if (typeCase_ == 2) { return (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_; } return com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.getDefaultInstance(); } } /** * * * <pre> * gRPC specific configuration to access the gRPC server to * obtain the CA certificate. 
* </pre> * * <code>.google.cloud.networksecurity.v1beta1.GrpcEndpoint grpc_endpoint = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networksecurity.v1beta1.GrpcEndpoint, com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.Builder, com.google.cloud.networksecurity.v1beta1.GrpcEndpointOrBuilder> getGrpcEndpointFieldBuilder() { if (grpcEndpointBuilder_ == null) { if (!(typeCase_ == 2)) { type_ = com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.getDefaultInstance(); } grpcEndpointBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networksecurity.v1beta1.GrpcEndpoint, com.google.cloud.networksecurity.v1beta1.GrpcEndpoint.Builder, com.google.cloud.networksecurity.v1beta1.GrpcEndpointOrBuilder>( (com.google.cloud.networksecurity.v1beta1.GrpcEndpoint) type_, getParentForChildren(), isClean()); type_ = null; } typeCase_ = 2; onChanged(); ; return grpcEndpointBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance, com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.Builder, com.google.cloud.networksecurity.v1beta1.CertificateProviderInstanceOrBuilder> certificateProviderInstanceBuilder_; /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. * </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> * * @return Whether the certificateProviderInstance field is set. */ @java.lang.Override public boolean hasCertificateProviderInstance() { return typeCase_ == 3; } /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. 
* </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> * * @return The certificateProviderInstance. */ @java.lang.Override public com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance getCertificateProviderInstance() { if (certificateProviderInstanceBuilder_ == null) { if (typeCase_ == 3) { return (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_; } return com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance .getDefaultInstance(); } else { if (typeCase_ == 3) { return certificateProviderInstanceBuilder_.getMessage(); } return com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance .getDefaultInstance(); } } /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. * </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> */ public Builder setCertificateProviderInstance( com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance value) { if (certificateProviderInstanceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } type_ = value; onChanged(); } else { certificateProviderInstanceBuilder_.setMessage(value); } typeCase_ = 3; return this; } /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. 
* </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> */ public Builder setCertificateProviderInstance( com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.Builder builderForValue) { if (certificateProviderInstanceBuilder_ == null) { type_ = builderForValue.build(); onChanged(); } else { certificateProviderInstanceBuilder_.setMessage(builderForValue.build()); } typeCase_ = 3; return this; } /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. * </pre> * * <code> * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3; * </code> */ public Builder mergeCertificateProviderInstance( com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance value) { if (certificateProviderInstanceBuilder_ == null) { if (typeCase_ == 3 && type_ != com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance .getDefaultInstance()) { type_ = com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.newBuilder( (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_) .mergeFrom(value) .buildPartial(); } else { type_ = value; } onChanged(); } else { if (typeCase_ == 3) { certificateProviderInstanceBuilder_.mergeFrom(value); } certificateProviderInstanceBuilder_.setMessage(value); } typeCase_ = 3; return this; } /** * * * <pre> * The certificate provider instance specification that will be passed to * the data plane, which will be used to load necessary credential * information. 
* </pre>
 *
 * <code>
 * .google.cloud.networksecurity.v1beta1.CertificateProviderInstance certificate_provider_instance = 3;
 * </code>
 */
// Clears certificate_provider_instance; only resets the oneof state if case 3 is active.
public Builder clearCertificateProviderInstance() {
  if (certificateProviderInstanceBuilder_ == null) {
    if (typeCase_ == 3) {
      typeCase_ = 0;
      type_ = null;
      onChanged();
    }
  } else {
    if (typeCase_ == 3) {
      typeCase_ = 0;
      type_ = null;
    }
    certificateProviderInstanceBuilder_.clear();
  }
  return this;
}
/**
 * Returns a mutable builder for certificate_provider_instance; switches the oneof to
 * case 3 as a side effect (via the field-builder accessor below).
 */
public com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.Builder
    getCertificateProviderInstanceBuilder() {
  return getCertificateProviderInstanceFieldBuilder().getBuilder();
}
/**
 * Read-only view of certificate_provider_instance, or the default instance when the oneof
 * is not in case 3. Does not create the field builder.
 */
@java.lang.Override
public com.google.cloud.networksecurity.v1beta1.CertificateProviderInstanceOrBuilder
    getCertificateProviderInstanceOrBuilder() {
  if ((typeCase_ == 3) && (certificateProviderInstanceBuilder_ != null)) {
    return certificateProviderInstanceBuilder_.getMessageOrBuilder();
  } else {
    if (typeCase_ == 3) {
      return (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_;
    }
    return com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance
        .getDefaultInstance();
  }
}
/**
 * Lazily creates the oneof field builder for certificate_provider_instance (case 3).
 * NOTE: protoc-generated code; do not edit by hand.
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance,
        com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.Builder,
        com.google.cloud.networksecurity.v1beta1.CertificateProviderInstanceOrBuilder>
    getCertificateProviderInstanceFieldBuilder() {
  if (certificateProviderInstanceBuilder_ == null) {
    if (!(typeCase_ == 3)) {
      // Some other oneof member (or nothing) is set; seed the builder with the default.
      type_ =
          com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance
              .getDefaultInstance();
    }
    certificateProviderInstanceBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance,
            com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance.Builder,
            com.google.cloud.networksecurity.v1beta1.CertificateProviderInstanceOrBuilder>(
            (com.google.cloud.networksecurity.v1beta1.CertificateProviderInstance) type_,
            getParentForChildren(),
            isClean());
    type_ = null; // ownership of the message moves into the builder
  }
  typeCase_ = 3;
  onChanged();
  ;
  return certificateProviderInstanceBuilder_;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.networksecurity.v1beta1.ValidationCA)
}

// @@protoc_insertion_point(class_scope:google.cloud.networksecurity.v1beta1.ValidationCA)
// Singleton default instance shared by all callers of getDefaultInstance().
private static final com.google.cloud.networksecurity.v1beta1.ValidationCA DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.networksecurity.v1beta1.ValidationCA();
}

public static com.google.cloud.networksecurity.v1beta1.ValidationCA getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that delegates to the parsing constructor (older protobuf codegen style).
private static final com.google.protobuf.Parser<ValidationCA> PARSER =
    new com.google.protobuf.AbstractParser<ValidationCA>() {
      @java.lang.Override
      public ValidationCA parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ValidationCA(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<ValidationCA> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ValidationCA> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.networksecurity.v1beta1.ValidationCA getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
package cz.metacentrum.perun.webgui.tabs.memberstabs; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.event.dom.client.*; import com.google.gwt.regexp.shared.RegExp; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.cellview.client.CellTable; import com.google.gwt.user.client.ui.*; import cz.metacentrum.perun.webgui.client.PerunWebSession; import cz.metacentrum.perun.webgui.client.UiElements; import cz.metacentrum.perun.webgui.client.localization.ButtonTranslation; import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu; import cz.metacentrum.perun.webgui.client.resources.*; import cz.metacentrum.perun.webgui.json.GetEntityById; import cz.metacentrum.perun.webgui.json.JsonCallbackEvents; import cz.metacentrum.perun.webgui.json.JsonUtils; import cz.metacentrum.perun.webgui.json.membersManager.CreateSpecificMember; import cz.metacentrum.perun.webgui.json.membersManager.FindUsersInVo; import cz.metacentrum.perun.webgui.json.membersManager.ValidateMemberAsync; import cz.metacentrum.perun.webgui.json.usersManager.CreatePassword; import cz.metacentrum.perun.webgui.json.usersManager.GenerateAccount; import cz.metacentrum.perun.webgui.json.usersManager.IsLoginAvailable; import cz.metacentrum.perun.webgui.json.usersManager.SetLogin; import cz.metacentrum.perun.webgui.model.*; import cz.metacentrum.perun.webgui.tabs.MembersTabs; import cz.metacentrum.perun.webgui.tabs.TabItem; import cz.metacentrum.perun.webgui.tabs.TabItemWithUrl; import cz.metacentrum.perun.webgui.tabs.UrlMapper; import cz.metacentrum.perun.webgui.widgets.*; import cz.metacentrum.perun.webgui.widgets.CustomButton; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; /** * Create service member in VO. 
*
 * @author Pavel Zlamal <256627@mail.muni.cz>
 */
public class CreateServiceMemberInVoTabItem implements TabItem, TabItemWithUrl {

	/**
	 * vo id
	 */
	private int voId;

	// VO loaded asynchronously by the int constructor; isPrepared() gates on it
	private VirtualOrganization vo;

	/**
	 * Perun web session
	 */
	private PerunWebSession session = PerunWebSession.getInstance();

	/**
	 * Content widget - should be simple panel
	 */
	private SimplePanel contentWidget = new SimplePanel();

	/**
	 * Title widget
	 */
	private Label titleWidget = new Label("Loading VO");

	// last search string typed in the "associate users" step; restored when the table is redrawn
	private String searchString = "";

	/**
	 * Constructor
	 *
	 * @param voId ID of VO into which member should be added
	 */
	public CreateServiceMemberInVoTabItem(int voId){
		this.voId = voId;
		// load the VO asynchronously; the tab is not "prepared" until it arrives
		JsonCallbackEvents events = new JsonCallbackEvents(){
			public void onFinished(JavaScriptObject jso){
				vo = jso.cast();
			}
		};
		GetEntityById callback = new GetEntityById(PerunEntity.VIRTUAL_ORGANIZATION, voId, events);
		callback.retrieveData();
	}

	// Tab is ready to draw once the VO has been fetched (or was passed in directly).
	public boolean isPrepared(){
		return !(vo == null);
	}

	@Override
	public boolean isRefreshParentOnClose() {
		return false;
	}

	@Override
	public void onClose() {

	}

	/**
	 * Constructor
	 *
	 * @param vo VO into which member should be added (already loaded)
	 */
	public CreateServiceMemberInVoTabItem(VirtualOrganization vo){
		this.vo = vo;
		this.voId = vo.getId();
	}

	/**
	 * Builds the multi-step "create service member" wizard UI.
	 * (Method body continues beyond this chunk.)
	 */
	public Widget draw() {

		titleWidget.setText("Create service member");

		final TabItem tab = this;

		// draw the main tab
		final VerticalPanel mainTab = new VerticalPanel();
		mainTab.setSize("100%", "100%");

		// step 1 inputs: identity, namespace/login, optional certificate DNs
		final ExtendedTextBox serviceUserName = new ExtendedTextBox();
		final ExtendedTextBox serviceUserEmail = new ExtendedTextBox();
		final ExtendedTextBox serviceUserLogin = new ExtendedTextBox();
		final ExtendedPasswordBox serviceUserPassword = new ExtendedPasswordBox();
		final ExtendedPasswordBox serviceUserPassword2 = new ExtendedPasswordBox();
		final ListBox namespace = new ListBox();
		final ExtendedTextBox certDN = new ExtendedTextBox();
		final ExtendedTextBox cacertDN = new ExtendedTextBox();
		final String serviceType = "SERVICE";
serviceUserPassword.getTextBox().setWidth("200px");
		serviceUserPassword2.getTextBox().setWidth("200px");

		// Validator: member name must be non-empty.
		final ExtendedTextBox.TextBoxValidator nameValidator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (serviceUserName.getTextBox().getValue().trim().isEmpty()) {
					serviceUserName.setError("Name can't be empty!");
					return false;
				}
				serviceUserName.setOk();
				return true;
			}
		};
		serviceUserName.setValidator(nameValidator);

		// Validator: login is required and format-checked only when a namespace is selected.
		final ExtendedTextBox.TextBoxValidator loginValidator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (namespace.getSelectedIndex() == 0) {
					// do not validate if namespace is not selected
					serviceUserLogin.getTextBox().setValue(null);
					serviceUserLogin.setOk();
					return true;
				}
				if (serviceUserLogin.getTextBox().getValue().trim().isEmpty()) {
					serviceUserLogin.setError("Login can't be empty!");
					return false;
				}
				RegExp regExp = RegExp.compile(Utils.LOGIN_VALUE_MATCHER);
				boolean match = regExp.test(serviceUserLogin.getTextBox().getValue().trim());
				if (!match) {
					serviceUserLogin.setError("Invalid format!");
					return false;
				}
				if (serviceUserLogin.isProcessing() || serviceUserLogin.isHardError()) {
					// keep original message (the async availability check owns the error text)
					return false;
				}
				serviceUserLogin.setOk();
				return true;
			}
		};
		serviceUserLogin.setValidator(loginValidator);

		// Validator: e-mail format.
		final ExtendedTextBox.TextBoxValidator emailValidator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (!JsonUtils.isValidEmail(serviceUserEmail.getTextBox().getValue().trim())) {
					serviceUserEmail.setError("Wrong email format!");
					return false;
				}
				serviceUserEmail.setOk();
				return true;
			}
		};
		serviceUserEmail.setValidator(emailValidator);

		// Validator: first password box. Extra policy rules apply only to the "einfra" namespace.
		final ExtendedTextBox.TextBoxValidator validator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (serviceUserPassword.getTextBox().getValue().trim().equals("")) {
					serviceUserPassword.setError("Password can't be empty!");
					return false;
				}
				// einfra check
				if ("einfra".equals(namespace.getSelectedValue())) {
					// printable ASCII only
					RegExp regExp2 = RegExp.compile("^[\\x20-\\x7E]{1,}$");
					if(regExp2.exec(serviceUserPassword.getTextBox().getValue()) == null){
						serviceUserPassword.setError("Password <b>can`t contain accented characters (diacritics)</b> or non-printing and control characters!");
						return false;
					}
					// check on login in password if login is longer than 2 chars
					// TODO - check for name/surname too
					String pass = serviceUserPassword.getTextBox().getValue();
					String login = serviceUserLogin.getTextBox().getValue();
					if (login.length() > 2) {
						if (Utils.normalizeString(pass).contains(Utils.normalizeString(login)) || Utils.normalizeString(pass).contains(Utils.normalizeString(Utils.reverseString((login))))) {
							serviceUserPassword.setError("Password <b>can't contain login, name or surname</b>, not even backwards!");
							return false;
						}
					}
					// Check that password contains at least 3 of 4 character groups
					RegExp regExpDigit = RegExp.compile("^.*[0-9].*$");
					RegExp regExpLower = RegExp.compile("^.*[a-z].*$");
					RegExp regExpUpper = RegExp.compile("^.*[A-Z].*$");
					RegExp regExpSpec = RegExp.compile("^.*[\\x20-\\x2F\\x3A-\\x40\\x5B-\\x60\\x7B-\\x7E].*$"); // FIXME - are those correct printable specific chars?
					int matchCounter = 0;
					if (regExpDigit.exec(serviceUserPassword.getTextBox().getValue()) != null) matchCounter++;
					if (regExpLower.exec(serviceUserPassword.getTextBox().getValue()) != null) matchCounter++;
					if (regExpUpper.exec(serviceUserPassword.getTextBox().getValue()) != null) matchCounter++;
					if (regExpSpec.exec(serviceUserPassword.getTextBox().getValue()) != null) matchCounter++;
					if(matchCounter < 3){
						serviceUserPassword.setError("Password must consist of <b>at least 3 of 4</b> character groups<ul><li>lower-case letters</li><li>upper-case letters</li><li>digits</li><li>special characters</li></ul>");
						return false;
					}
					// check length
					if (serviceUserPassword.getTextBox().getValue().length() < 10) {
						serviceUserPassword.setError("Password must be <b>at least 10 characters</b> long!");
						return false;
					}
				}
				if (!serviceUserPassword.getTextBox().getValue().equals(serviceUserPassword2.getTextBox().getValue())) {
					serviceUserPassword.setOk();
					serviceUserPassword2.setError("Password in both textboxes must be the same!");
					return false;
				}
				serviceUserPassword.setOk();
				return true;
			}
		};
		serviceUserPassword.setValidator(validator);

		// Validator: re-typed password must match the first box.
		final ExtendedTextBox.TextBoxValidator validator2 = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (!serviceUserPassword2.getTextBox().getValue().equals(serviceUserPassword.getTextBox().getValue())) {
					serviceUserPassword2.setError("Password in both textboxes must be the same!");
					return false;
				} else {
					serviceUserPassword2.setOk();
					return true;
				}
			}
		};
		serviceUserPassword2.setValidator(validator2);

		/*
		 NOTE(review): an older, commented-out duplicate of the two password validators used to
		 live here; removed as dead code (it only differed by also flagging empty values).
		 */

		// Validator: Subject DN and Issuer DN must be both empty or both filled.
		final ExtendedTextBox.TextBoxValidator certDNValidator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if ((certDN.getTextBox().getValue().trim().isEmpty() && cacertDN.getTextBox().getValue().trim().isEmpty()) ||
						(!certDN.getTextBox().getValue().trim().isEmpty() && !cacertDN.getTextBox().getValue().trim().isEmpty())) {
					certDN.setOk();
					cacertDN.setOk();
					return true;
				} else {
					if (certDN.getTextBox().getValue().trim().isEmpty()) {
						certDN.setError("Value can't be empty!");
					}
					if (cacertDN.getTextBox().getValue().trim().isEmpty()) {
						cacertDN.setError("Value can't be empty!");
					}
				}
				return false;
			}
		};
		certDN.setValidator(certDNValidator);
		cacertDN.setValidator(certDNValidator);

		// make value empty
		namespace.addItem("Not selected", "");
		for (String name : Utils.getSupportedPasswordNamespaces()) {
			namespace.addItem(name.toUpperCase(), name);
		}

		// Step 1 form: identity fields.
		final FlexTable layout = new FlexTable();
		layout.setCellPadding(5);
		layout.setHTML(0, 0, "<h3>1. Create service identity</h3>");
		layout.getFlexCellFormatter().setColSpan(0, 0, 3);
		layout.setHTML(1, 0, "<strong>Member's name: </strong>");
		layout.setWidget(1, 1, serviceUserName);
		layout.setHTML(2, 0, "<strong>Member's email: </strong>");
		layout.setWidget(2, 1, serviceUserEmail);
		layout.setHTML(3, 0, "<strong>Namespace: </strong>");
		layout.setWidget(3, 1, namespace);
		layout.setHTML(4, 0, "<strong>Login: </strong>");
		layout.setWidget(4, 1, serviceUserLogin);

		final Label label = new Label("You will be assigned with available login");
		label.setVisible(false);
		layout.setWidget(5, 0, label);
		layout.getFlexCellFormatter().setStyleName(5, 0, "inputFormInlineComment");
		layout.getFlexCellFormatter().setColSpan(5, 0, 2);

		layout.setHTML(6, 0, "<strong>Subject DN: </strong>");
		layout.setWidget(6, 1, certDN);
		layout.setHTML(7, 0, "<strong>Issuer DN: </strong>");
		layout.setWidget(7, 1, cacertDN);

		// Step 2 layout (associate real users), hidden until step 1 passes validation.
		final FlexTable firstTabLayout = new FlexTable();
		firstTabLayout.setSize("100%", "100%");
		firstTabLayout.setVisible(false);

		final AddRemoveItemsTable<User> itemsTable = new AddRemoveItemsTable<User>(true);
		itemsTable.addItem(session.getUser());

		// Step 3 panel (password), hidden until step 2 finishes.
		final VerticalPanel secondTabPanel = new VerticalPanel();
		secondTabPanel.setSize("100%", "100%");
		secondTabPanel.setVisible(false);

		mainTab.add(layout);
		mainTab.add(firstTabLayout);
		mainTab.add(secondTabPanel);

		// disable login by default (first option)
		serviceUserLogin.getTextBox().setEnabled(false);

		final CustomButton cb = new CustomButton("Continue", SmallIcons.INSTANCE.addIcon(), new ClickHandler() {
			public void onClick(ClickEvent clickEvent) {

				// check (MU logins are generated server-side, so skip local login validation there)
				if (!namespace.getSelectedValue().equals("mu") && namespace.getSelectedIndex() != 0 && !loginValidator.validateTextBox()) return;
				if (!nameValidator.validateTextBox()) return;
				if (!emailValidator.validateTextBox()) return;
				if (!certDNValidator.validateTextBox()) return;

				// change to larger tab
				session.getTabManager().changeStyleOfInnerTab(true);

				// first tab panel
				firstTabLayout.setHTML(0, 0,
"<h3>2. Associate real users</h3>"); firstTabLayout.getFlexCellFormatter().setColSpan(0, 0, 2); layout.setVisible(false); firstTabLayout.setVisible(true); final FindUsersInVo callback = new FindUsersInVo(); // Service users can't own another Service or Guest (Sponsored) account. callback.hideService(true); // HORIZONTAL MENU TabMenu tabMenu = new TabMenu(); // get the table final CellTable<User> table = callback.getTable(); // search textbox ExtendedTextBox searchBox = tabMenu.addSearchWidget(new PerunSearchEvent() { @Override public void searchFor(String text) { callback.searchFor(text, voId); searchString = text; } }, ButtonTranslation.INSTANCE.searchUsers()); final CustomButton cb = new CustomButton("Continue", SmallIcons.INSTANCE.arrowRightIcon()); cb.addClickHandler(new ClickHandler() { public void onClick(ClickEvent clickEvent) { // check if (itemsTable.getList().isEmpty()) { new Confirm("No user associated",new HTML("You must associate at least one real user to service member."), true).show(); return; } // create member + user CreateSpecificMember request = new CreateSpecificMember(JsonCallbackEvents.disableButtonEvents(cb, new JsonCallbackEvents(){ public void onFinished(JavaScriptObject jso){ final Member member = jso.cast(); for (User u : itemsTable.getList()) { if (u.getId() == session.getUser().getId()) { // set local authz if one of associated users is us session.addEditableUser(member.getUserId()); } } if (namespace.getSelectedIndex() == 0) { // we didn't set login, hence skip password setting session.getTabManager().closeTab(tab, true); return; } // change to small tab session.getTabManager().changeStyleOfInnerTab(false); secondTabPanel.add(new HTML("<h3>3.Set password for: "+serviceUserLogin.getTextBox().getValue().trim()+"</h3>")); final CustomButton button = new CustomButton("Set password", SmallIcons.INSTANCE.keyIcon()); button.addClickHandler(new ClickHandler() { public void onClick(ClickEvent clickEvent) { if (!validator.validateTextBox() || 
!validator2.validateTextBox()) { // one of input boxes for passwords is wrong return; } final String namespaceValue = namespace.getSelectedValue(); if ("mu".equals(namespaceValue)) { final GenerateAccount req = new GenerateAccount(JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents() { public void onFinished(JavaScriptObject jso) { BasicOverlayType basic = jso.cast(); final String login = basic.getCustomProperty("urn:perun:user:attribute-def:def:login-namespace:mu"); SetLogin setLogin = new SetLogin(JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents(){ @Override public void onFinished(JavaScriptObject jso) { // VALIDATE PASSWORD - SET EXT SOURCES AND VALIDATE MEMBER CreatePassword req = new CreatePassword(JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents(){ @Override public void onFinished(JavaScriptObject jso) { // validate member when all kerberos logins are set ValidateMemberAsync req2 = new ValidateMemberAsync(JsonCallbackEvents.closeTabDisableButtonEvents(button, tab, true)); req2.validateMemberAsync(member); } })); // validate login returned from account generation req.validatePassword(member.getUserId(), login, namespaceValue); // show assigned login UiElements.generateInfo("Assigned login", "You were assigned with login <b>"+login+"</b> in namespace MU."); } @Override public void onError(PerunError error) { UiElements.generateError(error, "Saving login failed", "You were assigned with login <b>"+login+"</b> in namespace MU but saving it to user failed. 
<b>Please copy your login and contact support at <a href=\"mailto:"+Utils.perunReportEmailAddress()+"\">"+Utils.perunReportEmailAddress()+"</a>.</b>"); // validate member when all logins are set ValidateMemberAsync req2 = new ValidateMemberAsync(JsonCallbackEvents.closeTabDisableButtonEvents(button, tab, true)); req2.validateMemberAsync(member); } })); setLogin.setLogin(member.getUserId(), "mu", login); } })); final Map<String, String> params = new HashMap<String, String>(); GetEntityById get = new GetEntityById(PerunEntity.RICH_MEMBER, member.getId(), JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents(){ @Override public void onFinished(JavaScriptObject jso) { RichMember rm = jso.cast(); params.put("urn:perun:user:attribute-def:core:firstName", rm.getUser().getFirstName()); params.put("urn:perun:user:attribute-def:core:lastName", rm.getUser().getLastName()); params.put("urn:perun:member:attribute-def:def:mail", serviceUserEmail.getTextBox().getValue().trim()); req.generateAccount(namespaceValue, serviceUserPassword.getTextBox().getValue(), params); } })); get.retrieveData(); } else { // create password which sets also user ext sources CreatePassword req = new CreatePassword(JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents(){ public void onFinished(JavaScriptObject jso) { // validate member when all kerberos logins are set ValidateMemberAsync req2 = new ValidateMemberAsync(JsonCallbackEvents.closeTabDisableButtonEvents(button, tab, true)); req2.validateMemberAsync(member); } })); req.createPassword(member.getUserId(), serviceUserLogin.getTextBox().getValue().trim(), namespaceValue, serviceUserPassword.getTextBox().getValue()); } } }); final CustomButton skipButton = new CustomButton("Skip", SmallIcons.INSTANCE.arrowRightIcon()); skipButton.addClickHandler(new ClickHandler() { public void onClick(ClickEvent clickEvent) { final String namespaceValue = namespace.getSelectedValue(); if ("mu".equals(namespaceValue)) { final 
GenerateAccount req = new GenerateAccount(JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents() { @Override public void onFinished(JavaScriptObject jso) { BasicOverlayType basic = jso.cast(); final String login = basic.getCustomProperty("urn:perun:user:attribute-def:def:login-namespace:mu"); SetLogin setLogin = new SetLogin(JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents() { @Override public void onFinished(JavaScriptObject jso) { // VALIDATE PASSWORD - SET EXT SOURCES AND VALIDATE MEMBER CreatePassword req = new CreatePassword(JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents(){ @Override public void onFinished(JavaScriptObject jso) { // validate member when all kerberos logins are set ValidateMemberAsync req2 = new ValidateMemberAsync(JsonCallbackEvents.closeTabDisableButtonEvents(button, tab, true)); req2.validateMemberAsync(member); } })); // validate login returned from account generation req.validatePassword(member.getUserId(), login, namespaceValue); // show assigned login UiElements.generateInfo("Assigned login", "You were assigned with login <b>" + login + "</b> in namespace MU."); } @Override public void onError(PerunError error) { // validate member when all logins are set ValidateMemberAsync req2 = new ValidateMemberAsync(JsonCallbackEvents.closeTabDisableButtonEvents(button, tab, true)); req2.validateMemberAsync(member); } })); setLogin.setLogin(member.getUserId(), "mu", login); } })); final Map<String, String> params = new HashMap<String, String>(); GetEntityById get = new GetEntityById(PerunEntity.RICH_MEMBER, member.getId(), JsonCallbackEvents.disableButtonEvents(button, new JsonCallbackEvents() { @Override public void onFinished(JavaScriptObject jso) { RichMember rm = jso.cast(); params.put("urn:perun:user:attribute-def:core:firstName", rm.getUser().getFirstName()); params.put("urn:perun:user:attribute-def:core:lastName", rm.getUser().getLastName()); 
params.put("urn:perun:member:attribute-def:def:mail", serviceUserEmail.getTextBox().getValue().trim());
													// "Skip" path: generate the MU account without a password
													req.generateAccount(namespaceValue, null, params);
												}
											}));
											get.retrieveData();

										} else {

											CreatePassword req = new CreatePassword(JsonCallbackEvents.disableButtonEvents(skipButton, new JsonCallbackEvents() {
												public void onFinished(JavaScriptObject jso) {
													// validate member when all kerberos logins are set
													ValidateMemberAsync req2 = new ValidateMemberAsync(JsonCallbackEvents.closeTabDisableButtonEvents(skipButton, tab, true));
													req2.validateMemberAsync(member);
												}
											}));
											// set empty password for service member if "skipped"
											req.createRandomPassword(member.getUserId(), serviceUserLogin.getTextBox().getValue().trim(), namespaceValue);

										}
									}
								});

								// Step 3 form: password boxes + policy hint.
								FlexTable ft = new FlexTable();
								ft.setStyleName("inputFormFlexTable");
								ft.setWidth("400px");
								ft.setHTML(0, 0, "Password:");
								ft.setWidget(0, 1, serviceUserPassword);
								ft.getFlexCellFormatter().setStyleName(0, 0, "itemName");
								ft.setHTML(1, 0, "Re-type&nbsp;password:");
								ft.setWidget(1, 1, serviceUserPassword2);
								ft.getFlexCellFormatter().setStyleName(1, 0, "itemName");

								if ("einfra".equals(namespace.getSelectedValue())) {
									ft.setHTML(2, 0, "Password must <ul><li>contain only printing (non-accented) characters<li>be at least 10 characters long<li>consist of at least 3 of 4 character groups<ul><li>lower-case letters<li>upper-case letters<li>digits<li>special characters</ul></ul>");
								} else {
									ft.setHTML(2, 0, "Please <b>avoid using accented characters</b>. It might not be supported by all backend components and services.");
								}
								ft.getFlexCellFormatter().setColSpan(2, 0, 2);
								ft.getCellFormatter().setStyleName(2, 0,"inputFormInlineComment");

								ft.setWidget(3, 1, skipButton);
								ft.getFlexCellFormatter().addStyleName(3, 1, "align-right");
								ft.setWidget(4, 1, button);
								ft.getFlexCellFormatter().addStyleName(4, 1, "align-right");

								secondTabPanel.add(ft);

								firstTabLayout.setVisible(false); // hide 2nd panel
								secondTabPanel.setVisible(true); // show 3rd panel

							};
						}));

						request.createMember(voId, serviceUserName.getTextBox().getValue().trim(), serviceUserEmail.getTextBox().getValue().trim(), itemsTable.getList(), namespace.getValue(namespace.getSelectedIndex()), serviceUserLogin.getTextBox().getValue().trim(), certDN.getTextBox().getValue().trim(), cacertDN.getTextBox().getValue().trim(), serviceType );
					}
				});

				// we have ourselves already assigned
				//cb.setEnabled(false);

				CustomButton button = TabMenu.getPredefinedButton(ButtonType.ADD, "Add selected users to service member");
				button.addClickHandler(new ClickHandler() {
					public void onClick(ClickEvent clickEvent) {
						ArrayList<User> list = callback.getTableSelectedList();
						if (UiElements.cantSaveEmptyListDialogBox(list)) {
							// skip self (the creator is already in the "to be associated" table)
							ArrayList<User> list2 = new ArrayList<User>();
							for (User user : list) {
								if (user != null && user.getId() != session.getUser().getId()) {
									list2.add(user);
								}
							}
							itemsTable.addItems(list2);
							cb.setEnabled(true);
							callback.clearTableSelectedSet();
						}
					}
				});
				button.setEnabled(false);
				tabMenu.addWidget(button);
				JsonUtils.addTableManagedButton(callback, table, button);

				// add finish button to menu
				tabMenu.addWidget(cb);

				// if some text has been searched before
				if(!searchString.equals("")) {
					searchBox.getTextBox().setText(searchString);
					callback.searchFor(searchString, voId);
				}

				final ScrollPanel sp = new ScrollPanel(table);
				table.addStyleName("perun-table");
				sp.addStyleName("perun-tableScrollPanel");
				session.getUiElements().resizeSmallTabPanel(sp, 350, tab);

				firstTabLayout.setWidget(1, 0, tabMenu);
				firstTabLayout.setWidget(2, 0, sp);
				firstTabLayout.setHTML(1, 1,"<h3>To be associated:</h3>");
				firstTabLayout.setWidget(2, 1, itemsTable);
				firstTabLayout.getFlexCellFormatter().setWidth(2, 0, "75%");
				firstTabLayout.getFlexCellFormatter().setWidth(2, 1, "25%");
				firstTabLayout.getFlexCellFormatter().setVerticalAlignment(2, 1, HasVerticalAlignment.ALIGN_TOP);
				firstTabLayout.getFlexCellFormatter().setVerticalAlignment(2, 0, HasVerticalAlignment.ALIGN_TOP);

				// actions when added items or removed items
				itemsTable.setEvents(new AddRemoveItemsTable.HandleItemsAction<User>() {
					@Override
					public void onAdd(User object) {
						cb.setEnabled(true);
					}
					@Override
					public void onRemove(User object) {
						if (object.equals(session.getUser())) {
							// self-removal is undone immediately - the creator must stay associated until finished
							itemsTable.addItem(object);
							UiElements.generateInfo("Can't remove yourself", "<p>You can't remove yourself yet. You wouldn't be able to finish service member configuration. Please remove yourself afterwards.");
						}
						if (itemsTable.getList().isEmpty()) {
							cb.setEnabled(false);
						}
					}
				});

			}
		});

		// check login availability
		serviceUserLogin.getTextBox().addKeyUpHandler(new KeyUpHandler() {
			@Override
			public void onKeyUp(KeyUpEvent keyUpEvent) {
				if (keyUpEvent.isDownArrow() || keyUpEvent.isUpArrow() || keyUpEvent.isLeftArrow() || keyUpEvent.isRightArrow()) {
					// do not trigger when no text input
					return;
				}
				final String login = serviceUserLogin.getTextBox().getValue().trim();
				final String loginNamespace = namespace.getValue(namespace.getSelectedIndex());
				// trigger new validation on checked input or if previously was hard error
				if ((!login.isEmpty() && RegExp.compile(Utils.LOGIN_VALUE_MATCHER).test(login)) || serviceUserLogin.isHardError()) {
					new IsLoginAvailable(loginNamespace, login, new JsonCallbackEvents(){
						@Override
						public void onFinished(JavaScriptObject jso) {
							// UPDATE RESULT ONLY IF CONTENT OF LOGIN BOX IS SAME AS ON CALLBACK START
							if (serviceUserLogin.getTextBox().getValue().trim().equals(login)) {
								BasicOverlayType bo = jso.cast();
								serviceUserLogin.setProcessing(false);
								if (!bo.getBoolean()) {
									serviceUserLogin.setHardError("Login is already in use!");
								} else {
									serviceUserLogin.removeHardError();
									loginValidator.validateTextBox();
								}
							}
						}
						@Override
						public void onLoadingStart(){
							if (serviceUserLogin.getTextBox().getValue().trim().equals(login)) {
								serviceUserLogin.removeHardError();
								serviceUserLogin.setProcessing(true);
							}
						}
						@Override
						public void onError(PerunError error) {
							// response is relevant to current value
							if (serviceUserLogin.getTextBox().getValue().trim().equals(login)) {
								if ("InvalidLoginException".equalsIgnoreCase(error.getName())) {
									serviceUserLogin.setProcessing(false);
									String text = error.getErrorInfo();
									// NOTE(review): split(":", 2)[1] throws if the message has no ":",
									// and the null check below can never trigger - verify upstream format.
									text = text.split(":", 2)[1];
									text = (text == null || text.isEmpty()) ? error.getErrorInfo() : text;
									serviceUserLogin.setHardError(text);
								} else {
									// generic error
									serviceUserLogin.setProcessing(false);
									serviceUserLogin.setHardError("Unable to check if login is available!");
								}
							}
						}
					}).retrieveData();
				}
			}
		});

		namespace.addChangeHandler(new ChangeHandler() {
			@Override
			public void onChange(ChangeEvent changeEvent) {
				if (namespace.getSelectedIndex() == 0) {
					// do not set login
					serviceUserLogin.getTextBox().setEnabled(false);
					serviceUserLogin.getTextBox().setValue(null);
					loginValidator.validateTextBox();
					label.setVisible(false);
				} else if (namespace.getSelectedValue().equals("mu")) {
					// MU logins are generated server-side; the box stays disabled
					serviceUserLogin.getTextBox().setEnabled(false);
					label.setVisible(true);
				} else {
					serviceUserLogin.getTextBox().setEnabled(true);
					label.setVisible(false);
				}
				if (namespace.getSelectedIndex() != 0) {
					// do not check login if not desired to set
					final String login = serviceUserLogin.getTextBox().getValue().trim();
					final String loginNamespace = namespace.getValue(namespace.getSelectedIndex());
					if ((!login.isEmpty() && RegExp.compile(Utils.LOGIN_VALUE_MATCHER).test(login)) || serviceUserLogin.isHardError()) {
						new IsLoginAvailable(loginNamespace, login, new JsonCallbackEvents() {
							@Override
							public void onFinished(JavaScriptObject jso) {
								// UPDATE RESULT ONLY IF CONTENT OF LOGIN BOX IS SAME AS ON CALLBACK START
								if (serviceUserLogin.getTextBox().getValue().trim().equals(login)) {
									serviceUserLogin.setProcessing(false);
									BasicOverlayType bo = jso.cast();
									if (!bo.getBoolean()) {
										serviceUserLogin.setHardError("Login is already in use!");
									} else {
										serviceUserLogin.removeHardError();
										loginValidator.validateTextBox();
									}
								}
							}
							@Override
							public void onLoadingStart() {
								if (serviceUserLogin.getTextBox().getValue().trim().equals(login)) {
									serviceUserLogin.removeHardError();
									serviceUserLogin.setProcessing(true);
									loginValidator.validateTextBox();
								}
							}
							@Override
							public void onError(PerunError error) {
								// response is relevant to current value
								if (serviceUserLogin.getTextBox().getValue().trim().equals(login)) {
									if ("InvalidLoginException".equalsIgnoreCase(error.getName())) {
										serviceUserLogin.setProcessing(false);
										String text = error.getErrorInfo();
										text = text.split(":", 2)[1];
										text = (text == null || text.isEmpty()) ?
error.getErrorInfo() : text; serviceUserLogin.setHardError(text); } else { // generic error serviceUserLogin.setProcessing(false); serviceUserLogin.setHardError("Unable to check if login is available!"); } } } }).retrieveData(); } } } }); if (session.isPerunAdmin()) { layout.setWidget(9, 0, cb); layout.getFlexCellFormatter().setHorizontalAlignment(9, 0, HasHorizontalAlignment.ALIGN_RIGHT); layout.getFlexCellFormatter().setColSpan(9, 0, 2); } else { layout.setWidget(8, 0, cb); layout.getFlexCellFormatter().setHorizontalAlignment(8, 0, HasHorizontalAlignment.ALIGN_RIGHT); layout.getFlexCellFormatter().setColSpan(8, 0, 2); } this.contentWidget.setWidget(mainTab); return getWidget(); } public Widget getWidget() { return this.contentWidget; } public Widget getTitle() { return this.titleWidget; } public ImageResource getIcon() { return SmallIcons.INSTANCE.addIcon(); } @Override public int hashCode() { final int prime = 877; int result = 1; result = prime * result + voId; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; CreateServiceMemberInVoTabItem other = (CreateServiceMemberInVoTabItem) obj; if (voId != other.voId) return false; return true; } public boolean multipleInstancesEnabled() { return false; } public void open() { session.getUiElements().getMenu().openMenu(MainMenu.VO_ADMIN); if(vo != null){ session.setActiveVo(vo); return; } session.setActiveVoId(voId); } public boolean isAuthorized() { if (session.isVoAdmin(voId)) { return true; } else { return false; } } public final static String URL = "add-service-member"; public String getUrl() { return URL; } public String getUrlWithParameters() { return MembersTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?vo=" + voId; } static public CreateServiceMemberInVoTabItem load(Map<String, String> parameters) { int gid = Integer.parseInt(parameters.get("vo")); return new 
CreateServiceMemberInVoTabItem(gid); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.processor.internals; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.MockConsumer; import org.apache.kafka.clients.consumer.OffsetResetStrategy; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.errors.TimeoutException; import org.apache.kafka.common.utils.LogContext; import org.apache.kafka.common.utils.Utils; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.errors.TaskMigratedException; import org.apache.kafka.streams.processor.StateRestoreListener; import org.apache.kafka.test.MockRestoreCallback; import org.apache.kafka.test.MockStateRestoreListener; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; import org.easymock.Mock; import org.easymock.MockType; import org.hamcrest.CoreMatchers; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import static 
org.apache.kafka.test.MockStateRestoreListener.RESTORE_BATCH;
import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_END;
import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_START;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Unit tests for {@link StoreChangelogReader}. All broker interaction is
 * simulated with a {@link MockConsumer}; restore progress is observed through
 * {@link MockStateRestoreListener} callbacks.
 */
@RunWith(EasyMockRunner.class)
public class StoreChangelogReaderTest {

    // NICE mocks tolerate unexpected calls (they return type defaults) instead of failing.
    @Mock(type = MockType.NICE)
    private RestoringTasks active;
    @Mock(type = MockType.NICE)
    private StreamTask task;

    // callback is wrapped by restoreListener, so restored records land in callback.restored.
    private final MockStateRestoreListener callback = new MockStateRestoreListener();
    private final CompositeRestoreListener restoreListener = new CompositeRestoreListener(callback);
    private final MockConsumer<byte[], byte[]> consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
    private final StateRestoreListener stateRestoreListener = new MockStateRestoreListener();
    private final TopicPartition topicPartition = new TopicPartition("topic", 0);
    private final LogContext logContext = new LogContext("test-reader ");
    private final StoreChangelogReader changelogReader = new StoreChangelogReader(consumer, stateRestoreListener, logContext);

    @Before
    public void setUp() {
        restoreListener.setUserRestoreListener(stateRestoreListener);
    }

    // A TimeoutException from listTopics() must be handled (swallowed), not propagated.
    @Test
    public void shouldRequestTopicsAndHandleTimeoutException() {
        final AtomicBoolean functionCalled = new AtomicBoolean(false);
        final MockConsumer<byte[], byte[]> consumer = new MockConsumer<byte[], byte[]>(OffsetResetStrategy.EARLIEST) {
            @Override
            public Map<String, List<PartitionInfo>> listTopics() {
                functionCalled.set(true);
                throw new TimeoutException("KABOOM!");
            }
        };

        final StoreChangelogReader changelogReader = new StoreChangelogReader(consumer, stateRestoreListener, logContext);
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
        changelogReader.restore(active);
        assertTrue(functionCalled.get());
    }

    // The restore consumer must not have an active subscription when restore starts.
    @Test
    public void shouldThrowExceptionIfConsumerHasCurrentSubscription() {
        final StateRestorer mockRestorer = EasyMock.mock(StateRestorer.class);
        mockRestorer.setUserRestoreListener(stateRestoreListener);
        expect(mockRestorer.partition()).andReturn(new TopicPartition("sometopic", 0)).andReturn(new TopicPartition("sometopic", 0));
        EasyMock.replay(mockRestorer);
        changelogReader.register(mockRestorer);

        consumer.subscribe(Collections.singleton("sometopic"));

        try {
            changelogReader.restore(active);
            fail("Should have thrown IllegalStateException");
        } catch (final StreamsException expected) {
            // ok
        }
    }

    @Test
    public void shouldRestoreAllMessagesFromBeginningWhenCheckpointNull() {
        final int messages = 10;
        setupConsumer(messages, topicPartition);
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
        changelogReader.restore(active);
        assertThat(callback.restored.size(), equalTo(messages));
    }

    // With a checkpoint at offset 5, only offsets 5..9 of the 10 records are restored.
    @Test
    public void shouldRestoreMessagesFromCheckpoint() {
        final int messages = 10;
        setupConsumer(messages, topicPartition);
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, 5L, Long.MAX_VALUE, true, "storeName"));
        changelogReader.restore(active);
        assertThat(callback.restored.size(), equalTo(5));
    }

    @Test
    public void shouldClearAssignmentAtEndOfRestore() {
        final int messages = 1;
        setupConsumer(messages, topicPartition);
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
        changelogReader.restore(active);
        assertThat(consumer.assignment(), equalTo(Collections.<TopicPartition>emptySet()));
    }

    // The offsetLimit parameter (3 here) caps how far restoration proceeds.
    @Test
    public void shouldRestoreToLimitWhenSupplied() {
        setupConsumer(10, topicPartition);
        final StateRestorer restorer = new StateRestorer(topicPartition, restoreListener, null, 3, true, "storeName");
        changelogReader.register(restorer);
        changelogReader.restore(active);
        assertThat(callback.restored.size(), equalTo(3));
        assertThat(restorer.restoredOffset(), equalTo(3L));
    }

    @Test
    public void shouldRestoreMultipleStores() {
        final TopicPartition one = new TopicPartition("one", 0);
        final TopicPartition two = new TopicPartition("two", 0);
        final MockRestoreCallback callbackOne = new MockRestoreCallback();
        final MockRestoreCallback callbackTwo = new MockRestoreCallback();
        final CompositeRestoreListener restoreListener1 = new CompositeRestoreListener(callbackOne);
        final CompositeRestoreListener restoreListener2 = new CompositeRestoreListener(callbackTwo);
        setupConsumer(10, topicPartition);
        setupConsumer(5, one);
        setupConsumer(3, two);

        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName1"));
        changelogReader.register(new StateRestorer(one, restoreListener1, null, Long.MAX_VALUE, true, "storeName2"));
        changelogReader.register(new StateRestorer(two, restoreListener2, null, Long.MAX_VALUE, true, "storeName3"));

        expect(active.restoringTaskFor(one)).andReturn(null);
        expect(active.restoringTaskFor(two)).andReturn(null);
        replay(active);
        changelogReader.restore(active);

        assertThat(callback.restored.size(), equalTo(10));
        assertThat(callbackOne.restored.size(), equalTo(5));
        assertThat(callbackTwo.restored.size(), equalTo(3));
    }

    // Same as above, but also verifies the start/batch/end listener notifications per store.
    @Test
    public void shouldRestoreAndNotifyMultipleStores() throws Exception {
        final TopicPartition one = new TopicPartition("one", 0);
        final TopicPartition two = new TopicPartition("two", 0);
        final MockStateRestoreListener callbackOne = new MockStateRestoreListener();
        final MockStateRestoreListener callbackTwo = new MockStateRestoreListener();
        final CompositeRestoreListener restoreListener1 = new CompositeRestoreListener(callbackOne);
        final CompositeRestoreListener restoreListener2 = new CompositeRestoreListener(callbackTwo);
        setupConsumer(10, topicPartition);
        setupConsumer(5, one);
        setupConsumer(3, two);

        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName1"));
        changelogReader.register(new StateRestorer(one, restoreListener1, null, Long.MAX_VALUE, true, "storeName2"));
        changelogReader.register(new StateRestorer(two, restoreListener2, null, Long.MAX_VALUE, true, "storeName3"));

        expect(active.restoringTaskFor(one)).andReturn(null);
        expect(active.restoringTaskFor(two)).andReturn(null);
        replay(active);
        changelogReader.restore(active);

        assertThat(callback.restored.size(), equalTo(10));
        assertThat(callbackOne.restored.size(), equalTo(5));
        assertThat(callbackTwo.restored.size(), equalTo(3));

        assertAllCallbackStatesExecuted(callback, "storeName1");
        assertCorrectOffsetsReportedByListener(callback, 0L, 10L, 10L);

        assertAllCallbackStatesExecuted(callbackOne, "storeName2");
        assertCorrectOffsetsReportedByListener(callbackOne, 0L, 5L, 5L);

        assertAllCallbackStatesExecuted(callbackTwo, "storeName3");
        assertCorrectOffsetsReportedByListener(callbackTwo, 0L, 3L, 3L);
    }

    // Asserts that RESTORE_START, RESTORE_BATCH and RESTORE_END all fired for the given store.
    private void assertAllCallbackStatesExecuted(final MockStateRestoreListener restoreListener,
                                                 final String storeName) {
        assertThat(restoreListener.storeNameCalledStates.get(RESTORE_START), equalTo(storeName));
        assertThat(restoreListener.storeNameCalledStates.get(RESTORE_BATCH), equalTo(storeName));
        assertThat(restoreListener.storeNameCalledStates.get(RESTORE_END), equalTo(storeName));
    }

    // Asserts the offsets the listener reported at start, after the last batch, and at end.
    private void assertCorrectOffsetsReportedByListener(final MockStateRestoreListener restoreListener,
                                                        final long startOffset,
                                                        final long batchOffset,
                                                        final long endOffset) {
        assertThat(restoreListener.restoreStartOffset, equalTo(startOffset));
        assertThat(restoreListener.restoredBatchOffset, equalTo(batchOffset));
        assertThat(restoreListener.restoreEndOffset, equalTo(endOffset));
    }

    @Test
    public void shouldNotRestoreAnythingWhenPartitionIsEmpty() {
        final StateRestorer restorer = new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName");
        setupConsumer(0, topicPartition);
        changelogReader.register(restorer);

        changelogReader.restore(active);
        assertThat(callback.restored.size(), equalTo(0));
        assertThat(restorer.restoredOffset(), equalTo(0L));
    }

    // Checkpoint already at the end offset: nothing to replay.
    @Test
    public void shouldNotRestoreAnythingWhenCheckpointAtEndOffset() {
        final Long endOffset = 10L;
        setupConsumer(endOffset, topicPartition);
        final StateRestorer restorer = new StateRestorer(topicPartition, restoreListener, endOffset, Long.MAX_VALUE, true, "storeName");
        changelogReader.register(restorer);

        changelogReader.restore(active);
        assertThat(callback.restored.size(), equalTo(0));
        assertThat(restorer.restoredOffset(), equalTo(endOffset));
    }

    @Test
    public void shouldReturnRestoredOffsetsForPersistentStores() {
        setupConsumer(10, topicPartition);
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
        changelogReader.restore(active);
        final Map<TopicPartition, Long> restoredOffsets = changelogReader.restoredOffsets();
        assertThat(restoredOffsets, equalTo(Collections.singletonMap(topicPartition, 10L)));
    }

    // Only persistent stores (flag true) report restored offsets for checkpointing.
    @Test
    public void shouldNotReturnRestoredOffsetsForNonPersistentStore() {
        setupConsumer(10, topicPartition);
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, false, "storeName"));
        changelogReader.restore(active);
        final Map<TopicPartition, Long> restoredOffsets = changelogReader.restoredOffsets();
        assertThat(restoredOffsets, equalTo(Collections.<TopicPartition, Long>emptyMap()));
    }

    // The record at offset 1 has a null key and must be skipped (tombstone-like garbage).
    @Test
    public void shouldIgnoreNullKeysWhenRestoring() {
        assignPartition(3, topicPartition);
        final byte[] bytes = new byte[0];
        consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), 0, bytes, bytes));
        consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), 1, (byte[]) null, bytes));
        consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), 2, bytes, bytes));
        consumer.assign(Collections.singletonList(topicPartition));
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, false, "storeName"));
        changelogReader.restore(active);
        assertThat(callback.restored, CoreMatchers.equalTo(Utils.mkList(KeyValue.pair(bytes, bytes), KeyValue.pair(bytes, bytes))));
    }

    @Test
    public void shouldCompleteImmediatelyWhenEndOffsetIs0() {
        final Collection<TopicPartition> expected = Collections.singleton(topicPartition);
        setupConsumer(0, topicPartition);
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "store"));
        final Collection<TopicPartition> restored = changelogReader.restore(active);
        assertThat(restored, equalTo(expected));
    }

    // Partitions registered after the first restore() call must still be restored.
    @Test
    public void shouldRestorePartitionsRegisteredPostInitialization() {
        final MockRestoreCallback callbackTwo = new MockRestoreCallback();
        final CompositeRestoreListener restoreListener2 = new CompositeRestoreListener(callbackTwo);

        setupConsumer(1, topicPartition);
        consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 10L));
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, false, "storeName"));

        final TopicPartition postInitialization = new TopicPartition("other", 0);
        expect(active.restoringTaskFor(topicPartition)).andReturn(null);
        expect(active.restoringTaskFor(topicPartition)).andReturn(null);
        expect(active.restoringTaskFor(postInitialization)).andReturn(null);
        replay(active);

        // First pass: end offset (10) not yet reached, so nothing is completed.
        assertTrue(changelogReader.restore(active).isEmpty());

        addRecords(9, topicPartition, 1);

        setupConsumer(3, postInitialization);
        consumer.updateBeginningOffsets(Collections.singletonMap(postInitialization, 0L));
        consumer.updateEndOffsets(Collections.singletonMap(postInitialization, 3L));

        changelogReader.register(new StateRestorer(postInitialization, restoreListener2, null, Long.MAX_VALUE, false, "otherStore"));

        final Collection<TopicPartition> expected = Utils.mkSet(topicPartition, postInitialization);
        consumer.assign(expected);

        assertThat(changelogReader.restore(active), equalTo(expected));
        assertThat(callback.restored.size(), equalTo(10));
        assertThat(callbackTwo.restored.size(), equalTo(3));
    }

    // Reading past the changelog's end offset means the task was migrated elsewhere.
    @Test
    public void shouldThrowTaskMigratedExceptionIfEndOffsetGetsExceededDuringRestore() {
        final int messages = 10;
        setupConsumer(messages, topicPartition);
        // End offset (5) is lower than the number of available records (10).
        consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 5L));
        changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));

        expect(active.restoringTaskFor(topicPartition)).andReturn(task);
        replay(active);

        try {
            changelogReader.restore(active);
            fail("Should have thrown TaskMigratedException");
        } catch (final TaskMigratedException expected) {
            /* ignore */
        }
    }

    // Populates the partition with `messages` records starting at offset 0 and clears the assignment.
    private void setupConsumer(final long messages, final TopicPartition topicPartition) {
        assignPartition(messages, topicPartition);
        addRecords(messages, topicPartition, 0);
        consumer.assign(Collections.<TopicPartition>emptyList());
    }

    // Adds `messages` empty-key/value records at consecutive offsets from startingOffset.
    private void addRecords(final long messages, final TopicPartition topicPartition, final int startingOffset) {
        for (int i = 0; i < messages; i++) {
            consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), startingOffset + i, new byte[0], new byte[0]));
        }
    }

    // Registers partition metadata, sets beginning/end offsets and assigns the partition.
    private void assignPartition(final long messages, final TopicPartition topicPartition) {
        consumer.updatePartitions(topicPartition.topic(),
                                  Collections.singletonList(
                                          new PartitionInfo(topicPartition.topic(),
                                                            topicPartition.partition(),
                                                            null,
                                                            null,
                                                            null)));
        consumer.updateBeginningOffsets(Collections.singletonMap(topicPartition, 0L));
        consumer.updateEndOffsets(Collections.singletonMap(topicPartition, Math.max(0, messages)));
        consumer.assign(Collections.singletonList(topicPartition));
    }
}
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.hc.core5.http.impl.io; import java.io.IOException; import java.io.OutputStream; import java.util.List; import org.apache.hc.core5.function.Supplier; import org.apache.hc.core5.http.FormattedHeader; import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.StreamClosedException; import org.apache.hc.core5.http.io.SessionOutputBuffer; import org.apache.hc.core5.http.message.BasicLineFormatter; import org.apache.hc.core5.util.Args; import org.apache.hc.core5.util.CharArrayBuffer; /** * Implements chunked transfer coding. The content is sent in small chunks. * Entities transferred using this output stream can be of unlimited length. * Writes are buffered to an internal buffer (2048 default size). * <p> * Note that this class NEVER closes the underlying stream, even when close * gets called. 
Instead, the stream will be marked as closed and no further
 * output will be permitted.
 *
 * @since 4.0
 */
public class ChunkedOutputStream extends OutputStream {

    private final SessionOutputBuffer buffer;
    private final OutputStream outputStream;
    private final byte[] cache;
    private final CharArrayBuffer lineBuffer;
    private final Supplier<List<? extends Header>> trailerSupplier;

    private int cachePosition = 0;
    private boolean wroteLastChunk = false;
    private boolean closed = false;

    /**
     * Default constructor.
     *
     * @param buffer Session output buffer
     * @param outputStream Output stream
     * @param chunkSizeHint minimal chunk size hint
     * @param trailerSupplier Trailer supplier. May be {@code null}
     *
     * @since 5.0
     */
    public ChunkedOutputStream(
            final SessionOutputBuffer buffer,
            final OutputStream outputStream,
            final int chunkSizeHint,
            final Supplier<List<? extends Header>> trailerSupplier) {
        super();
        this.buffer = Args.notNull(buffer, "Session output buffer");
        this.outputStream = Args.notNull(outputStream, "Output stream");
        // Fall back to a 2048 byte internal buffer when no positive hint is given.
        final int cacheSize = chunkSizeHint > 0 ? chunkSizeHint : 2048;
        this.cache = new byte[cacheSize];
        this.lineBuffer = new CharArrayBuffer(32);
        this.trailerSupplier = trailerSupplier;
    }

    /**
     * Constructor with no trailers.
     *
     * @param buffer Session output buffer
     * @param outputStream Output stream
     * @param chunkSizeHint minimal chunk size hint
     */
    public ChunkedOutputStream(final SessionOutputBuffer buffer, final OutputStream outputStream, final int chunkSizeHint) {
        this(buffer, outputStream, chunkSizeHint, null);
    }

    /**
     * Emits the hexadecimal chunk-size line that precedes a chunk's data.
     */
    private void writeChunkHeader(final int chunkSize) throws IOException {
        this.lineBuffer.clear();
        this.lineBuffer.append(Integer.toHexString(chunkSize));
        this.buffer.writeLine(this.lineBuffer, this.outputStream);
    }

    /**
     * Emits the empty line (CRLF) that terminates a chunk.
     */
    private void writeChunkEnd() throws IOException {
        this.lineBuffer.clear();
        this.buffer.writeLine(this.lineBuffer, this.outputStream);
    }

    /**
     * Writes the cache out onto the underlying stream as a single chunk.
     */
    private void flushCache() throws IOException {
        if (this.cachePosition <= 0) {
            // Nothing buffered; emitting a zero-length chunk here would terminate the body.
            return;
        }
        writeChunkHeader(this.cachePosition);
        this.buffer.write(this.cache, 0, this.cachePosition, this.outputStream);
        writeChunkEnd();
        this.cachePosition = 0;
    }

    /**
     * Writes the cache and bufferToAppend to the underlying stream
     * as one large chunk.
     */
    private void flushCacheWithAppend(final byte[] bufferToAppend, final int off, final int len) throws IOException {
        writeChunkHeader(this.cachePosition + len);
        this.buffer.write(this.cache, 0, this.cachePosition, this.outputStream);
        this.buffer.write(bufferToAppend, off, len, this.outputStream);
        writeChunkEnd();
        this.cachePosition = 0;
    }

    /**
     * Writes the final zero-length chunk, followed by any trailers and a blank line.
     */
    private void writeClosingChunk() throws IOException {
        writeChunkHeader(0);
        writeTrailers();
        writeChunkEnd();
    }

    /**
     * Writes the trailer headers supplied by {@code trailerSupplier}, if any.
     */
    private void writeTrailers() throws IOException {
        final List<? extends Header> trailers = this.trailerSupplier != null ? this.trailerSupplier.get() : null;
        if (trailers == null) {
            return;
        }
        for (final Header trailer : trailers) {
            if (trailer instanceof FormattedHeader) {
                // Already formatted: write the raw header buffer straight through.
                final CharArrayBuffer chbuffer = ((FormattedHeader) trailer).getBuffer();
                this.buffer.writeLine(chbuffer, this.outputStream);
            } else {
                this.lineBuffer.clear();
                BasicLineFormatter.INSTANCE.formatHeader(this.lineBuffer, trailer);
                this.buffer.writeLine(this.lineBuffer, this.outputStream);
            }
        }
    }

    // ----------------------------------------------------------- Public Methods

    /**
     * Must be called to ensure the internal cache is flushed and the closing
     * chunk is written.
     * @throws IOException in case of an I/O error
     */
    public void finish() throws IOException {
        if (this.wroteLastChunk) {
            return;
        }
        flushCache();
        writeClosingChunk();
        this.wroteLastChunk = true;
    }

    // -------------------------------------------- OutputStream Methods

    @Override
    public void write(final int b) throws IOException {
        if (this.closed) {
            throw new StreamClosedException();
        }
        this.cache[this.cachePosition++] = (byte) b;
        if (this.cachePosition == this.cache.length) {
            flushCache();
        }
    }

    /**
     * Writes the array. If the array does not fit within the buffer, it is
     * not split, but rather written out as one large chunk.
     */
    @Override
    public void write(final byte[] b) throws IOException {
        write(b, 0, b.length);
    }

    /**
     * Writes the array. If the array does not fit within the buffer, it is
     * not split, but rather written out as one large chunk.
     */
    @Override
    public void write(final byte[] src, final int off, final int len) throws IOException {
        if (this.closed) {
            throw new StreamClosedException();
        }
        final int freeSpace = this.cache.length - this.cachePosition;
        if (len < freeSpace) {
            // Fits in the cache with room to spare; defer the chunk.
            System.arraycopy(src, off, cache, this.cachePosition, len);
            this.cachePosition += len;
        } else {
            // Too large to buffer: emit cache + input together as one chunk.
            flushCacheWithAppend(src, off, len);
        }
    }

    /**
     * Flushes the content buffer and the underlying stream.
     */
    @Override
    public void flush() throws IOException {
        flushCache();
        this.buffer.flush(this.outputStream);
    }

    /**
     * Finishes writing to the underlying stream, but does NOT close the underlying stream.
     */
    @Override
    public void close() throws IOException {
        if (this.closed) {
            return;
        }
        this.closed = true;
        finish();
        this.buffer.flush(this.outputStream);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.unit.core.postoffice.impl; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.Pair; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.filter.Filter; import org.apache.activemq.artemis.core.paging.PagingStore; import org.apache.activemq.artemis.core.paging.cursor.PageSubscription; import org.apache.activemq.artemis.core.persistence.OperationContext; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.server.Consumer; import org.apache.activemq.artemis.core.server.MessageReference; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.RoutingContext; import org.apache.activemq.artemis.core.server.ServerConsumer; import org.apache.activemq.artemis.core.server.impl.AckReason; import org.apache.activemq.artemis.core.transaction.Transaction; 
import org.apache.activemq.artemis.utils.ReferenceCounter;
import org.apache.activemq.artemis.utils.collections.LinkedListIterator;
import org.apache.activemq.artemis.utils.critical.CriticalComponentImpl;
import org.apache.activemq.artemis.utils.critical.EmptyCriticalAnalyzer;

/**
 * A do-nothing {@link Queue} stub for unit tests.
 *
 * <p>Almost every operation is a no-op or returns a fixed default (0, -1, false or null).
 * The only real state is the queue {@code name} and {@code id} (set at construction), a
 * test-settable {@code messageCount}, and an optional {@link PageSubscription} which, when
 * set, is wired back to this queue via {@code setQueue(this)}.
 */
public class FakeQueue extends CriticalComponentImpl implements Queue {

   // ---- queue attribute setters/getters: all ignored / fixed defaults ----

   @Override public void setPurgeOnNoConsumers(boolean value) { }

   @Override public boolean isEnabled() { return false; }

   @Override public void setEnabled(boolean value) { }

   @Override public PagingStore getPagingStore() { return null; }

   // durableUp/durableDown pretend exactly one durable copy was counted
   @Override public int durableUp(Message message) { return 1; }

   @Override public int durableDown(Message message) { return 1; }

   @Override public void refUp(MessageReference messageReference) { }

   @Override public void refDown(MessageReference messageReference) { }

   @Override public int getConsumersBeforeDispatch() { return 0; }

   @Override public void setConsumersBeforeDispatch(int consumersBeforeDispatch) { }

   @Override public void removeAddress() throws Exception { }

   @Override public long getDelayBeforeDispatch() { return 0; }

   @Override public void setDelayBeforeDispatch(long delayBeforeDispatch) { }

   @Override public long getDispatchStartTime() { return 0; }

   @Override public boolean isDispatching() { return false; }

   @Override public void setDispatching(boolean dispatching) { }

   @Override public boolean allowsReferenceCallback() { return false; }

   @Override public boolean isExclusive() { return false; }

   @Override public void setExclusive(boolean value) { }

   @Override public boolean isLastValue() { return false; }

   @Override public SimpleString getLastValueKey() { return null; }

   @Override public boolean isNonDestructive() { return false; }

   @Override public void setNonDestructive(boolean nonDestructive) { }

   @Override public void setMaxConsumer(int maxConsumers) { }

   @Override public int getGroupBuckets() { return 0; }

   @Override public void setGroupBuckets(int groupBuckets) { }

   @Override public boolean isGroupRebalance() { return false; }

   @Override public void setGroupRebalance(boolean groupRebalance) { }

   @Override public boolean isGroupRebalancePauseDispatch() { return false; }

   @Override public void setGroupRebalancePauseDispatch(boolean groupRebalancePauseDisptach) { }

   @Override public SimpleString getGroupFirstKey() { return null; }

   @Override public void setGroupFirstKey(SimpleString groupFirstKey) { }

   @Override public boolean isConfigurationManaged() { return false; }

   @Override public void setConfigurationManaged(boolean configurationManaged) { }

   @Override public boolean isInternalQueue() { return false; }

   // ---- lifecycle / administrative operations: all no-ops ----

   @Override public boolean sendToDeadLetterAddress(Transaction tx, MessageReference ref) throws Exception { return false; }

   @Override public void deleteQueue(boolean removeConsumers) throws Exception { }

   @Override public void unproposed(SimpleString groupID) { }

   @Override public void reloadPause(long recordID) { }

   @Override public void recheckRefCount(OperationContext context) { }

   @Override public boolean isPersistedPause() { return false; }

   @Override public int retryMessages(Filter filter) throws Exception { return 0; }

   @Override public void setConsumersRefCount(ReferenceCounter referenceCounter) { }

   @Override public void setInternalQueue(boolean internalQueue) { }

   @Override public long getAcknowledgeAttempts() { return 0; }

   @Override public void cancel(Transaction tx, MessageReference ref, boolean ignoreRedeliveryCheck) { }

   // Optional page subscription; see setPageSubscription(PageSubscription) below.
   PageSubscription subs;

   @Override public boolean isDirectDeliver() { return false; }

   @Override public void close() { }

   // not part of the Queue interface; kept for test callers
   public void forceCheckQueueSize() { }

   @Override public void reload(MessageReference ref) { }

   @Override public void pause(boolean persist) { }

   // pretends all pending work was flushed successfully
   @Override public boolean flushExecutor() { return true; }

   @Override public void addHead(MessageReference ref, boolean scheduling) { }

   @Override public void addSorted(MessageReference ref, boolean scheduling) { }

   @Override public void addHead(List<MessageReference> ref, boolean scheduling) { }

   @Override public void addTail(MessageReference ref, boolean direct) { }

   @Override public void addTail(MessageReference ref) { }

   @Override public void resetAllIterators() { }

   // ---- real state ----
   private final SimpleString name;
   private final long id;
   // mutable so tests can control what getMessageCount() reports
   private long messageCount;

   public FakeQueue(final SimpleString name) {
      this(name, 0);
   }

   public FakeQueue(final SimpleString name, final long id) {
      super(EmptyCriticalAnalyzer.getInstance(), 1);
      this.name = name;
      this.id = id;
   }

   // ---- acknowledge / consumer management: all no-ops ----

   @Override public void acknowledge(final MessageReference ref) throws Exception { }

   @Override public void acknowledge(final MessageReference ref, ServerConsumer consumer) throws Exception { }

   @Override public void acknowledge(MessageReference ref, AckReason reason, ServerConsumer consumer) throws Exception { }

   @Override public void acknowledge(final Transaction tx, final MessageReference ref) throws Exception { }

   @Override public void acknowledge(Transaction tx, MessageReference ref, AckReason reason, ServerConsumer consumer) throws Exception { }

   @Override public void addConsumer(final Consumer consumer) throws Exception { }

   @Override public void addLingerSession(String sessionId) { }

   @Override public void removeLingerSession(String sessionId) { }

   @Override public void addRedistributor(final long delay) { }

   @Override public void cancel(final MessageReference reference, final long timeBase, boolean sorted) throws Exception { }

   @Override public void cancel(final Transaction tx, final MessageReference ref) { }

   @Override public void cancelRedistributor() throws Exception { }

   @Override public boolean changeReferencePriority(final long messageID, final byte newPriority) throws Exception { return false; }

   @Override public int changeReferencesPriority(Filter filter, byte newPriority) throws Exception { return 0; }

   @Override public Pair<Boolean, Boolean> checkRedelivery(final MessageReference ref, final long timeBase, final boolean check) throws Exception { return new Pair<>(false, false); }

   @Override public int deleteAllReferences() throws Exception { return 0; }

   @Override public int deleteMatchingReferences(final Filter filter) throws Exception { return 0; }

   @Override public boolean deleteReference(final long messageID) throws Exception { return false; }

   @Override public void deliverAsync() { }

   @Override public void expire(final MessageReference ref) throws Exception { }

   @Override public void expire(final MessageReference ref, final ServerConsumer consumer) throws Exception { }

   @Override public boolean expireReference(final long messageID) throws Exception { return false; }

   @Override public void expireReferences() throws Exception { }

   @Override public int expireReferences(final Filter filter) throws Exception { return 0; }

   @Override public int getConsumerCount() { return 0; }

   @Override public long getConsumerRemovedTimestamp() { return 0; }

   @Override public void setRingSize(long ringSize) { }

   @Override public long getRingSize() { return 0; }

   @Override public ReferenceCounter getConsumersRefCount() {
      // no consumer ref-counting in the fake
      return null;
   }

   @Override public void addSorted(List<MessageReference> refs, boolean scheduling) { }

   @Override public Set<Consumer> getConsumers() { return null; }

   @Override public Map<SimpleString, Consumer> getGroups() { return null; }

   @Override public void resetGroup(SimpleString groupID) { }

   @Override public void resetAllGroups() { }

   @Override public int getGroupCount() { return 0; }

   @Override public int getDeliveringCount() { return 0; }

   @Override public Filter getFilter() { return null; }

   @Override public void setFilter(Filter filter) { }

   // reports whatever the test injected via setMessageCount(long)
   @Override public long getMessageCount() { return messageCount; }

   @Override public long getPersistentSize() { return 0; }

   @Override public long getDurableMessageCount() { return 0; }

   @Override public long getDurablePersistentSize() { return 0; }

   // test hook backing getMessageCount(); not part of the Queue interface
   public void setMessageCount(long messageCount) {
      this.messageCount = messageCount;
   }

   // ---- counters: all fixed at zero, resets ignored ----

   @Override public long getMessagesAdded() { return 0; }

   @Override public long getMessagesAcknowledged() { return 0; }

   @Override public long getMessagesExpired() { return 0; }

   @Override public long getMessagesKilled() { return 0; }

   @Override public long getMessagesReplaced() { return 0; }

   @Override public void resetMessagesAdded() { }

   @Override public void resetMessagesAcknowledged() { }

   @Override public void resetMessagesExpired() { }

   @Override public void resetMessagesKilled() { }

   @Override public void incrementMesssagesAdded() { }

   @Override public void deliverScheduledMessages() { }

   // ---- identity ----

   @Override public SimpleString getName() { return name; }

   @Override public SimpleString getAddress() { return null; }

   @Override public long getID() { return id; }

   @Override public MessageReference getReference(final long id1) { return null; }

   @Override public int getScheduledCount() { return 0; }

   @Override public long getScheduledSize() { return 0; }

   @Override public List<MessageReference> getScheduledMessages() { return null; }

   @Override public boolean isDurableMessage() { return false; }

   @Override public boolean isDurable() { return false; }

   @Override public boolean isPaused() { return false; }

   @Override public boolean isAutoDelete() { return false; }

   @Override public long getAutoDeleteDelay() { return -1; }

   @Override public long getAutoDeleteMessageCount() { return -1; }

   @Override public boolean isTemporary() { return false; }

   @Override public boolean isAutoCreated() { return false; }

   @Override public boolean isPurgeOnNoConsumers() { return false; }

   @Override public int getMaxConsumers() { return -1; }

   @Override public LinkedListIterator<MessageReference> iterator() { return null; }

   @Override public int moveReferences(final Filter filter, final SimpleString toAddress, Binding binding) throws Exception { return 0; }

   @Override public void pause() { }

   @Override public void reacknowledge(final Transaction tx, final MessageReference ref) throws Exception { }

   @Override public void referenceHandled(MessageReference ref) { }

   @Override public void removeConsumer(final Consumer consumer) { }

   // not part of the Queue interface; kept for test callers
   public MessageReference removeFirstReference(final long id1) throws Exception { return null; }

   @Override public MessageReference removeReferenceWithID(final long id1) throws Exception { return null; }

   @Override public void resume() { }

   @Override public boolean sendMessageToDeadLetterAddress(final long messageID) throws Exception { return false; }

   @Override public int sendMessagesToDeadLetterAddress(Filter filter) throws Exception { return 0; }

   @Override public SimpleString getExpiryAddress() { return null; }

   @Override public void route(final Message message, final RoutingContext context) throws Exception { }

   @Override public void routeWithAck(Message message, RoutingContext context) { }

   @Override public boolean hasMatchingConsumer(final Message message) { return false; }

   @Override public Executor getExecutor() { return null; }

   // not part of the Queue interface; kept for test callers
   public void addLast(MessageReference ref, boolean direct) { }

   @Override public PageSubscription getPageSubscription() { return subs; }

   @Override public RoutingType getRoutingType() { return ActiveMQDefaultConfiguration.getDefaultRoutingType(); }

   @Override public void setRoutingType(RoutingType routingType) { }

   /**
    * Stores the page subscription and, when non-null, links it back to this queue.
    */
   public void setPageSubscription(PageSubscription sub) {
      this.subs = sub;
      if (subs != null) {
         sub.setQueue(this);
      }
   }

   @Override public boolean moveReference(long messageID, SimpleString toAddress, Binding binding, boolean rejectDuplicates) throws Exception { return false; }

   @Override public int deleteAllReferences(int flushLimit) throws Exception { return 0; }

   @Override public int deleteMatchingReferences(int flushLImit, Filter filter, AckReason reason) throws Exception { return 0; }

   @Override public int moveReferences(int flushLimit, Filter filter, SimpleString toAddress, boolean rejectDuplicates, Binding binding) throws Exception { return 0; }

   @Override public void forceDelivery() { }

   @Override public void deleteQueue() throws Exception { }

   @Override public void destroyPaging() { }

   @Override public Map<String, List<MessageReference>> getDeliveringMessages() { return null; }

   @Override public LinkedListIterator<MessageReference> browserIterator() { return null; }

   @Override public void postAcknowledge(MessageReference ref, AckReason reason) { }

   @Override public float getRate() { return 0.0f; }

   @Override public SimpleString getUser() { return null; }

   @Override public void setUser(SimpleString user) { }

   @Override public long getDeliveringSize() { return 0; }

   @Override public int getDurableDeliveringCount() { return 0; }

   @Override public long getDurableDeliveringSize() { return 0; }

   @Override public int getDurableScheduledCount() { return 0; }

   @Override public long getDurableScheduledSize() { return 0; }
}
package de.adorsys.oauth.client.protocol; import com.nimbusds.oauth2.sdk.AccessTokenResponse; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; import com.nimbusds.oauth2.sdk.AuthorizationRequest; import com.nimbusds.oauth2.sdk.AuthorizationSuccessResponse; import com.nimbusds.oauth2.sdk.ResponseType; import com.nimbusds.oauth2.sdk.ResponseType.Value; import com.nimbusds.oauth2.sdk.TokenRequest; import com.nimbusds.oauth2.sdk.auth.ClientSecretBasic; import com.nimbusds.oauth2.sdk.auth.Secret; import com.nimbusds.oauth2.sdk.http.HTTPResponse; import com.nimbusds.oauth2.sdk.id.ClientID; import com.nimbusds.oauth2.sdk.token.AccessToken; import com.nimbusds.oauth2.sdk.token.BearerAccessToken; import com.nimbusds.oauth2.sdk.token.Tokens; import org.apache.commons.lang3.StringUtils; import org.apache.http.NameValuePair; import org.apache.http.client.utils.URLEncodedUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * OAuthProtocol */ public class OAuthProtocol { private static final Logger LOG = LoggerFactory.getLogger(OAuthProtocol.class); private URI authEndpoint; private URI tokenEndpoint; private ClientID clientId; private ClientSecretBasic clientSecretBasic; private String clientSecretValue; public static OAuthProtocol from(Map<String, String> properties) { OAuthProtocol oauthProtocol = new OAuthProtocol(); oauthProtocol.setAuthEndpoint(properties.get("authEndpoint")); oauthProtocol.setTokenEndpoint(properties.get("tokenEndpoint")); oauthProtocol.setClientId(properties.get("clientId")); oauthProtocol.setClientSecretValue(properties.get("clientSecret")); return oauthProtocol.initialize(); } /** * extractURI */ public URI 
extractURI(HttpServletRequest request) { try { String query = request.getQueryString() == null ? "" : "?" + request.getQueryString(); return new URL(request.getScheme(), request.getServerName(), request.getServerPort(), request.getRequestURI() + query).toURI(); } catch (Exception e) { throw new IllegalStateException(e); } } private URI removeCodeParameterFromUri(URI uri) { try { String query = ""; if (uri.getQuery() != null) { List<NameValuePair> params = URLEncodedUtils.parse(uri, "UTF-8"); List<NameValuePair> parmsWithoutCode = new ArrayList<>(); for (NameValuePair param : params) { if (!"code".equalsIgnoreCase(param.getName())) { parmsWithoutCode.add(param); } } if (parmsWithoutCode.size() > 0) { query = "?" + URLEncodedUtils.format(parmsWithoutCode, "UTF-8"); } } return new URL(uri.getScheme(), uri.getHost(), uri.getPort(), uri.getPath() + query).toURI(); } catch (Exception e) { return uri; } } /** * OAuthProtocol builder */ public void setAuthEndpoint(String authEndpoint) { try { this.authEndpoint = new URI(authEndpoint); } catch (URISyntaxException e) { throw new IllegalStateException("Invalid authEndpoint " + e.getMessage()); } } public void setTokenEndpoint(String tokenEndpoint) { try { this.tokenEndpoint = new URI(tokenEndpoint); } catch (URISyntaxException e) { throw new IllegalStateException("Invalid tokenEndpoint " + e.getMessage()); } } public void setClientId(String clientId) { this.clientId = new ClientID(clientId); } public void setClientSecretValue(String clientSecretValue) { this.clientSecretValue = clientSecretValue; } public OAuthProtocol initialize() { if (authEndpoint == null || tokenEndpoint == null || clientId == null) { throw new IllegalStateException("Endpoint/ClientId missing"); } if (clientSecretValue != null) { clientSecretBasic = new ClientSecretBasic(clientId, new Secret(clientSecretValue)); } return this; } /** * resolveAccessToken: auth header and query param supported (form param not supported) */ public AccessToken 
resolveAccessToken(HttpServletRequest request) { String queryParam = request.getParameter("access_token"); if (StringUtils.isNotEmpty(queryParam)) { return new BearerAccessToken(queryParam); } String authorization = request.getHeader("Authorization"); if (authorization != null && authorization.contains("Bearer")) { try { return BearerAccessToken.parse(authorization); } catch (Exception e) { LOG.debug("invalid authorization-header {}", authorization); } } return null; } /** * check if an authorization code is available and change this code to an access token */ public AccessTokenResponse runAuthorizationCodeFlow(URI requestURI) { AuthorizationCode authorizationCode = resolveAuthorizationCode(requestURI); if (authorizationCode == null) { return null; } return handleAuthorization(authorizationCode, requestURI); } /** * ask the authEndpoint for an authorization code */ public void doAuthorizationRequest(HttpServletResponse response, URI requestURI) { URI requestUriWithoutCode = removeCodeParameterFromUri(requestURI); AuthorizationRequest authorizationRequest = new AuthorizationRequest.Builder(new ResponseType(Value.CODE), clientId).endpointURI(authEndpoint) .redirectionURI(requestUriWithoutCode).build(); String redirect = String.format("%s?%s", authorizationRequest.toHTTPRequest().getURL(), authorizationRequest.toHTTPRequest().getQuery()); LOG.debug("redirect to {}", redirect); try { response.sendRedirect(redirect); } catch (IOException e) { throw new IllegalStateException(e); } } /** * parse URI for authorization code */ private AuthorizationCode resolveAuthorizationCode(URI requestURI) { try { AuthorizationSuccessResponse response = AuthorizationSuccessResponse.parse(requestURI); return response.getAuthorizationCode(); } catch (Exception e) { LOG.trace("invalid authorization-response {}", requestURI); } return null; } /** * handleAuthorization - ask tokenEndpoint for access token */ private AccessTokenResponse handleAuthorization(AuthorizationCode authorizationCode, 
URI redirect) { URI requestUriWithoutCode = removeCodeParameterFromUri(redirect); TokenRequest tokenRequest = clientSecretBasic == null ? new TokenRequest(tokenEndpoint, clientId, new AuthorizationCodeGrant(authorizationCode, requestUriWithoutCode)) : new TokenRequest(tokenEndpoint, clientSecretBasic, new AuthorizationCodeGrant(authorizationCode, requestUriWithoutCode)); try { HTTPResponse tokenResponse = tokenRequest.toHTTPRequest().send(); tokenResponse.indicatesSuccess(); return AccessTokenResponse.parse(tokenResponse); } catch (Exception e) { LOG.error(e.getClass().getSimpleName() + " " + e.getMessage()); } return null; } @Override public String toString() { return String.format("authEndpoint=%s tokenEndpoint=%s clientId=%s", authEndpoint, tokenEndpoint, clientId); } }
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.indexing.kafka; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import io.druid.java.util.emitter.EmittingLogger; import io.druid.java.util.http.client.HttpClient; import io.druid.java.util.http.client.Request; import io.druid.java.util.http.client.response.FullResponseHandler; import io.druid.java.util.http.client.response.FullResponseHolder; import io.druid.indexing.common.RetryPolicy; import io.druid.indexing.common.RetryPolicyConfig; import io.druid.indexing.common.RetryPolicyFactory; import io.druid.indexing.common.TaskInfoProvider; import io.druid.indexer.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.java.util.common.IAE; import io.druid.java.util.common.IOE; import 
io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.Execs; import io.druid.segment.realtime.firehose.ChatHandlerResource; import org.jboss.netty.channel.ChannelException; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Period; import javax.ws.rs.core.MediaType; import java.io.IOException; import java.net.Socket; import java.net.URI; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.Callable; public class KafkaIndexTaskClient { public static class NoTaskLocationException extends RuntimeException { public NoTaskLocationException(String message) { super(message); } } public static class TaskNotRunnableException extends RuntimeException { public TaskNotRunnableException(String message) { super(message); } } public static final int MAX_RETRY_WAIT_SECONDS = 10; private static final int MIN_RETRY_WAIT_SECONDS = 2; private static final EmittingLogger log = new EmittingLogger(KafkaIndexTaskClient.class); private static final String BASE_PATH = "/druid/worker/v1/chat"; private static final int TASK_MISMATCH_RETRY_DELAY_SECONDS = 5; private static final TreeMap EMPTY_TREE_MAP = new TreeMap(); private final HttpClient httpClient; private final ObjectMapper jsonMapper; private final TaskInfoProvider taskInfoProvider; private final Duration httpTimeout; private final RetryPolicyFactory retryPolicyFactory; private final ListeningExecutorService executorService; private final long numRetries; public KafkaIndexTaskClient( HttpClient httpClient, ObjectMapper jsonMapper, TaskInfoProvider taskInfoProvider, String dataSource, int numThreads, Duration httpTimeout, long numRetries ) { this.httpClient = httpClient; this.jsonMapper = jsonMapper; this.taskInfoProvider = taskInfoProvider; this.httpTimeout = httpTimeout; this.numRetries = numRetries; 
this.retryPolicyFactory = createRetryPolicyFactory(); this.executorService = MoreExecutors.listeningDecorator( Execs.multiThreaded( numThreads, StringUtils.format( "KafkaIndexTaskClient-%s-%%d", dataSource ) ) ); } public void close() { executorService.shutdownNow(); } public boolean stop(final String id, final boolean publish) { log.debug("Stop task[%s] publish[%s]", id, publish); try { final FullResponseHolder response = submitRequest( id, HttpMethod.POST, "stop", publish ? "publish=true" : null, true ); return response.getStatus().getCode() / 100 == 2; } catch (NoTaskLocationException e) { return false; } catch (TaskNotRunnableException e) { log.info("Task [%s] couldn't be stopped because it is no longer running", id); return true; } catch (Exception e) { log.warn(e, "Exception while stopping task [%s]", id); return false; } } public boolean resume(final String id) { log.debug("Resume task[%s]", id); try { final FullResponseHolder response = submitRequest(id, HttpMethod.POST, "resume", null, true); return response.getStatus().getCode() / 100 == 2; } catch (NoTaskLocationException e) { return false; } } public Map<Integer, Long> pause(final String id) { return pause(id, 0); } public Map<Integer, Long> pause(final String id, final long timeout) { log.debug("Pause task[%s] timeout[%d]", id, timeout); try { final FullResponseHolder response = submitRequest( id, HttpMethod.POST, "pause", timeout > 0 ? 
StringUtils.format("timeout=%d", timeout) : null, true ); if (response.getStatus().equals(HttpResponseStatus.OK)) { log.info("Task [%s] paused successfully", id); return jsonMapper.readValue(response.getContent(), new TypeReference<Map<Integer, Long>>() { }); } final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy(); while (true) { if (getStatus(id) == KafkaIndexTask.Status.PAUSED) { return getCurrentOffsets(id, true); } final Duration delay = retryPolicy.getAndIncrementRetryDelay(); if (delay == null) { log.error("Task [%s] failed to pause, aborting", id); throw new ISE("Task [%s] failed to pause, aborting", id); } else { final long sleepTime = delay.getMillis(); log.info( "Still waiting for task [%s] to pause; will try again in [%s]", id, new Duration(sleepTime).toString() ); Thread.sleep(sleepTime); } } } catch (NoTaskLocationException e) { log.error("Exception [%s] while pausing Task [%s]", e.getMessage(), id); return ImmutableMap.of(); } catch (IOException | InterruptedException e) { log.error("Exception [%s] while pausing Task [%s]", e.getMessage(), id); throw Throwables.propagate(e); } } public KafkaIndexTask.Status getStatus(final String id) { log.debug("GetStatus task[%s]", id); try { final FullResponseHolder response = submitRequest(id, HttpMethod.GET, "status", null, true); return jsonMapper.readValue(response.getContent(), KafkaIndexTask.Status.class); } catch (NoTaskLocationException e) { return KafkaIndexTask.Status.NOT_STARTED; } catch (IOException e) { throw Throwables.propagate(e); } } public DateTime getStartTime(final String id) { log.debug("GetStartTime task[%s]", id); try { final FullResponseHolder response = submitRequest(id, HttpMethod.GET, "time/start", null, true); return response.getContent() == null || response.getContent().isEmpty() ? 
null : jsonMapper.readValue(response.getContent(), DateTime.class); } catch (NoTaskLocationException e) { return null; } catch (IOException e) { throw Throwables.propagate(e); } } public Map<Integer, Long> getCurrentOffsets(final String id, final boolean retry) { log.debug("GetCurrentOffsets task[%s] retry[%s]", id, retry); try { final FullResponseHolder response = submitRequest(id, HttpMethod.GET, "offsets/current", null, retry); return jsonMapper.readValue(response.getContent(), new TypeReference<Map<Integer, Long>>() { }); } catch (NoTaskLocationException e) { return ImmutableMap.of(); } catch (IOException e) { throw Throwables.propagate(e); } } public TreeMap<Integer, Map<Integer, Long>> getCheckpoints(final String id, final boolean retry) { log.debug("GetCheckpoints task[%s] retry[%s]", id, retry); try { final FullResponseHolder response = submitRequest(id, HttpMethod.GET, "checkpoints", null, retry); return jsonMapper.readValue(response.getContent(), new TypeReference<TreeMap<Integer, TreeMap<Integer, Long>>>() { }); } catch (NoTaskLocationException e) { return EMPTY_TREE_MAP; } catch (IOException e) { throw Throwables.propagate(e); } } public ListenableFuture<TreeMap<Integer, Map<Integer, Long>>> getCheckpointsAsync( final String id, final boolean retry ) { return executorService.submit( () -> getCheckpoints(id, retry) ); } public Map<Integer, Long> getEndOffsets(final String id) { log.debug("GetEndOffsets task[%s]", id); try { final FullResponseHolder response = submitRequest(id, HttpMethod.GET, "offsets/end", null, true); return jsonMapper.readValue(response.getContent(), new TypeReference<Map<Integer, Long>>() { }); } catch (NoTaskLocationException e) { return ImmutableMap.of(); } catch (IOException e) { throw Throwables.propagate(e); } } public boolean setEndOffsets( final String id, final Map<Integer, Long> endOffsets, final boolean resume, final boolean finalize ) { log.debug("SetEndOffsets task[%s] endOffsets[%s] resume[%s] finalize[%s]", id, 
endOffsets, resume, finalize); try { final FullResponseHolder response = submitRequest( id, HttpMethod.POST, "offsets/end", StringUtils.format("resume=%s&finish=%s", resume, finalize), jsonMapper.writeValueAsBytes(endOffsets), true ); return response.getStatus().getCode() / 100 == 2; } catch (NoTaskLocationException e) { return false; } catch (IOException e) { throw Throwables.propagate(e); } } public ListenableFuture<Boolean> stopAsync(final String id, final boolean publish) { return executorService.submit( new Callable<Boolean>() { @Override public Boolean call() throws Exception { return stop(id, publish); } } ); } public ListenableFuture<Boolean> resumeAsync(final String id) { return executorService.submit( new Callable<Boolean>() { @Override public Boolean call() throws Exception { return resume(id); } } ); } public ListenableFuture<Map<Integer, Long>> pauseAsync(final String id) { return pauseAsync(id, 0); } public ListenableFuture<Map<Integer, Long>> pauseAsync(final String id, final long timeout) { return executorService.submit( new Callable<Map<Integer, Long>>() { @Override public Map<Integer, Long> call() throws Exception { return pause(id, timeout); } } ); } public ListenableFuture<KafkaIndexTask.Status> getStatusAsync(final String id) { return executorService.submit( new Callable<KafkaIndexTask.Status>() { @Override public KafkaIndexTask.Status call() throws Exception { return getStatus(id); } } ); } public ListenableFuture<DateTime> getStartTimeAsync(final String id) { return executorService.submit( new Callable<DateTime>() { @Override public DateTime call() throws Exception { return getStartTime(id); } } ); } public ListenableFuture<Map<Integer, Long>> getCurrentOffsetsAsync(final String id, final boolean retry) { return executorService.submit( new Callable<Map<Integer, Long>>() { @Override public Map<Integer, Long> call() throws Exception { return getCurrentOffsets(id, retry); } } ); } public ListenableFuture<Map<Integer, Long>> 
getEndOffsetsAsync(final String id) { return executorService.submit( new Callable<Map<Integer, Long>>() { @Override public Map<Integer, Long> call() throws Exception { return getEndOffsets(id); } } ); } public ListenableFuture<Boolean> setEndOffsetsAsync( final String id, final Map<Integer, Long> endOffsets, final boolean resume, final boolean finalize ) { return executorService.submit( new Callable<Boolean>() { @Override public Boolean call() throws Exception { return setEndOffsets(id, endOffsets, resume, finalize); } } ); } @VisibleForTesting RetryPolicyFactory createRetryPolicyFactory() { // Retries [numRetries] times before giving up; this should be set long enough to handle any temporary // unresponsiveness such as network issues, if a task is still in the process of starting up, or if the task is in // the middle of persisting to disk and doesn't respond immediately. return new RetryPolicyFactory( new RetryPolicyConfig() .setMinWait(Period.seconds(MIN_RETRY_WAIT_SECONDS)) .setMaxWait(Period.seconds(MAX_RETRY_WAIT_SECONDS)) .setMaxRetryCount(numRetries) ); } @VisibleForTesting void checkConnection(String host, int port) throws IOException { new Socket(host, port).close(); } private FullResponseHolder submitRequest(String id, HttpMethod method, String pathSuffix, String query, boolean retry) { return submitRequest(id, method, pathSuffix, query, new byte[0], retry); } private FullResponseHolder submitRequest( String id, HttpMethod method, String pathSuffix, String query, byte[] content, boolean retry ) { final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy(); while (true) { FullResponseHolder response = null; Request request = null; TaskLocation location = TaskLocation.unknown(); String path = StringUtils.format("%s/%s/%s", BASE_PATH, id, pathSuffix); Optional<TaskStatus> status = taskInfoProvider.getTaskStatus(id); if (!status.isPresent() || !status.get().isRunnable()) { throw new TaskNotRunnableException(StringUtils.format( "Aborting request 
because task [%s] is not runnable", id )); } String host = location.getHost(); String scheme = ""; int port = -1; try { location = taskInfoProvider.getTaskLocation(id); if (location.equals(TaskLocation.unknown())) { throw new NoTaskLocationException(StringUtils.format("No TaskLocation available for task [%s]", id)); } host = location.getHost(); scheme = location.getTlsPort() >= 0 ? "https" : "http"; port = location.getTlsPort() >= 0 ? location.getTlsPort() : location.getPort(); // Netty throws some annoying exceptions if a connection can't be opened, which happens relatively frequently // for tasks that happen to still be starting up, so test the connection first to keep the logs clean. checkConnection(host, port); try { URI serviceUri = new URI( scheme, null, host, port, path, query, null ); request = new Request(method, serviceUri.toURL()); // used to validate that we are talking to the correct worker request.addHeader(ChatHandlerResource.TASK_ID_HEADER, id); if (content.length > 0) { request.setContent(MediaType.APPLICATION_JSON, content); } log.debug("HTTP %s: %s", method.getName(), serviceUri.toString()); response = httpClient.go(request, new FullResponseHandler(Charsets.UTF_8), httpTimeout).get(); } catch (Exception e) { Throwables.propagateIfInstanceOf(e.getCause(), IOException.class); Throwables.propagateIfInstanceOf(e.getCause(), ChannelException.class); throw Throwables.propagate(e); } int responseCode = response.getStatus().getCode(); if (responseCode / 100 == 2) { return response; } else if (responseCode == 400) { // don't bother retrying if it's a bad request throw new IAE("Received 400 Bad Request with body: %s", response.getContent()); } else { throw new IOE("Received status [%d]", responseCode); } } catch (IOException | ChannelException e) { // Since workers are free to move tasks around to different ports, there is a chance that a task may have been // moved but our view of its location has not been updated yet from ZK. 
To detect this case, we send a header // identifying our expected recipient in the request; if this doesn't correspond to the worker we messaged, the // worker will return an HTTP 404 with its ID in the response header. If we get a mismatching task ID, then // we will wait for a short period then retry the request indefinitely, expecting the task's location to // eventually be updated. final Duration delay; if (response != null && response.getStatus().equals(HttpResponseStatus.NOT_FOUND)) { String headerId = response.getResponse().headers().get(ChatHandlerResource.TASK_ID_HEADER); if (headerId != null && !headerId.equals(id)) { log.warn( "Expected worker to have taskId [%s] but has taskId [%s], will retry in [%d]s", id, headerId, TASK_MISMATCH_RETRY_DELAY_SECONDS ); delay = Duration.standardSeconds(TASK_MISMATCH_RETRY_DELAY_SECONDS); } else { delay = retryPolicy.getAndIncrementRetryDelay(); } } else { delay = retryPolicy.getAndIncrementRetryDelay(); } String urlForLog = (request != null ? request.getUrl().toString() : StringUtils.format( "%s://%s:%d%s", scheme, host, port, path )); if (!retry) { // if retry=false, we probably aren't too concerned if the operation doesn't succeed (i.e. the request was // for informational purposes only) so don't log a scary stack trace log.info("submitRequest failed for [%s], with message [%s]", urlForLog, e.getMessage()); Throwables.propagate(e); } else if (delay == null) { log.warn(e, "Retries exhausted for [%s], last exception:", urlForLog); Throwables.propagate(e); } else { try { final long sleepTime = delay.getMillis(); log.debug( "Bad response HTTP [%s] from [%s]; will try again in [%s] (body/exception: [%s])", (response != null ? response.getStatus().getCode() : "no response"), urlForLog, new Duration(sleepTime).toString(), (response != null ? 
response.getContent() : e.getMessage()) ); Thread.sleep(sleepTime); } catch (InterruptedException e2) { Throwables.propagate(e2); } } } catch (NoTaskLocationException e) { log.info("No TaskLocation available for task [%s], this task may not have been assigned to a worker yet or " + "may have already completed", id); throw e; } catch (Exception e) { log.warn(e, "Exception while sending request"); throw e; } } } }
package com.mapswithme.maps.downloader; import android.location.Location; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.annotation.StringRes; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.TextView; import com.mapswithme.maps.MwmActivity; import com.mapswithme.maps.R; import com.mapswithme.maps.base.BaseMwmFragment; import com.mapswithme.maps.base.OnBackPressListener; import com.mapswithme.maps.location.LocationHelper; import com.mapswithme.maps.widget.WheelProgressView; import com.mapswithme.util.UiUtils; import com.mapswithme.util.Utils; import com.mapswithme.util.statistics.Statistics; public class MigrationFragment extends BaseMwmFragment implements OnBackPressListener, MigrationController.Container { private TextView mError; private TextView mPrepare; private WheelProgressView mProgress; private Button mButtonPrimary; private Button mButtonSecondary; private final View.OnClickListener mButtonClickListener = new View.OnClickListener() { @Override public void onClick(final View v) { MapManager.warnOn3g(getActivity(), null, new Runnable() { @Override public void run() { boolean keepOld = (v == mButtonPrimary); Statistics.INSTANCE.trackEvent(Statistics.EventName.DOWNLOADER_MIGRATION_STARTED, Statistics.params().add(Statistics.EventParam.TYPE, keepOld ? 
"all_maps" : "current_map")); MigrationController.get().start(keepOld); } }); } }; @Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_migrate, container, false); } private void checkConnection() { Utils.checkConnection(getActivity(), R.string.common_check_internet_connection_dialog, new Utils.Proc<Boolean>() { @Override public void invoke(Boolean result) { if (result) return; if (getActivity() instanceof MwmActivity) ((MwmActivity) getActivity()).closeSidePanel(); else getActivity().finish(); } }); } @Override public void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); checkConnection(); } @Override public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); mError = (TextView) view.findViewById(R.id.error); mPrepare = (TextView) view.findViewById(R.id.preparation); mProgress = (WheelProgressView) view.findViewById(R.id.wheel_progress); mButtonPrimary = (Button) view.findViewById(R.id.button_primary); mButtonSecondary = (Button) view.findViewById(R.id.button_secondary); mButtonPrimary.setOnClickListener(mButtonClickListener); mButtonSecondary.setOnClickListener(mButtonClickListener); UiUtils.updateAccentButton(mButtonPrimary); mProgress.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { MigrationController.get().cancel(); } }); Statistics.INSTANCE.trackEvent(Statistics.EventName.DOWNLOADER_MIGRATION_DIALOG_SEEN); } @Override public void onResume() { super.onResume(); MigrationController.get().restore(); } @Override public void onStart() { super.onStart(); MigrationController.get().attach(this); } @Override public void onStop() { super.onStop(); MigrationController.get().detach(); } @Override public void setReadyState() { UiUtils.show(mButtonPrimary); UiUtils.hide(mPrepare, mProgress, mError); Location loc 
= LocationHelper.INSTANCE.getLastKnownLocation(); UiUtils.showIf(loc != null, mButtonSecondary); } @Override public void setProgressState(String countryName) { UiUtils.show(mPrepare, mProgress); UiUtils.hide(mError, mButtonPrimary, mButtonSecondary); mPrepare.setText(String.format("%1$2s %2$s", getString(R.string.downloader_downloading), countryName)); } @Override public void setErrorState(int code) { setReadyState(); UiUtils.show(mError); @StringRes int text; switch (code) { case CountryItem.ERROR_OOM: text = R.string.migration_no_space_message; break; case CountryItem.ERROR_NO_INTERNET: text = R.string.common_check_internet_connection_dialog; break; default: text = R.string.country_status_download_failed; } mError.setText(text); } @Override public void onComplete() { if (!isAdded()) return; if (getActivity() instanceof MwmActivity) ((MwmActivity) getActivity()).showDownloader(false); else getActivity().recreate(); Statistics.INSTANCE.trackEvent(Statistics.EventName.DOWNLOADER_MIGRATION_COMPLETE); } @Override public void setProgress(int percents) { mProgress.setPending(percents == 0); if (percents > 0) mProgress.setProgress(percents); } @Override public boolean onBackPressed() { return false; } }
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.sshd; import com.google.common.collect.ListMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; import com.google.gerrit.extensions.events.LifecycleListener; import com.google.gerrit.server.CurrentUser; import com.google.gerrit.server.IdentifiedUser; import com.google.gerrit.server.PeerDaemonUser; import com.google.gerrit.server.audit.SshAuditEvent; import com.google.gerrit.server.config.ConfigKey; import com.google.gerrit.server.config.ConfigUpdatedEvent; import com.google.gerrit.server.config.ConfigUpdatedEvent.ConfigUpdateEntry; import com.google.gerrit.server.config.ConfigUpdatedEvent.UpdateResult; import com.google.gerrit.server.config.GerritConfigListener; import com.google.gerrit.server.config.GerritServerConfig; import com.google.gerrit.server.group.GroupAuditService; import com.google.gerrit.server.ioutil.HexFormat; import com.google.gerrit.server.util.SystemLog; import com.google.gerrit.server.util.time.TimeUtil; import com.google.gerrit.sshd.SshScope.Context; import com.google.inject.Inject; import com.google.inject.Provider; import com.google.inject.Singleton; import org.apache.log4j.AsyncAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.spi.LoggingEvent; import org.eclipse.jgit.lib.Config; @Singleton class SshLog implements 
LifecycleListener, GerritConfigListener { private static final Logger log = Logger.getLogger(SshLog.class); private static final String JSON_SUFFIX = ".json"; protected static final String LOG_NAME = "sshd_log"; protected static final String P_SESSION = "session"; protected static final String P_USER_NAME = "userName"; protected static final String P_ACCOUNT_ID = "accountId"; protected static final String P_WAIT = "queueWaitTime"; protected static final String P_EXEC = "executionTime"; protected static final String P_STATUS = "status"; protected static final String P_AGENT = "agent"; protected static final String P_MESSAGE = "message"; private final Provider<SshSession> session; private final Provider<Context> context; private volatile AsyncAppender async; private final GroupAuditService auditService; private final SystemLog systemLog; private final boolean json; private final boolean text; private final Object lock = new Object(); @Inject SshLog( final Provider<SshSession> session, final Provider<Context> context, SystemLog systemLog, @GerritServerConfig Config config, GroupAuditService auditService) { this.session = session; this.context = context; this.auditService = auditService; this.systemLog = systemLog; this.json = config.getBoolean("log", "jsonLogging", false); this.text = config.getBoolean("log", "textLogging", true) || !json; if (config.getBoolean("sshd", "requestLog", true)) { enableLogging(); } } /** @return true if a change in state has occurred */ public boolean enableLogging() { synchronized (lock) { if (async == null) { async = new AsyncAppender(); if (text) { async.addAppender(systemLog.createAsyncAppender(LOG_NAME, new SshLogLayout())); } if (json) { async.addAppender( systemLog.createAsyncAppender(LOG_NAME + JSON_SUFFIX, new SshLogJsonLayout())); } return true; } return false; } } /** @return true if a change in state has occurred */ public boolean disableLogging() { synchronized (lock) { if (async != null) { async.close(); async = null; return 
true; } return false; } } @Override public void start() {} @Override public void stop() { disableLogging(); } void onLogin() { LoggingEvent entry = log("LOGIN FROM " + session.get().getRemoteAddressAsString()); if (async != null) { async.append(entry); } audit(context.get(), "0", "LOGIN"); } void onAuthFail(SshSession sd) { final LoggingEvent event = new LoggingEvent( // Logger.class.getName(), // fqnOfCategoryClass log, // logger TimeUtil.nowMs(), // when Level.INFO, // level "AUTH FAILURE FROM " + sd.getRemoteAddressAsString(), // message text "SSHD", // thread name null, // exception information null, // current NDC string null, // caller location null // MDC properties ); event.setProperty(P_SESSION, id(sd.getSessionId())); event.setProperty(P_USER_NAME, sd.getUsername()); final String error = sd.getAuthenticationError(); if (error != null) { event.setProperty(P_STATUS, error); } if (async != null) { async.append(event); } audit(null, "FAIL", "AUTH"); } void onExecute(DispatchCommand dcmd, int exitValue, SshSession sshSession) { onExecute(dcmd, exitValue, sshSession, null); } void onExecute(DispatchCommand dcmd, int exitValue, SshSession sshSession, String message) { final Context ctx = context.get(); ctx.finished = TimeUtil.nowMs(); String cmd = extractWhat(dcmd); final LoggingEvent event = log(cmd); event.setProperty(P_WAIT, (ctx.started - ctx.created) + "ms"); event.setProperty(P_EXEC, (ctx.finished - ctx.started) + "ms"); final String status; switch (exitValue) { case BaseCommand.STATUS_CANCEL: status = "killed"; break; case BaseCommand.STATUS_NOT_FOUND: status = "not-found"; break; case BaseCommand.STATUS_NOT_ADMIN: status = "not-admin"; break; default: status = String.valueOf(exitValue); break; } event.setProperty(P_STATUS, status); String peerAgent = sshSession.getPeerAgent(); if (peerAgent != null) { event.setProperty(P_AGENT, peerAgent); } if (message != null) { event.setProperty(P_MESSAGE, message); } if (async != null) { async.append(event); } 
audit(context.get(), status, dcmd); } private ListMultimap<String, ?> extractParameters(DispatchCommand dcmd) { if (dcmd == null) { return MultimapBuilder.hashKeys(0).arrayListValues(0).build(); } String[] cmdArgs = dcmd.getArguments(); String paramName = null; int argPos = 0; ListMultimap<String, String> parms = MultimapBuilder.hashKeys().arrayListValues().build(); for (int i = 2; i < cmdArgs.length; i++) { String arg = cmdArgs[i]; // -- stop parameters parsing if (arg.equals("--")) { for (i++; i < cmdArgs.length; i++) { parms.put("$" + argPos++, cmdArgs[i]); } break; } // --param=value int eqPos = arg.indexOf('='); if (arg.startsWith("--") && eqPos > 0) { parms.put(arg.substring(0, eqPos), arg.substring(eqPos + 1)); continue; } // -p value or --param value if (arg.startsWith("-")) { if (paramName != null) { parms.put(paramName, null); } paramName = arg; continue; } // value if (paramName == null) { parms.put("$" + argPos++, arg); } else { parms.put(paramName, arg); paramName = null; } } if (paramName != null) { parms.put(paramName, null); } return parms; } void onLogout() { LoggingEvent entry = log("LOGOUT"); if (async != null) { async.append(entry); } audit(context.get(), "0", "LOGOUT"); } private LoggingEvent log(String msg) { final SshSession sd = session.get(); final CurrentUser user = sd.getUser(); final LoggingEvent event = new LoggingEvent( // Logger.class.getName(), // fqnOfCategoryClass log, // logger TimeUtil.nowMs(), // when Level.INFO, // level msg, // message text Thread.currentThread().getName(), // thread name null, // exception information null, // current NDC string null, // caller location null // MDC properties ); event.setProperty(P_SESSION, id(sd.getSessionId())); String userName = "-"; String accountId = "-"; if (user != null && user.isIdentifiedUser()) { IdentifiedUser u = user.asIdentifiedUser(); userName = u.getUserName().orElse(null); accountId = "a/" + u.getAccountId().toString(); } else if (user instanceof PeerDaemonUser) { userName = 
PeerDaemonUser.USER_NAME; } event.setProperty(P_USER_NAME, userName); event.setProperty(P_ACCOUNT_ID, accountId); return event; } private static String id(int id) { return HexFormat.fromInt(id); } void audit(Context ctx, Object result, String cmd) { audit(ctx, result, cmd, null); } void audit(Context ctx, Object result, DispatchCommand cmd) { audit(ctx, result, extractWhat(cmd), extractParameters(cmd)); } private void audit(Context ctx, Object result, String cmd, ListMultimap<String, ?> params) { String sessionId; CurrentUser currentUser; long created; if (ctx == null) { sessionId = null; currentUser = null; created = TimeUtil.nowMs(); } else { SshSession session = ctx.getSession(); sessionId = HexFormat.fromInt(session.getSessionId()); currentUser = session.getUser(); created = ctx.created; } auditService.dispatch(new SshAuditEvent(sessionId, currentUser, cmd, created, params, result)); } private String extractWhat(DispatchCommand dcmd) { if (dcmd == null) { return "Command was already destroyed"; } StringBuilder commandName = new StringBuilder(dcmd.getCommandName()); String[] args = dcmd.getArguments(); for (int i = 1; i < args.length; i++) { commandName.append(".").append(args[i]); } return commandName.toString(); } @Override public Multimap<UpdateResult, ConfigUpdateEntry> configUpdated(ConfigUpdatedEvent event) { ConfigKey sshdRequestLog = ConfigKey.create("sshd", "requestLog"); if (!event.isValueUpdated(sshdRequestLog)) { return ConfigUpdatedEvent.NO_UPDATES; } boolean stateUpdated; try { boolean enabled = event.getNewConfig().getBoolean("sshd", "requestLog", true); if (enabled) { stateUpdated = enableLogging(); } else { stateUpdated = disableLogging(); } return stateUpdated ? event.accept(sshdRequestLog) : ConfigUpdatedEvent.NO_UPDATES; } catch (IllegalArgumentException iae) { return event.reject(sshdRequestLog); } } }
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.actions; import com.intellij.execution.*; import com.intellij.execution.configurations.ConfigurationFactory; import com.intellij.execution.configurations.ConfigurationType; import com.intellij.execution.configurations.RunConfiguration; import com.intellij.openapi.extensions.ExtensionPointName; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Ref; import com.intellij.psi.PsiElement; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * Supports creating run configurations from context (by right-clicking a code element in the source editor or the project view). Typically, * run configurations that can be created from context should extend the {@link com.intellij.execution.configurations.LocatableConfigurationBase} class. 
* * @since 13 * @author yole */ public abstract class RunConfigurationProducer<T extends RunConfiguration> { public static final ExtensionPointName<RunConfigurationProducer> EP_NAME = ExtensionPointName.create("com.intellij.runConfigurationProducer"); @NotNull public static List<RunConfigurationProducer<?>> getProducers(@NotNull Project project) { RunConfigurationProducerService runConfigurationProducerService = RunConfigurationProducerService.getInstance(project); RunConfigurationProducer[] allProducers = Extensions.getExtensions(EP_NAME); List<RunConfigurationProducer<?>> result = ContainerUtil.newArrayListWithCapacity(allProducers.length); for (RunConfigurationProducer producer : allProducers) { if (!runConfigurationProducerService.isIgnored(producer)) { result.add(producer); } } return result; } private final ConfigurationFactory myConfigurationFactory; protected RunConfigurationProducer(final ConfigurationFactory configurationFactory) { myConfigurationFactory = configurationFactory; } protected RunConfigurationProducer(final ConfigurationType configurationType) { myConfigurationFactory = configurationType.getConfigurationFactories()[0]; } public ConfigurationFactory getConfigurationFactory() { return myConfigurationFactory; } public ConfigurationType getConfigurationType() { return myConfigurationFactory.getType(); } /** * Creates a run configuration from the context. * * @param context contains the information about a location in the source code. * @return a container with a prepared run configuration and the context element from which it was created, or null if the context is * not applicable to this run configuration producer. 
*/ @Nullable public ConfigurationFromContext createConfigurationFromContext(ConfigurationContext context) { final RunnerAndConfigurationSettings settings = cloneTemplateConfiguration(context); final Ref<PsiElement> locationRef = new Ref<PsiElement>(context.getPsiLocation()); if (!setupConfigurationFromContext((T)settings.getConfiguration(), context, locationRef)) { return null; } return new ConfigurationFromContextImpl(this, settings, locationRef.get()); } /** * Sets up a configuration based on the specified context. * * @param configuration a clone of the template run configuration of the specified type * @param context contains the information about a location in the source code. * @param sourceElement a reference to the source element for the run configuration (by default contains the element at caret, * can be updated by the producer to point to a higher-level element in the tree). * * @return true if the context is applicable to this run configuration producer, false if the context is not applicable and the * configuration should be discarded. */ protected abstract boolean setupConfigurationFromContext(T configuration, ConfigurationContext context, Ref<PsiElement> sourceElement); /** * Checks if the specified configuration was created from the specified context. * @param configuration a configuration instance. * @param context contains the information about a location in the source code. * @return true if this configuration was created from the specified context, false otherwise. */ public abstract boolean isConfigurationFromContext(T configuration, ConfigurationContext context); /** * When two configurations are created from the same context by two different producers, checks if the configuration created by * this producer should be discarded in favor of the other one. * * @param self a configuration created by this producer. * @param other a configuration created by another producer. 
* @return true if the configuration created by this producer is at least as good as the other one; false if this configuration * should be discarded and the other one should be used instead. * @see #shouldReplace(ConfigurationFromContext, ConfigurationFromContext) */ public boolean isPreferredConfiguration(ConfigurationFromContext self, ConfigurationFromContext other) { return true; } /** * When two configurations are created from the same context by two different producers, checks if the configuration created by * this producer should replace the other one, that is if the other one should be discarded. * * <p>This is the same relationship as {@link #isPreferredConfiguration(ConfigurationFromContext, ConfigurationFromContext)} but * specified from the "replacement" side. * * @param self a configuration created by this producer. * @param other a configuration created by another producer. * @return true if the other configuration should be discarded, false otherwise. * @see #isPreferredConfiguration(ConfigurationFromContext, ConfigurationFromContext) */ public boolean shouldReplace(ConfigurationFromContext self, ConfigurationFromContext other) { return false; } /** * Called before a configuration created from context by this producer is first executed. Can be used to show additional UI for * customizing the created configuration. * * @param configuration a configuration created by this producer. * @param context the context * @param startRunnable the runnable that needs to be called after additional customization is complete. */ public void onFirstRun(ConfigurationFromContext configuration, ConfigurationContext context, Runnable startRunnable) { startRunnable.run(); } /** * Searches the list of existing run configurations to find one created from this context. Returns one if found, or tries to create * a new configuration from this context if not found. * * @param context contains the information about a location in the source code. 
* @return a configuration (new or existing) matching the context, or null if the context is not applicable to this producer. */ @Nullable public ConfigurationFromContext findOrCreateConfigurationFromContext(ConfigurationContext context) { Location location = context.getLocation(); if (location == null) { return null; } ConfigurationFromContext fromContext = createConfigurationFromContext(context); if (fromContext != null) { final PsiElement psiElement = fromContext.getSourceElement(); final Location<PsiElement> _location = PsiLocation.fromPsiElement(psiElement, location.getModule()); if (_location != null) { // replace with existing configuration if any final RunManager runManager = RunManager.getInstance(context.getProject()); final ConfigurationType type = fromContext.getConfigurationType(); final List<RunnerAndConfigurationSettings> configurations = runManager.getConfigurationSettingsList(type); final RunnerAndConfigurationSettings settings = findExistingConfiguration(context); if (settings != null) { fromContext.setConfigurationSettings(settings); } else { final ArrayList<String> currentNames = new ArrayList<String>(); for (RunnerAndConfigurationSettings configurationSettings : configurations) { currentNames.add(configurationSettings.getName()); } RunConfiguration configuration = fromContext.getConfiguration(); String name = configuration.getName(); assert name != null : configuration; configuration.setName(RunManager.suggestUniqueName(name, currentNames)); } } } return fromContext; } /** * Searches the list of existing run configurations to find one created from this context. Returns one if found. * * @param context contains the information about a location in the source code. * @return an existing configuration matching the context, or null if no such configuration is found. 
*/ @Nullable public RunnerAndConfigurationSettings findExistingConfiguration(ConfigurationContext context) { final RunManager runManager = RunManager.getInstance(context.getProject()); final List<RunnerAndConfigurationSettings> configurations = runManager.getConfigurationSettingsList(myConfigurationFactory.getType()); for (RunnerAndConfigurationSettings configurationSettings : configurations) { if (isConfigurationFromContext((T) configurationSettings.getConfiguration(), context)) { return configurationSettings; } } return null; } protected RunnerAndConfigurationSettings cloneTemplateConfiguration(@NotNull final ConfigurationContext context) { final RunConfiguration original = context.getOriginalConfiguration(myConfigurationFactory.getType()); if (original != null) { return RunManager.getInstance(context.getProject()).createConfiguration(original.clone(), myConfigurationFactory); } return RunManager.getInstance(context.getProject()).createRunConfiguration("", myConfigurationFactory); } @NotNull public static <T extends RunConfigurationProducer> T getInstance(Class<? extends T> aClass) { for (RunConfigurationProducer producer : Extensions.getExtensions(EP_NAME)) { if (aClass.isInstance(producer)) { return (T)producer; } } assert false : aClass; return null; } @Nullable public RunConfiguration createLightConfiguration(@NotNull final ConfigurationContext context) { RunConfiguration configuration = myConfigurationFactory.createTemplateConfiguration(context.getProject()); if (!setupConfigurationFromContext((T)configuration, context, new Ref<PsiElement>(context.getPsiLocation()))) { return null; } return configuration; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.sql.parser; import com.google.common.collect.ImmutableSet; import io.airlift.log.Logger; import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.NoViableAltException; import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Recognizer; import org.antlr.v4.runtime.RuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.Vocabulary; import org.antlr.v4.runtime.atn.ATN; import org.antlr.v4.runtime.atn.ATNState; import org.antlr.v4.runtime.atn.NotSetTransition; import org.antlr.v4.runtime.atn.PrecedencePredicateTransition; import org.antlr.v4.runtime.atn.RuleStartState; import org.antlr.v4.runtime.atn.RuleStopState; import org.antlr.v4.runtime.atn.RuleTransition; import org.antlr.v4.runtime.atn.Transition; import org.antlr.v4.runtime.atn.WildcardTransition; import org.antlr.v4.runtime.misc.IntervalSet; import java.util.ArrayDeque; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import static com.google.common.base.MoreObjects.firstNonNull; import static java.lang.String.format; import static org.antlr.v4.runtime.atn.ATNState.RULE_START; class ErrorHandler extends BaseErrorListener { private static final Logger LOG = Logger.get(ErrorHandler.class); 
private final Map<Integer, String> specialRules; private final Map<Integer, String> specialTokens; private final Set<Integer> ignoredRules; private ErrorHandler(Map<Integer, String> specialRules, Map<Integer, String> specialTokens, Set<Integer> ignoredRules) { this.specialRules = new HashMap<>(specialRules); this.specialTokens = specialTokens; this.ignoredRules = new HashSet<>(ignoredRules); } @Override public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String message, RecognitionException e) { try { Parser parser = (Parser) recognizer; ATN atn = parser.getATN(); ATNState currentState; Token currentToken; RuleContext context; if (e != null) { currentState = atn.states.get(e.getOffendingState()); currentToken = e.getOffendingToken(); context = e.getCtx(); if (e instanceof NoViableAltException) { currentToken = ((NoViableAltException) e).getStartToken(); } } else { currentState = atn.states.get(parser.getState()); currentToken = parser.getCurrentToken(); context = parser.getContext(); } Analyzer analyzer = new Analyzer(parser, specialRules, specialTokens, ignoredRules); Result result = analyzer.process(currentState, currentToken.getTokenIndex(), context); // pick the candidate tokens associated largest token index processed (i.e., the path that consumed the most input) String expected = result.getExpected().stream() .sorted() .collect(Collectors.joining(", ")); message = format("mismatched input '%s'. Expecting: %s", parser.getTokenStream().get(result.getErrorTokenIndex()).getText(), expected); } catch (Exception exception) { LOG.error(exception, "Unexpected failure when handling parsing error. 
This is likely a bug in the implementation"); } throw new ParsingException(message, e, line, charPositionInLine + 1); } private static class ParsingState { public final ATNState state; public final int tokenIndex; public final boolean suppressed; public final Parser parser; public ParsingState(ATNState state, int tokenIndex, boolean suppressed, Parser parser) { this.state = state; this.tokenIndex = tokenIndex; this.suppressed = suppressed; this.parser = parser; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } ParsingState that = (ParsingState) o; return tokenIndex == that.tokenIndex && state.equals(that.state); } @Override public int hashCode() { return Objects.hash(state, tokenIndex); } @Override public String toString() { Token token = parser.getTokenStream().get(tokenIndex); String text = firstNonNull(token.getText(), "?"); if (text != null) { text = text.replace("\\", "\\\\"); text = text.replace("\n", "\\n"); text = text.replace("\r", "\\r"); text = text.replace("\t", "\\t"); } return format( "%s%s:%s @ %s:<%s>:%s", suppressed ? 
"-" : "+", parser.getRuleNames()[state.ruleIndex], state.stateNumber, tokenIndex, parser.getVocabulary().getSymbolicName(token.getType()), text); } } private static class Analyzer { private final Parser parser; private final ATN atn; private final Vocabulary vocabulary; private final Map<Integer, String> specialRules; private final Map<Integer, String> specialTokens; private final Set<Integer> ignoredRules; private final TokenStream stream; private int furthestTokenIndex = -1; private final Set<String> candidates = new HashSet<>(); private final Map<ParsingState, Set<Integer>> memo = new HashMap<>(); public Analyzer( Parser parser, Map<Integer, String> specialRules, Map<Integer, String> specialTokens, Set<Integer> ignoredRules) { this.parser = parser; this.stream = parser.getTokenStream(); this.atn = parser.getATN(); this.vocabulary = parser.getVocabulary(); this.specialRules = specialRules; this.specialTokens = specialTokens; this.ignoredRules = ignoredRules; } public Result process(ATNState currentState, int tokenIndex, RuleContext context) { RuleStartState startState = atn.ruleToStartState[currentState.ruleIndex]; if (isReachable(currentState, startState)) { // We've been dropped inside a rule in a state that's reachable via epsilon transitions. This is, // effectively, equivalent to starting at the beginning (or immediately outside) the rule. 
// In that case, backtrack to the beginning to be able to take advantage of logic that remaps // some rules to well-known names for reporting purposes currentState = startState; } Set<Integer> endTokens = process(new ParsingState(currentState, tokenIndex, false, parser), 0); Set<Integer> nextTokens = new HashSet<>(); while (!endTokens.isEmpty() && context.invokingState != -1) { for (int endToken : endTokens) { ATNState nextState = ((RuleTransition) atn.states.get(context.invokingState).transition(0)).followState; nextTokens.addAll(process(new ParsingState(nextState, endToken, false, parser), 0)); } context = context.parent; endTokens = nextTokens; } return new Result(furthestTokenIndex, candidates); } private boolean isReachable(ATNState target, RuleStartState from) { Deque<ATNState> activeStates = new ArrayDeque<>(); activeStates.add(from); while (!activeStates.isEmpty()) { ATNState current = activeStates.pop(); if (current.stateNumber == target.stateNumber) { return true; } for (int i = 0; i < current.getNumberOfTransitions(); i++) { Transition transition = current.transition(i); if (transition.isEpsilon()) { activeStates.push(transition.target); } } } return false; } private Set<Integer> process(ParsingState start, int precedence) { Set<Integer> result = memo.get(start); if (result != null) { return result; } ImmutableSet.Builder<Integer> endTokens = ImmutableSet.builder(); // Simulates the ATN by consuming input tokens and walking transitions. 
// The ATN can be in multiple states (similar to an NFA) Deque<ParsingState> activeStates = new ArrayDeque<>(); activeStates.add(start); while (!activeStates.isEmpty()) { ParsingState current = activeStates.pop(); ATNState state = current.state; int tokenIndex = current.tokenIndex; boolean suppressed = current.suppressed; while (stream.get(tokenIndex).getChannel() == Token.HIDDEN_CHANNEL) { // Ignore whitespace tokenIndex++; } int currentToken = stream.get(tokenIndex).getType(); if (state.getStateType() == RULE_START) { int rule = state.ruleIndex; if (specialRules.containsKey(rule)) { if (!suppressed) { record(tokenIndex, specialRules.get(rule)); } suppressed = true; } else if (ignoredRules.contains(rule)) { // TODO expand ignored rules like we expand special rules continue; } } if (state instanceof RuleStopState) { endTokens.add(tokenIndex); continue; } for (int i = 0; i < state.getNumberOfTransitions(); i++) { Transition transition = state.transition(i); if (transition instanceof RuleTransition) { RuleTransition ruleTransition = (RuleTransition) transition; for (int endToken : process(new ParsingState(ruleTransition.target, tokenIndex, suppressed, parser), ruleTransition.precedence)) { activeStates.push(new ParsingState(ruleTransition.followState, endToken, suppressed, parser)); } } else if (transition instanceof PrecedencePredicateTransition) { if (precedence < ((PrecedencePredicateTransition) transition).precedence) { activeStates.push(new ParsingState(transition.target, tokenIndex, suppressed, parser)); } } else if (transition.isEpsilon()) { activeStates.push(new ParsingState(transition.target, tokenIndex, suppressed, parser)); } else if (transition instanceof WildcardTransition) { throw new UnsupportedOperationException("not yet implemented: wildcard transition"); } else { IntervalSet labels = transition.label(); if (transition instanceof NotSetTransition) { labels = labels.complement(IntervalSet.of(Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType)); } // 
Surprisingly, TokenStream (i.e. BufferedTokenStream) may not have loaded all the tokens from the // underlying stream. TokenStream.get() does not force tokens to be buffered -- it just returns what's // in the current buffer, or fail with an IndexOutOfBoundsError. Since Antlr decided the error occurred // within the current set of buffered tokens, stop when we reach the end of the buffer. if (labels.contains(currentToken) && tokenIndex < stream.size() - 1) { activeStates.push(new ParsingState(transition.target, tokenIndex + 1, false, parser)); } else { if (!suppressed) { record(tokenIndex, getTokenNames(labels)); } } } } } result = endTokens.build(); memo.put(start, result); return result; } private void record(int tokenIndex, String label) { record(tokenIndex, ImmutableSet.of(label)); } private void record(int tokenIndex, Set<String> labels) { if (tokenIndex >= furthestTokenIndex) { if (tokenIndex > furthestTokenIndex) { candidates.clear(); furthestTokenIndex = tokenIndex; } candidates.addAll(labels); } } private Set<String> getTokenNames(IntervalSet tokens) { Set<String> names = new HashSet<>(); for (int i = 0; i < tokens.size(); i++) { int token = tokens.get(i); if (token == Token.EOF) { names.add("<EOF>"); } else { names.add(specialTokens.getOrDefault(token, vocabulary.getDisplayName(token))); } } return names; } } public static Builder builder() { return new Builder(); } public static class Builder { private final Map<Integer, String> specialRules = new HashMap<>(); private final Map<Integer, String> specialTokens = new HashMap<>(); private final Set<Integer> ignoredRules = new HashSet<>(); public Builder specialRule(int ruleId, String name) { specialRules.put(ruleId, name); return this; } public Builder specialToken(int tokenId, String name) { specialTokens.put(tokenId, name); return this; } public Builder ignoredRule(int ruleId) { ignoredRules.add(ruleId); return this; } public ErrorHandler build() { return new ErrorHandler(specialRules, specialTokens, 
ignoredRules); } } private static class Result { private final int errorTokenIndex; private final Set<String> expected; public Result(int errorTokenIndex, Set<String> expected) { this.errorTokenIndex = errorTokenIndex; this.expected = expected; } public int getErrorTokenIndex() { return errorTokenIndex; } public Set<String> getExpected() { return expected; } } }
/* * JBoss, Home of Professional Open Source. * Copyright 2012, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.keycloak.testsuite.actions; import org.junit.Assert; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.keycloak.events.Details; import org.keycloak.events.Event; import org.keycloak.events.EventType; import org.keycloak.models.RealmModel; import org.keycloak.models.utils.TimeBasedOTP; import org.keycloak.representations.idm.CredentialRepresentation; import org.keycloak.services.managers.RealmManager; import org.keycloak.testsuite.AssertEvents; import org.keycloak.testsuite.OAuthClient; import org.keycloak.testsuite.pages.AccountTotpPage; import org.keycloak.testsuite.pages.AppPage; import org.keycloak.testsuite.pages.AppPage.RequestType; import org.keycloak.testsuite.pages.LoginConfigTotpPage; import org.keycloak.testsuite.pages.LoginPage; import org.keycloak.testsuite.pages.LoginTotpPage; import org.keycloak.testsuite.pages.RegisterPage; import org.keycloak.testsuite.rule.KeycloakRule; import 
org.keycloak.testsuite.rule.KeycloakRule.KeycloakSetup;
import org.keycloak.testsuite.rule.WebResource;
import org.keycloak.testsuite.rule.WebRule;
import org.keycloak.testsuite.utils.CredentialHelper;

import org.openqa.selenium.WebDriver;

/**
 * Tests the "configure TOTP" required action: new registrations, existing users,
 * and re-configuration after the TOTP credential is removed from the account page.
 *
 * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
 */
public class RequiredActionTotpSetupTest {

    @ClassRule
    public static KeycloakRule keycloakRule = new KeycloakRule(new KeycloakSetup() {
        @Override
        public void config(RealmManager manager, RealmModel defaultRealm, RealmModel appRealm) {
            // Make TOTP a required credential so every login triggers the setup action
            CredentialHelper.setRequiredCredential(CredentialRepresentation.TOTP, appRealm);
            appRealm.addRequiredCredential(CredentialRepresentation.TOTP);
            appRealm.setResetPasswordAllowed(true);
        }
    });

    @Rule
    public AssertEvents events = new AssertEvents(keycloakRule);

    @Rule
    public WebRule webRule = new WebRule(this);

    @WebResource
    protected WebDriver driver;

    @WebResource
    protected AppPage appPage;

    @WebResource
    protected LoginPage loginPage;

    @WebResource
    protected LoginTotpPage loginTotpPage;

    @WebResource
    protected LoginConfigTotpPage totpPage;

    @WebResource
    protected AccountTotpPage accountTotpPage;

    @WebResource
    protected OAuthClient oauth;

    @WebResource
    protected RegisterPage registerPage;

    protected TimeBasedOTP totp = new TimeBasedOTP();

    /** Registers a new user and verifies the TOTP setup page is forced before login completes. */
    @Test
    public void setupTotpRegister() {
        loginPage.open();
        loginPage.clickRegister();
        registerPage.register("firstName", "lastName", "email@mail.com", "setupTotp", "password", "password");

        String userId = events.expectRegister("setupTotp", "email@mail.com").assertEvent().getUserId();

        totpPage.assertCurrent();

        totpPage.configure(totp.generate(totpPage.getTotpSecret()));

        String sessionId = events.expectRequiredAction(EventType.UPDATE_TOTP).user(userId).detail(Details.USERNAME, "setuptotp").assertEvent().getSessionId();

        Assert.assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());

        events.expectLogin().user(userId).session(sessionId).detail(Details.USERNAME, "setuptotp").assertEvent();
    }

    /** Existing user configures TOTP, then must supply a one-time password on the next login. */
    @Test
    public void setupTotpExisting() {
        loginPage.open();
        loginPage.login("test-user@localhost", "password");

        totpPage.assertCurrent();

        String totpSecret = totpPage.getTotpSecret();
        totpPage.configure(totp.generate(totpSecret));

        String sessionId = events.expectRequiredAction(EventType.UPDATE_TOTP).assertEvent().getSessionId();

        Assert.assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());

        Event loginEvent = events.expectLogin().session(sessionId).assertEvent();

        oauth.openLogout();

        events.expectLogout(loginEvent.getSessionId()).assertEvent();

        loginPage.open();
        loginPage.login("test-user@localhost", "password");

        // (removed an unused local that captured driver.getPageSource() — it was never read)
        loginTotpPage.login(totp.generate(totpSecret));

        Assert.assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());

        events.expectLogin().assertEvent();
    }

    /** After removing the authenticator from the account page, the setup action must be required again. */
    @Test
    public void setupTotpRegisteredAfterTotpRemoval() {
        // Register new user
        loginPage.open();
        loginPage.clickRegister();
        registerPage.register("firstName2", "lastName2", "email2@mail.com", "setupTotp2", "password2", "password2");

        String userId = events.expectRegister("setupTotp2", "email2@mail.com").assertEvent().getUserId();

        // Configure totp
        totpPage.assertCurrent();

        String totpCode = totpPage.getTotpSecret();
        totpPage.configure(totp.generate(totpCode));

        // After totp config, user should be on the app page
        Assert.assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());

        events.expectRequiredAction(EventType.UPDATE_TOTP).user(userId).detail(Details.USERNAME, "setuptotp2").assertEvent();

        Event loginEvent = events.expectLogin().user(userId).detail(Details.USERNAME, "setuptotp2").assertEvent();

        // Logout
        oauth.openLogout();
        events.expectLogout(loginEvent.getSessionId()).user(userId).assertEvent();

        // Try to login after logout
        loginPage.open();
        loginPage.login("setupTotp2", "password2");

        // Totp is already configured, thus one-time password is needed, login page should be loaded
        Assert.assertTrue(loginPage.isCurrent());
        Assert.assertFalse(totpPage.isCurrent());

        // Login with one-time password
        loginTotpPage.login(totp.generate(totpCode));

        loginEvent = events.expectLogin().user(userId).detail(Details.USERNAME, "setuptotp2").assertEvent();

        // Open account page
        accountTotpPage.open();
        accountTotpPage.assertCurrent();

        // Remove google authentificator
        accountTotpPage.removeTotp();

        events.expectAccount(EventType.REMOVE_TOTP).user(userId).assertEvent();

        // Logout
        oauth.openLogout();
        events.expectLogout(loginEvent.getSessionId()).user(userId).assertEvent();

        // Try to login
        loginPage.open();
        loginPage.login("setupTotp2", "password2");

        // Since the authentificator was removed, it has to be set up again
        totpPage.assertCurrent();
        totpPage.configure(totp.generate(totpPage.getTotpSecret()));

        String sessionId = events.expectRequiredAction(EventType.UPDATE_TOTP).user(userId).detail(Details.USERNAME, "setuptotp2").assertEvent().getSessionId();

        Assert.assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());

        events.expectLogin().user(userId).session(sessionId).detail(Details.USERNAME, "setuptotp2").assertEvent();
    }
}
package io.lqd.sdk;

/**
 *
 * Thanks to:
 * https://github.com/FasterXML/jackson-databind/blob/master/src/main/java/com/fasterxml/jackson/databind/util/ISO8601Utils.java
 */

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.TimeZone;

/**
 * Utilities methods for manipulating dates in iso8601 format. This is much much faster and GC friendly than
 * using SimpleDateFormat so highly suitable if you (un)serialize lots of date objects.
 */
public class ISO8601Utils {

    /**
     * ID to represent the 'GMT' string
     */
    private static final String GMT_ID = "GMT";

    /**
     * The GMT timezone
     */
    private static final TimeZone TIMEZONE_GMT = TimeZone.getTimeZone(GMT_ID);

    /*
    /**********************************************************
    /* Static factories
    /**********************************************************
     */

    /**
     * Accessor for static GMT timezone instance.
     */
    public static TimeZone timeZoneGMT() {
        return TIMEZONE_GMT;
    }

    /*
    /**********************************************************
    /* Formatting
    /**********************************************************
     */

    /**
     * Format a date into 'yyyy-MM-ddThh:mm:ssZ' (GMT timezone, no milliseconds precision)
     *
     * @param date the date to format
     * @return the date formatted as 'yyyy-MM-ddThh:mm:ssZ'
     */
    public static String format(Date date) {
        return format(date, false, TIMEZONE_GMT);
    }

    /**
     * Format a date into 'yyyy-MM-ddThh:mm:ss[.sss]Z' (GMT timezone)
     *
     * @param date the date to format
     * @param millis true to include millis precision otherwise false
     * @return the date formatted as 'yyyy-MM-ddThh:mm:ss[.sss]Z'
     */
    public static String format(Date date, boolean millis) {
        return format(date, millis, TIMEZONE_GMT);
    }

    /**
     * Format date into yyyy-MM-ddThh:mm:ss[.sss][Z|[+-]hh:mm]
     *
     * @param date the date to format
     * @param millis true to include millis precision otherwise false
     * @param tz timezone to use for the formatting (GMT will produce 'Z')
     * @return the date formatted as yyyy-MM-ddThh:mm:ss[.sss][Z|[+-]hh:mm]
     */
    public static String format(Date date, boolean millis, TimeZone tz) {
        Calendar calendar = new GregorianCalendar(tz, Locale.US);
        calendar.setTime(date);

        // estimate capacity of buffer as close as we can (yeah, that's pedantic ;)
        int capacity = "yyyy-MM-ddThh:mm:ss".length();
        capacity += millis ? ".sss".length() : 0;
        capacity += tz.getRawOffset() == 0 ? "Z".length() : "+hh:mm".length();
        StringBuilder formatted = new StringBuilder(capacity);

        padInt(formatted, calendar.get(Calendar.YEAR), "yyyy".length());
        formatted.append('-');
        padInt(formatted, calendar.get(Calendar.MONTH) + 1, "MM".length());
        formatted.append('-');
        padInt(formatted, calendar.get(Calendar.DAY_OF_MONTH), "dd".length());
        formatted.append('T');
        padInt(formatted, calendar.get(Calendar.HOUR_OF_DAY), "hh".length());
        formatted.append(':');
        padInt(formatted, calendar.get(Calendar.MINUTE), "mm".length());
        formatted.append(':');
        padInt(formatted, calendar.get(Calendar.SECOND), "ss".length());
        if (millis) {
            formatted.append('.');
            padInt(formatted, calendar.get(Calendar.MILLISECOND), "sss".length());
        }

        // Use the offset at the instant being formatted (DST-aware), not the raw offset
        int offset = tz.getOffset(calendar.getTimeInMillis());
        if (offset != 0) {
            int hours = Math.abs((offset / (60 * 1000)) / 60);
            int minutes = Math.abs((offset / (60 * 1000)) % 60);
            formatted.append(offset < 0 ? '-' : '+');
            padInt(formatted, hours, "hh".length());
            formatted.append(':');
            padInt(formatted, minutes, "mm".length());
        } else {
            formatted.append('Z');
        }

        return formatted.toString();
    }

    /*
    /**********************************************************
    /* Parsing
    /**********************************************************
     */

    /**
     * Parse a date from ISO-8601 formatted string. It expects a format yyyy-MM-ddThh:mm:ss[.sss][Z|[+-]hh:mm]
     *
     * @param date ISO string to parse in the appropriate format.
     * @return the parsed date
     * @throws IllegalArgumentException if the date is not in the appropriate format
     */
    public static Date parse(String date) {
        Exception fail = null;
        try {
            int offset = 0;

            // extract year
            int year = parseInt(date, offset, offset += 4);
            checkOffset(date, offset, '-');

            // extract month
            int month = parseInt(date, offset += 1, offset += 2);
            checkOffset(date, offset, '-');

            // extract day
            int day = parseInt(date, offset += 1, offset += 2);
            checkOffset(date, offset, 'T');

            // extract hours, minutes, seconds and milliseconds
            int hour = parseInt(date, offset += 1, offset += 2);
            checkOffset(date, offset, ':');

            int minutes = parseInt(date, offset += 1, offset += 2);
            checkOffset(date, offset, ':');

            int seconds = parseInt(date, offset += 1, offset += 2);
            // milliseconds can be optional in the format
            int milliseconds = 0; // always use 0 otherwise returned date will include millis of current time
            if (date.charAt(offset) == '.') {
                checkOffset(date, offset, '.');
                milliseconds = parseInt(date, offset += 1, offset += 3);
            }

            // extract timezone
            String timezoneId;
            char timezoneIndicator = date.charAt(offset);
            if (timezoneIndicator == '+' || timezoneIndicator == '-') {
                timezoneId = GMT_ID + date.substring(offset);
            } else if (timezoneIndicator == 'Z') {
                timezoneId = GMT_ID;
            } else {
                throw new IndexOutOfBoundsException("Invalid time zone indicator " + timezoneIndicator);
            }
            TimeZone timezone = TimeZone.getTimeZone(timezoneId);
            // TimeZone.getTimeZone silently falls back to GMT for unknown IDs;
            // comparing the resolved ID detects a malformed offset suffix
            if (!timezone.getID().equals(timezoneId)) {
                throw new IndexOutOfBoundsException();
            }

            Calendar calendar = new GregorianCalendar(timezone);
            calendar.setLenient(false);
            calendar.set(Calendar.YEAR, year);
            calendar.set(Calendar.MONTH, month - 1);
            calendar.set(Calendar.DAY_OF_MONTH, day);
            calendar.set(Calendar.HOUR_OF_DAY, hour);
            calendar.set(Calendar.MINUTE, minutes);
            calendar.set(Calendar.SECOND, seconds);
            calendar.set(Calendar.MILLISECOND, milliseconds);

            return calendar.getTime();
        } catch (IndexOutOfBoundsException e) {
            fail = e;
        } catch (NumberFormatException e) {
            fail = e;
        } catch (IllegalArgumentException e) {
            fail = e;
        }
        // BUG FIX: the input used to be wrapped as "...' (opening double quote,
        // closing single quote); use matching double quotes in the error message
        String input = (date == null) ? null : ('"' + date + '"');
        throw new IllegalArgumentException("Failed to parse date [" + input + "]: " + fail.getMessage(), fail);
    }

    /**
     * Check if the expected character exist at the given offset of the
     *
     * @param value the string to check at the specified offset
     * @param offset the offset to look for the expected character
     * @param expected the expected character
     * @throws IndexOutOfBoundsException if the expected character is not found
     */
    private static void checkOffset(String value, int offset, char expected) throws IndexOutOfBoundsException {
        char found = value.charAt(offset);
        if (found != expected) {
            throw new IndexOutOfBoundsException("Expected '" + expected + "' character but found '" + found + "'");
        }
    }

    /**
     * Parse an integer located between 2 given offsets in a string
     *
     * @param value the string to parse
     * @param beginIndex the start index for the integer in the string
     * @param endIndex the end index for the integer in the string
     * @return the int
     * @throws NumberFormatException if the value is not a number
     */
    private static int parseInt(String value, int beginIndex, int endIndex) throws NumberFormatException {
        if (beginIndex < 0 || endIndex > value.length() || beginIndex > endIndex) {
            throw new NumberFormatException(value);
        }
        // use same logic as in Integer.parseInt() but less generic we're not supporting negative values
        int i = beginIndex;
        int result = 0;
        int digit;
        if (i < endIndex) {
            digit = Character.digit(value.charAt(i++), 10);
            if (digit < 0) {
                throw new NumberFormatException("Invalid number: " + value);
            }
            result = -digit;
        }
        while (i < endIndex) {
            digit = Character.digit(value.charAt(i++), 10);
            if (digit < 0) {
                throw new NumberFormatException("Invalid number: " + value);
            }
            result *= 10;
            result -= digit;
        }
        return -result;
    }

    /**
     * Zero pad a number to a specified length
     *
     * @param buffer buffer to use for padding
     * @param value the integer value to pad if necessary.
     * @param length the length of the string we should zero pad
     */
    private static void padInt(StringBuilder buffer, int value, int length) {
        String strValue = Integer.toString(value);
        for (int i = length - strValue.length(); i > 0; i--) {
            buffer.append('0');
        }
        buffer.append(strValue);
    }
}
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.heliosapm.tsdblite.sub;

import java.nio.charset.Charset;
import java.util.Map;
import java.util.concurrent.ExecutorService;

import javax.management.MBeanServer;
import javax.management.MBeanServerDelegate;
import javax.management.MBeanServerNotification;
import javax.management.Notification;
import javax.management.NotificationFilter;
import javax.management.NotificationListener;
import javax.management.ObjectName;

import org.cliffc.high_scale_lib.NonBlockingHashMapLong;

import com.google.common.hash.Funnel;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.common.hash.PrimitiveSink;
import com.heliosapm.tsdblite.jmx.ManagedDefaultExecutorServiceFactory;
import com.heliosapm.tsdblite.metric.AppMetricMXBean;
import com.heliosapm.tsdblite.metric.MetricCache;
import com.heliosapm.tsdblite.metric.Trace;
import com.heliosapm.utils.jmx.notifcations.ProxySubscriptionService;
import com.heliosapm.utils.tuples.NVP;

import io.netty.channel.Channel;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.util.concurrent.DefaultEventExecutor;

/**
 * <p>Title: Subscription</p>
 * <p>Description: </p>
 * <p>Company: Helios Development Group LLC</p>
 * @author Whitehead (nwhitehead AT heliosdev DOT org)
 * <p><code>com.heliosapm.tsdblite.sub.Subscription</code></p>
 */
public class Subscription implements NotificationFilter, NotificationListener {
	/** The subscription pattern */
	final ObjectName pattern;
	/** The subscription type */
	final SubscriptionEvent subType;
	/** The subscription long hash code */
	final long hashCode;
	/** The channels that are subscribed to this subscription */
	final ChannelGroup channelGroup = new DefaultChannelGroup("Subscription", subscriptionEventExecutor);
	/** Indicates if the object name is an actual pattern or if it refers to a single ObjectName */
	final boolean wildcard;
	/** The metric MBeanServer */
	final MBeanServer mbeanServer;
	/** The proxy subscription service */
	final ProxySubscriptionService proxySubservice;
	/** True for subscription types that track MBean (un)registration by ObjectName */
	final boolean forObjectName;
	/** True for subscription types that only track new ObjectNames */
	final boolean forNewObjectName;

	/** The UTF8 character set */
	public static final Charset UTF8 = Charset.forName("UTF8");
	/** The hashing function to compute hashes for subscription */
	private static final HashFunction SUBSCRIPTION_HASHER = Hashing.murmur3_128();
	/** A placeholder subscription */
	private static final Subscription PLACEHOLDER = new Subscription();
	/** A cache of subscriptions keyed by the subscription long hash code */
	private static final NonBlockingHashMapLong<Subscription> subscriptions = new NonBlockingHashMapLong<Subscription>(128, false);
	/** Executor service for the subscription channel group */
	private static final ExecutorService subscriptionChannelService = new ManagedDefaultExecutorServiceFactory("subscription").newExecutorService(4);
	/** The event executor for the subscription channel group */
	private static final DefaultEventExecutor subscriptionEventExecutor = new DefaultEventExecutor(subscriptionChannelService);

	/**
	 * Funnel that feeds a (pattern, subscription-type) pair into the hasher so that
	 * equivalent subscriptions map to the same long hash code.
	 */
	private static enum SubscriptionFunnel implements Funnel<NVP<ObjectName, SubscriptionEvent>> {
		/** The funnel instance */
		INSTANCE;

		/**
		 * {@inheritDoc}
		 * @see com.google.common.hash.Funnel#funnel(java.lang.Object, com.google.common.hash.PrimitiveSink)
		 */
		@Override
		public void funnel(final NVP<ObjectName, SubscriptionEvent> sub, final PrimitiveSink into) {
			try {
				final ObjectName on = sub.getKey();
				final int subType = sub.getValue().ordinal();
				final boolean patternList = on.isPropertyListPattern();
				into.putString(on.getDomain().trim(), UTF8);
				for(Map.Entry<String, String> entry : on.getKeyPropertyList().entrySet()) {
					into.putString(entry.getKey().trim(), UTF8);
					into.putString(entry.getValue().trim(), UTF8);
				}
				if(patternList) into.putString("*", UTF8);
				into.putInt(subType);
			} catch (Exception ex) {
				throw new RuntimeException("Failed to extract hashcode from sub [" + sub.getKey() + "/" + sub.getValue() + "]", ex);
			}
		}
	}

	/**
	 * Acquires the Subscription for the passed pattern and subscription type
	 * @param pattern The subscription pattern
	 * @param subType the subscription type
	 * @return the Subscription
	 */
	static Subscription get(final ObjectName pattern, final SubscriptionEvent subType, final ProxySubscriptionService proxySubservice) {
		if(pattern==null) throw new IllegalArgumentException("The passed ObjectName was null");
		if(subType==null) throw new IllegalArgumentException("The passed SubscriptionEvent was null");
		final long hashCode = SUBSCRIPTION_HASHER.hashObject(new NVP<ObjectName, SubscriptionEvent>(pattern, subType), SubscriptionFunnel.INSTANCE).padToLong();
		// BUG FIX: use putIfAbsent rather than put. The previous unconditional put
		// overwrote an already-registered Subscription with PLACEHOLDER and returned
		// the old instance, leaving the cache holding PLACEHOLDER — the next get()
		// would then create a duplicate Subscription and double-register listeners.
		Subscription sub = subscriptions.putIfAbsent(hashCode, PLACEHOLDER);
		if(sub==null || sub==PLACEHOLDER) {
			sub = new Subscription(pattern, subType, hashCode, proxySubservice);
			subscriptions.replace(hashCode, sub);
		}
		return sub;
	}

	/** Constructs the inert PLACEHOLDER instance; registers nothing. */
	private Subscription() {
		pattern = null;
		subType = null;
		hashCode = -1;
		wildcard = false;
		mbeanServer = null;
		forObjectName = false;
		forNewObjectName = false;
		proxySubservice = null;
	}

	/**
	 * Creates a new Subscription and registers the appropriate listener:
	 * either on the MBeanServer delegate (ObjectName lifecycle events) or on the
	 * proxy subscription service (metric data events).
	 */
	private Subscription(final ObjectName pattern, final SubscriptionEvent subType, final long hashCode, final ProxySubscriptionService proxySubservice) {
		this.pattern = pattern;
		this.subType = subType;
		this.hashCode = hashCode;
		this.wildcard = pattern.isPattern();
		// NOTE(review): ordinal thresholds encode SubscriptionEvent's declaration order — verify against that enum
		forObjectName = subType.ordinal() < 3;
		forNewObjectName = subType.ordinal() < 2;
		mbeanServer = MetricCache.getInstance().getMetricMBeanServerInstance();
		this.proxySubservice = proxySubservice;
		if(forObjectName) {
			try {
				mbeanServer.addNotificationListener(MBeanServerDelegate.DELEGATE_NAME, this, this, null);
			} catch (Exception ex) {
				throw new RuntimeException("Failed to register subscription listener [" + toString() + "]", ex);
			}
		} else if(subType==SubscriptionEvent.DATA4METRICS) {
			try {
				proxySubservice.subscribe(pattern, null, this, this, null);
			} catch (Exception ex) {
				throw new RuntimeException("Failed to register subscription listener [" + toString() + "]", ex);
			}
		}
	}

	/**
	 * {@inheritDoc}
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		return new StringBuilder("Subscription [").append(subType).append(":").append(pattern).append("]").toString();
	}

	/**
	 * Indicates if this subscription is a wildcard
	 * @return true if this subscription is a wildcard, false otherwise
	 */
	public boolean isWildcard() {
		return wildcard;
	}

	/** Broadcasts a newly registered metric ObjectName to all subscribed channels */
	public void onNewMetric(final ObjectName on) {
		channelGroup.write(on);
	}

	/** Broadcasts an expired metric ObjectName to all subscribed channels */
	public void onExpiredMetric(final ObjectName on) {
		channelGroup.write(on);
	}

	/** Broadcasts a metric data submission to all subscribed channels */
	public void onMetricSubmission(final Trace trace) {
		channelGroup.write(trace);
	}

	/**
	 * Adds a channel to this subscription
	 * @param channel The channel to add
	 * @return true if the channel was added, false otherwise
	 */
	public boolean addChannel(final Channel channel) {
		if(channel!=null && channel.isOpen()) {
			return channelGroup.add(channel);
		}
		return false;
	}

	/**
	 * {@inheritDoc}
	 * @see javax.management.NotificationListener#handleNotification(javax.management.Notification, java.lang.Object)
	 */
	@Override
	public void handleNotification(final Notification n, final Object handback) {
		if(forObjectName) {
			MBeanServerNotification m = (MBeanServerNotification)n;
			if(MBeanServerNotification.REGISTRATION_NOTIFICATION.equals(m.getType())) {
				onNewMetric(m.getMBeanName());
			} else {
				onExpiredMetric(m.getMBeanName());
			}
		} else {
			onMetricSubmission((Trace)n.getUserData());
		}
	}

	/**
	 * {@inheritDoc}
	 * @see javax.management.NotificationFilter#isNotificationEnabled(javax.management.Notification)
	 */
	@Override
	public boolean isNotificationEnabled(Notification n) {
		if(forObjectName) {
			if(n instanceof MBeanServerNotification) {
				final ObjectName on = ((MBeanServerNotification)n).getMBeanName();
				// NOTE(review): non-wildcard subscriptions are always filtered out here;
				// confirm whether exact-name subscriptions should match via pattern.apply(on)
				return (wildcard && pattern.apply(on));
			}
		} else {
			final Object source = n.getSource();
			if(source instanceof ObjectName) {
				final ObjectName on = (ObjectName)source;
				if(pattern.apply(on)) {
					if(AppMetricMXBean.NOTIF_NEW_METRIC.equals(n.getType())) return true;
				}
			}
		}
		return false;
	}

	/**
	 * {@inheritDoc}
	 * @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + (int) (hashCode ^ (hashCode >>> 32));
		return result;
	}

	/**
	 * {@inheritDoc}
	 * @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		Subscription other = (Subscription) obj;
		if (hashCode != other.hashCode)
			return false;
		return true;
	}
}
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skylarkbuildapi.apple;

import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.skylarkbuildapi.FileApi;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
import com.google.devtools.build.lib.skylarkinterface.SkylarkValue;
import com.google.devtools.build.lib.syntax.SkylarkList;
import com.google.devtools.build.lib.syntax.SkylarkNestedSet;

/**
 * An interface for an info type that provides all compiling and linking information in the
 * transitive closure of its deps that are needed for building Objective-C rules.
 *
 * <p>Each accessor below is exposed to Starlark as a struct field of the {@code ObjcProvider};
 * the Starlark-visible name and user-facing documentation live in the {@code @SkylarkCallable}
 * annotation on each method (do not edit those strings without updating the Starlark docs).
 * Accessors prefixed with {@code direct_} return only this target's own artifacts; all others
 * are transitive sets.
 */
@SkylarkModule(
    name = "ObjcProvider",
    category = SkylarkModuleCategory.PROVIDER,
    doc = "A provider for compilation and linking of objc."
)
public interface ObjcProviderApi<FileApiT extends FileApi> extends SkylarkValue {

  // ----- Compilation inputs: defines, headers, search paths -----

  @SkylarkCallable(name = "define",
      structField = true,
      doc = "A set of strings from 'defines' attributes. These are to be passed as '-D' flags to "
          + "all invocations of the compiler for this target and all depending targets."
  )
  public NestedSet<String> define();

  @SkylarkCallable(
      name = "dynamic_framework_file",
      structField = true,
      doc = "The library files in .framework directories belonging to a dynamically linked "
          + "framework.")
  public NestedSet<FileApiT> dynamicFrameworkFile();

  @SkylarkCallable(name = "exported_debug_artifacts",
      structField = true,
      doc = "Debug files that should be exported by the top-level target."
  )
  public NestedSet<FileApiT> exportedDebugArtifacts();

  @SkylarkCallable(name = "framework_search_path_only",
      structField = true,
      doc = "Exec paths of .framework directories corresponding to frameworks to include "
          + "in search paths, but not to link."
  )
  public SkylarkNestedSet frameworkSearchPathOnly();

  @SkylarkCallable(name = "force_load_library",
      structField = true,
      doc = "Libraries to load with -force_load."
  )
  public NestedSet<FileApiT> forceLoadLibrary();

  @SkylarkCallable(name = "header",
      structField = true,
      doc = "All header files. These may be either public or private headers."
  )
  public NestedSet<FileApiT> header();

  @SkylarkCallable(
      name = "direct_headers",
      structField = true,
      doc = "Header files from this target directly (no transitive headers). "
          + "These may be either public or private headers.")
  public SkylarkList<FileApiT> directHeaders();

  @SkylarkCallable(name = "imported_library",
      structField = true,
      doc = "Imported precompiled static libraries (.a files) to be linked into the binary."
  )
  public NestedSet<FileApiT> importedLibrary();

  @SkylarkCallable(name = "include",
      structField = true,
      doc = "Include search paths specified with '-I' on the command line. Also known as "
          + "header search paths (and distinct from <em>user</em> header search paths)."
  )
  public SkylarkNestedSet include();

  @SkylarkCallable(name = "include_system",
      structField = true,
      doc = "System include search paths (typically specified with -isystem)."
  )
  public SkylarkNestedSet includeSystem();

  @SkylarkCallable(name = "iquote",
      structField = true,
      doc = "User header search paths (typically specified with -iquote)."
  )
  public SkylarkNestedSet iquote();

  // ----- Libraries and linker inputs -----

  @SkylarkCallable(name = "j2objc_library",
      structField = true,
      doc = "Static libraries that are built from J2ObjC-translated Java code."
  )
  public NestedSet<FileApiT> j2objcLibrary();

  @SkylarkCallable(name = "jre_library",
      structField = true,
      doc = "J2ObjC JRE emulation libraries and their dependencies."
  )
  public NestedSet<FileApiT> jreLibrary();

  @SkylarkCallable(name = "library",
      structField = true,
      doc = "Library (.a) files compiled by dependencies of the current target."
  )
  public NestedSet<FileApiT> library();

  @SkylarkCallable(name = "link_inputs",
      structField = true,
      doc = "Link time artifacts from dependencies that do not fall into any other category such "
          + "as libraries or archives. This catch-all provides a way to add arbitrary data (e.g. "
          + "Swift AST files) to the linker. The rule that adds these is also responsible to "
          + "add the necessary linker flags to 'linkopt'."
  )
  public NestedSet<FileApiT> linkInputs();

  @SkylarkCallable(name = "linked_binary",
      structField = true,
      doc = "Single-architecture linked binaries to be combined for the final multi-architecture "
          + "binary."
  )
  public NestedSet<FileApiT> linkedBinary();

  @SkylarkCallable(name = "linkmap_file",
      structField = true,
      doc = "Single-architecture link map for a binary."
  )
  public NestedSet<FileApiT> linkmapFile();

  @SkylarkCallable(name = "linkopt",
      structField = true,
      doc = "Linking options."
  )
  public NestedSet<String> linkopt();

  // ----- Bundling artifacts -----

  @SkylarkCallable(
      name = "merge_zip",
      structField = true,
      doc = "Merge zips to include in the bundle. The entries of these zip files are included "
          + "in the final bundle with the same path. The entries in the merge zips should not "
          + "include the bundle root path (e.g. 'Foo.app').")
  public NestedSet<FileApiT> mergeZip();

  @SkylarkCallable(name = "module_map",
      structField = true,
      doc = "Clang module maps, used to enforce proper use of private header files."
  )
  public NestedSet<FileApiT> moduleMap();

  @SkylarkCallable(
      name = "direct_module_maps",
      structField = true,
      doc = "Module map files from this target directly (no transitive module maps). "
          + "Used to enforce proper use of private header files and for Swift compilation.")
  public SkylarkList<FileApiT> directModuleMaps();

  @SkylarkCallable(name = "multi_arch_dynamic_libraries",
      structField = true,
      doc = "Combined-architecture dynamic libraries to include in the final bundle."
  )
  public NestedSet<FileApiT> multiArchDynamicLibraries();

  @SkylarkCallable(name = "multi_arch_linked_archives",
      structField = true,
      doc = "Combined-architecture archives to include in the final bundle."
  )
  public NestedSet<FileApiT> multiArchLinkedArchives();

  @SkylarkCallable(name = "multi_arch_linked_binaries",
      structField = true,
      doc = "Combined-architecture binaries to include in the final bundle."
  )
  public NestedSet<FileApiT> multiArchLinkedBinaries();

  // ----- SDK libraries and frameworks -----

  @SkylarkCallable(name = "sdk_dylib",
      structField = true,
      doc = "Names of SDK .dylib libraries to link with. For instance, 'libz' or 'libarchive'."
  )
  public NestedSet<String> sdkDylib();

  @SkylarkCallable(name = "sdk_framework",
      structField = true,
      doc = "Names of SDK frameworks to link with (e.g. 'AddressBook', 'QuartzCore')."
  )
  public SkylarkNestedSet sdkFramework();

  @SkylarkCallable(name = "source",
      structField = true,
      doc = "All transitive source files."
  )
  public NestedSet<FileApiT> source();

  @SkylarkCallable(
      name = "direct_sources",
      structField = true,
      doc = "All direct source files from this target (no transitive files).")
  public SkylarkList<FileApiT> directSources();

  @SkylarkCallable(
      name = "static_framework_file",
      structField = true,
      doc = "The library files in .framework directories that should be statically linked.")
  public NestedSet<FileApiT> staticFrameworkFile();

  @SkylarkCallable(name = "umbrella_header",
      structField = true,
      doc = "Clang umbrella header. Public headers are #included in umbrella headers to be "
          + "compatible with J2ObjC segmented headers."
  )
  public NestedSet<FileApiT> umbrellaHeader();

  @SkylarkCallable(
      name = "weak_sdk_framework",
      structField = true,
      doc = "Names of SDK frameworks to weakly link with. For instance, 'MediaAccessibility'. "
          + "In difference to regularly linked SDK frameworks, symbols from weakly linked "
          + "frameworks do not cause an error if they are not present at runtime.")
  public SkylarkNestedSet weakSdkFramework();

  // ----- Framework name/path views -----

  @SkylarkCallable(
      name = "dynamic_framework_names",
      structField = true,
      doc = "Returns all names of dynamic frameworks in this provider.")
  public NestedSet<String> dynamicFrameworkNames();

  @SkylarkCallable(
      name = "dynamic_framework_paths",
      structField = true,
      doc = "Returns all framework paths to dynamic frameworks in this provider.")
  public NestedSet<String> dynamicFrameworkPaths();

  @SkylarkCallable(
      name = "static_framework_names",
      structField = true,
      doc = "Returns all names of static frameworks in this provider.")
  public NestedSet<String> staticFrameworkNames();

  @SkylarkCallable(
      name = "static_framework_paths",
      structField = true,
      doc = "Returns all framework paths to static frameworks in this provider.")
  public NestedSet<String> staticFrameworkPaths();
}
/* * Copyright 2015 West Coast Informatics, LLC */ package org.ihtsdo.otf.refset.model; import static org.junit.Assert.assertTrue; import org.apache.log4j.Logger; import org.ihtsdo.otf.refset.helpers.ConfigUtility; import org.ihtsdo.otf.refset.helpers.CopyConstructorTester; import org.ihtsdo.otf.refset.helpers.EqualsHashcodeTester; import org.ihtsdo.otf.refset.helpers.GetterSetterTester; import org.ihtsdo.otf.refset.helpers.XmlSerializationTester; import org.ihtsdo.otf.refset.jpa.helpers.NullableFieldTester; import org.ihtsdo.otf.refset.rf2.Concept; import org.ihtsdo.otf.refset.rf2.ModuleDependencyRefSetMember; import org.ihtsdo.otf.refset.rf2.jpa.ConceptJpa; import org.ihtsdo.otf.refset.rf2.jpa.ModuleDependencyRefSetMemberJpa; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; /** * Unit testing for {@link ModuleDependencyRefSetMemberJpa}. */ public class ModelUnit018Test { /** The model object to test. */ private ModuleDependencyRefSetMemberJpa object; /** The test fixture c1. */ private Concept c1; /** The test fixture c2. */ private Concept c2; /** * Setup class. */ @BeforeClass public static void setupClass() { // do nothing } /** * Setup. */ @Before public void setup() { object = new ModuleDependencyRefSetMemberJpa(); // Set up some objects c1 = new ConceptJpa(); c1.setId(1L); c1.setTerminologyId("1"); c1.setDefinitionStatusId("1"); c2 = new ConceptJpa(); c2.setId(2L); c2.setTerminologyId("2"); c2.setDefinitionStatusId("2"); } /** * Test getter and setter methods of model object. * * @throws Exception the exception */ @Test public void testModelGetSet018() throws Exception { Logger.getLogger(getClass()).debug("TEST testModelGetSet009"); GetterSetterTester tester = new GetterSetterTester(object); tester.test(); } /** * Test equals and hascode methods. 
* * @throws Exception the exception */ @Test public void testModelEqualsHashcode018() throws Exception { Logger.getLogger(getClass()).debug("TEST testModelEqualsHashcode018"); EqualsHashcodeTester tester = new EqualsHashcodeTester(object); tester.include("active"); tester.include("moduleId"); tester.include("terminology"); tester.include("terminologyId"); tester.include("version"); tester.include("refSetId"); tester.include("concept"); // needed for generic refset class tester.include("component"); tester.include("sourceEffectiveTime"); tester.include("targetEffectiveTime"); // Set up some objects tester.proxy(Concept.class, 1, c1); tester.proxy(Concept.class, 2, c2); assertTrue(tester.testIdentitiyFieldEquals()); assertTrue(tester.testNonIdentitiyFieldEquals()); assertTrue(tester.testIdentityFieldNotEquals()); assertTrue(tester.testIdentitiyFieldHashcode()); assertTrue(tester.testNonIdentitiyFieldHashcode()); assertTrue(tester.testIdentityFieldDifferentHashcode()); } /** * Test copy constructor. * * @throws Exception the exception */ @Test public void testModelCopy018() throws Exception { Logger.getLogger(getClass()).debug("TEST testModelCopy009"); CopyConstructorTester tester = new CopyConstructorTester(object); // Set up some objects tester.proxy(Concept.class, 1, c1); tester.proxy(Concept.class, 2, c2); assertTrue(tester.testCopyConstructor(ModuleDependencyRefSetMember.class)); } /** * Test XML serialization. * * @throws Exception the exception */ @Test public void testModelXmlSerialization018() throws Exception { Logger.getLogger(getClass()).debug("TEST testModelXmlTransient018"); XmlSerializationTester tester = new XmlSerializationTester(object); // Set up some objects Concept c = new ConceptJpa(); c.setId(1L); c.setTerminology("1"); c.setTerminologyId("1"); c.setVersion("1"); c.setDefaultPreferredName("1"); tester.proxy(Concept.class, 1, c); assertTrue(tester.testXmlSerialization()); } /** * Test concept reference in XML serialization. 
* * @throws Exception the exception */ @Test public void testModelXmlTransient018() throws Exception { Logger.getLogger(getClass()).debug("TEST testModelXmlTransient018"); Concept c = new ConceptJpa(); c.setId(1L); c.setTerminologyId("1"); c.setDefaultPreferredName("1"); ModuleDependencyRefSetMember member = new ModuleDependencyRefSetMemberJpa(); member.setId(1L); member.setTerminologyId("1"); member.setConcept(c); String xml = ConfigUtility.getStringForGraph(member); assertTrue(xml.contains("<conceptId>")); assertTrue(xml.contains("<conceptTerminologyId>")); assertTrue(xml.contains("<conceptPreferredName>")); } /** * Test not null fields. * * @throws Exception the exception */ @Test public void testModelNotNullField018() throws Exception { Logger.getLogger(getClass()).debug("TEST testModelNotNullField018"); NullableFieldTester tester = new NullableFieldTester(object); tester.include("lastModified"); tester.include("lastModifiedBy"); tester.include("active"); tester.include("published"); tester.include("publishable"); tester.include("moduleId"); tester.include("terminologyId"); tester.include("terminology"); tester.include("version"); tester.include("refSetId"); tester.include("sourceEffectiveTime"); tester.include("targetEffectiveTime"); assertTrue(tester.testNotNullFields()); } /** * Teardown. */ @After public void teardown() { // do nothing } /** * Teardown class. */ @AfterClass public static void teardownClass() { // do nothing } }
/*
 * Copyright (C) 2005-2008 Jive Software, 2022 Ignite Realtime Foundation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.openfire.audit.spi;

import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.audit.AuditManager;
import org.jivesoftware.openfire.audit.Auditor;
import org.jivesoftware.openfire.container.BasicModule;
import org.jivesoftware.openfire.interceptor.InterceptorManager;
import org.jivesoftware.openfire.interceptor.PacketInterceptor;
import org.jivesoftware.openfire.session.Session;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.PropertyEventDispatcher;
import org.jivesoftware.util.PropertyEventListener;
import org.xmpp.packet.JID;
import org.xmpp.packet.Packet;

import java.io.File;
import java.time.Duration;
import java.util.*;

/**
 * Implementation of the AuditManager interface.
 *
 * <p>Configuration is persisted in Openfire properties ({@code xmpp.audit.*}); every setter
 * both updates in-memory state and writes the property back, and {@link #propertySet} keeps
 * the in-memory state in sync when a property is changed externally. Auditing itself is
 * delegated to {@link AuditorImpl}; packets are captured via an {@link AuditorInterceptor}
 * that is registered/unregistered as the enabled flag toggles.
 */
public class AuditManagerImpl extends BasicModule implements AuditManager, PropertyEventListener {

    // In-memory mirrors of the xmpp.audit.* properties, loaded in initialize().
    private boolean enabled;
    private boolean auditMessage;
    private boolean auditPresence;
    private boolean auditIQ;
    private boolean auditXPath;
    private List<String> xpath = new LinkedList<>();
    private AuditorImpl auditor = null;
    /**
     * Max size in bytes that all audit log files may have. When the limit is reached
     * oldest audit log files will be removed until total size is under the limit.
     * NOTE(review): the defaults (1000 / 10) suggest the unit may actually be megabytes,
     * not bytes — confirm against AuditorImpl before relying on this javadoc.
     */
    private int maxTotalSize;
    /**
     * Max size in bytes that each audit log file may have. Once the limit has been
     * reached a new audit file will be created.
     */
    private int maxFileSize;
    /**
     * Max duration to keep audit information. Once the limit has been reached
     * audit files that contain information that exceed the limit will be deleted.
     */
    private Duration retention;
    /**
     * the time between successive executions of the task that will save
     * the queued audited packets to a permanent store.
     */
    private Duration logTimeout;
    private String logDir;
    // Usernames whose packets (as sender or recipient) are excluded from auditing.
    private Collection<String> ignoreList = new ArrayList<>();
    private static final int MAX_TOTAL_SIZE = 1000;
    private static final int MAX_FILE_SIZE = 10;
    // Negative duration means "keep forever" (see validateDuration).
    private static final Duration MAX_DAYS = Duration.ofDays(-1);
    private static final Duration DEFAULT_LOG_TIMEOUT = Duration.ofMinutes(2);
    private AuditorInterceptor interceptor;

    public AuditManagerImpl() {
        super("Audit Manager");
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
        JiveGlobals.setProperty("xmpp.audit.active", enabled ? "true" : "false");
        // Registers or removes the packet interceptor to start/stop capturing.
        processEnabled(enabled);
    }

    @Override
    public Auditor getAuditor() {
        // The auditor is only created in initialize(); calling earlier is a programming error.
        if (auditor == null) {
            throw new IllegalStateException("Must initialize audit manager first");
        }
        return auditor;
    }

    @Override
    public int getMaxTotalSize() {
        return maxTotalSize;
    }

    @Override
    public void setMaxTotalSize(int size) {
        maxTotalSize = size;
        auditor.setMaxValues(maxTotalSize, maxFileSize, retention);
        JiveGlobals.setProperty("xmpp.audit.totalsize", Integer.toString(size));
    }

    @Override
    public int getMaxFileSize() {
        return maxFileSize;
    }

    @Override
    public void setMaxFileSize(int size) {
        maxFileSize = size;
        auditor.setMaxValues(maxTotalSize, maxFileSize, retention);
        JiveGlobals.setProperty("xmpp.audit.filesize", Integer.toString(size));
    }

    @Override
    public Duration getRetention() {
        return retention;
    }

    @Override
    public void setRetention(Duration duration) {
        // Normalized: negative -> forever (-1 day), zero -> 1 day.
        retention = validateDuration(duration);
        auditor.setMaxValues(maxTotalSize, maxFileSize, retention);
        JiveGlobals.setProperty("xmpp.audit.days", Long.toString(duration.toDays())); // TODO fix loss of precision while remaining compatible with existing properties.
    }

    @Override
    public Duration getLogTimeout() {
        return logTimeout;
    }

    @Override
    public void setLogTimeout(Duration logTimeout) {
        this.logTimeout = logTimeout;
        auditor.setLogTimeout(logTimeout);
        // Persisted as milliseconds for compatibility with the existing property format.
        JiveGlobals.setProperty("xmpp.audit.logtimeout", String.valueOf(logTimeout.toMillis()));
    }

    @Override
    public String getLogDir() {
        return logDir;
    }

    @Override
    public void setLogDir(String logDir) {
        this.logDir = logDir;
        auditor.setLogDir(logDir);
        JiveGlobals.setProperty("xmpp.audit.logdir", logDir);
    }

    @Override
    public boolean isAuditMessage() {
        return auditMessage;
    }

    @Override
    public void setAuditMessage(boolean auditMessage) {
        this.auditMessage = auditMessage;
        JiveGlobals.setProperty("xmpp.audit.message", auditMessage ? "true" : "false");
    }

    @Override
    public boolean isAuditPresence() {
        return auditPresence;
    }

    @Override
    public void setAuditPresence(boolean auditPresence) {
        this.auditPresence = auditPresence;
        JiveGlobals.setProperty("xmpp.audit.presence", auditPresence ? "true" : "false");
    }

    @Override
    public boolean isAuditIQ() {
        return auditIQ;
    }

    @Override
    public void setAuditIQ(boolean auditIQ) {
        this.auditIQ = auditIQ;
        JiveGlobals.setProperty("xmpp.audit.iq", Boolean.toString(auditIQ));
    }

    @Override
    public boolean isAuditXPath() {
        return auditXPath;
    }

    @Override
    public void setAuditXPath(boolean auditXPath) {
        this.auditXPath = auditXPath;
        JiveGlobals.setProperty("xmpp.audit.xpath", Boolean.toString(auditXPath));
    }

    @Override
    public void addXPath(String xpathExpression) {
        xpath.add(xpathExpression);
        saveXPath();
    }

    @Override
    public void removeXPath(String xpathExpression) {
        xpath.remove(xpathExpression);
        saveXPath();
    }

    // XPath filters are kept in memory only; persistence is not implemented (see TODO).
    private void saveXPath() {
        // TODO: save XPath values!
        //String[] filters = new String[xpath.size()];
        //filters = (String[]) xpath.toArray(filters);
    }

    @Override
    public Iterator<String> getXPathFilters() {
        return xpath.iterator();
    }

    @Override
    public void setIgnoreList(Collection<String> usernames) {
        if (ignoreList.equals(usernames)) {
            return;
        }
        ignoreList = usernames;
        // Encode the collection as a comma-separated property value.
        StringBuilder ignoreString = new StringBuilder();
        for (String username : ignoreList) {
            if (ignoreString.length() == 0) {
                ignoreString.append(username);
            }
            else {
                ignoreString.append(',').append(username);
            }
        }
        JiveGlobals.setProperty("xmpp.audit.ignore", ignoreString.toString());
    }

    @Override
    public Collection<String> getIgnoreList() {
        return Collections.unmodifiableCollection(ignoreList);
    }

    // #########################################################################
    // Basic module methods
    // #########################################################################

    /**
     * Loads all xmpp.audit.* properties, creates the AuditorImpl and the packet
     * interceptor, and registers this module for property-change notifications.
     */
    @Override
    public void initialize(XMPPServer server) {
        super.initialize(server);
        enabled = JiveGlobals.getBooleanProperty("xmpp.audit.active");
        auditMessage = JiveGlobals.getBooleanProperty("xmpp.audit.message");
        auditPresence = JiveGlobals.getBooleanProperty("xmpp.audit.presence");
        auditIQ = JiveGlobals.getBooleanProperty("xmpp.audit.iq");
        auditXPath = JiveGlobals.getBooleanProperty("xmpp.audit.xpath");
        // TODO: load xpath values!
        // String[] filters = context.getProperties("xmpp.audit.filter.xpath");
        // for (int i = 0; i < filters.length; i++) {
        //     xpath.add(filters[i]);
        // }
        maxTotalSize = JiveGlobals.getIntProperty("xmpp.audit.totalsize", MAX_TOTAL_SIZE);
        maxFileSize = JiveGlobals.getIntProperty("xmpp.audit.filesize", MAX_FILE_SIZE);
        retention = Duration.ofDays(JiveGlobals.getIntProperty("xmpp.audit.days", (int)MAX_DAYS.toDays()));
        logTimeout = Duration.ofMillis(JiveGlobals.getIntProperty("xmpp.audit.logtimeout", (int)DEFAULT_LOG_TIMEOUT.toMillis()));
        logDir = JiveGlobals.getProperty("xmpp.audit.logdir", JiveGlobals.getHomeDirectory() + File.separator + "logs");
        processIgnoreString(JiveGlobals.getProperty("xmpp.audit.ignore", ""));
        auditor = new AuditorImpl(this);
        auditor.setMaxValues(maxTotalSize, maxFileSize, retention);
        auditor.setLogDir(logDir);
        auditor.setLogTimeout(logTimeout);
        interceptor = new AuditorInterceptor();
        processEnabled(enabled);
        PropertyEventDispatcher.addListener(this);
    }

    // Decodes the comma-separated ignore-list property into the in-memory collection.
    private void processIgnoreString(String ignoreString) {
        ignoreList.clear();
        // Decode the ignore list
        StringTokenizer tokenizer = new StringTokenizer(ignoreString, ",");
        while (tokenizer.hasMoreTokens()) {
            String username = tokenizer.nextToken().trim();
            ignoreList.add(username);
        }
    }

    private void processEnabled(boolean enabled) {
        // Add or remove the auditor interceptor depending on the enabled status
        if (enabled) {
            InterceptorManager.getInstance().addInterceptor(interceptor);
        }
        else {
            InterceptorManager.getInstance().removeInterceptor(interceptor);
        }
    }

    // Normalizes a retention duration: negative -> "forever" sentinel, zero -> one day.
    private Duration validateDuration(Duration duration) {
        if (duration.isNegative()) {
            return Duration.ofDays(-1);
        }
        if (duration.isZero()) {
            return Duration.ofDays(1);
        }
        return duration;
    }

    @Override
    public void stop() {
        if (auditor != null) {
            auditor.stop();
        }
    }

    /**
     * Keeps in-memory state in sync when an xmpp.audit.* property is changed
     * externally (e.g. via the admin console). Non-string values are ignored.
     */
    @Override
    public void propertySet(String property, Map<String, Object> params) {
        final Object val = params.get("value");
        if (!( val instanceof String )) {
            return;
        }
        String value = (String) val;
        switch (property) {
            case "xmpp.audit.active":
                enabled = Boolean.parseBoolean(value);
                processEnabled(enabled);
                break;
            case "xmpp.audit.message":
                auditMessage = Boolean.parseBoolean(value);
                break;
            case "xmpp.audit.presence":
                auditPresence = Boolean.parseBoolean(value);
                break;
            case "xmpp.audit.iq":
                auditIQ = Boolean.parseBoolean(value);
                break;
            case "xmpp.audit.xpath":
                auditXPath = Boolean.parseBoolean(value);
                break;
            case "xmpp.audit.totalsize":
                maxTotalSize = parseIntegerOrDefault(value, MAX_TOTAL_SIZE);
                auditor.setMaxValues(maxTotalSize, maxFileSize, retention);
                break;
            case "xmpp.audit.filesize":
                maxFileSize = parseIntegerOrDefault(value, MAX_FILE_SIZE);
                auditor.setMaxValues(maxTotalSize, maxFileSize, retention);
                break;
            case "xmpp.audit.days":
                retention = validateDuration(Duration.ofDays(parseIntegerOrDefault(value, (int)MAX_DAYS.toDays())));
                auditor.setMaxValues(maxTotalSize, maxFileSize, retention);
                break;
            case "xmpp.audit.logtimeout":
                logTimeout = Duration.ofMillis(parseIntegerOrDefault(value, (int)DEFAULT_LOG_TIMEOUT.toMillis()));
                auditor.setLogTimeout(logTimeout);
                break;
            case "xmpp.audit.logdir":
                // Fall back to <home>/logs when the configured directory is unusable.
                File d = null;
                if (!"".equals(value.trim())) {
                    d = new File(value);
                }
                logDir = (d == null || !d.exists() || !d.canRead() || !d.canWrite() || !d.isDirectory()) ? JiveGlobals.getHomeDirectory() + File.separator + "logs" : value;
                auditor.setLogDir(logDir);
                break;
            case "xmpp.audit.ignore":
                processIgnoreString(value);
                break;
        }
    }

    // Parses an int, returning the supplied default on malformed input.
    private int parseIntegerOrDefault(String intValue, int defaultValue) {
        try {
            return Integer.parseInt(intValue);
        } catch (NumberFormatException nfe) {
            return defaultValue;
        }
    }

    @Override
    public void propertyDeleted(String property, Map<String, Object> params) {
        // NOTE(review): propertySet() returns early when "value" is absent, so an empty map
        // here means deletion never resets the in-memory setting to its default — confirm
        // whether that is intentional.
        propertySet(property, Collections.emptyMap());
    }

    @Override
    public void xmlPropertySet(String property, Map<String, Object> params) {
        // XML properties are not used by the audit manager.
    }

    @Override
    public void xmlPropertyDeleted(String property, Map<String, Object> params) {
        // XML properties are not used by the audit manager.
    }

    /**
     * Packet interceptor that forwards fully-processed packets to the auditor,
     * skipping packets whose sender or recipient is on the ignore list.
     */
    private class AuditorInterceptor implements PacketInterceptor {

        @Override
        public void interceptPacket(Packet packet, Session session, boolean read, boolean processed) {
            if (!processed) {
                // Ignore packets sent or received by users that are present in the ignore list
                JID from = packet.getFrom();
                JID to = packet.getTo();
                if ((from == null || !ignoreList.contains(from.getNode())) &&
                    (to == null || !ignoreList.contains(to.getNode()))) {
                    auditor.audit(packet, session);
                }
            }
        }
    }
}
package com.github.neuralnetworks.samples.cifar;

import com.github.neuralnetworks.input.ImageInputProvider;
import com.github.neuralnetworks.util.Util;

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;

/**
 * Input provider for the CIFAR-10 and CIFAR-100 datasets. Requires location of the CIFAR images
 * files (not included in the library). Do not use this class directly, but use the subclasses instead
 * Experimental
 *
 * <p>Each record in the binary files is {@code labelSize} label byte(s) followed by 3072 pixel
 * bytes. The pixel layout implied by {@link #getNextImage()} is three 1024-byte color planes
 * (red, then green, then blue) for a 32x32 image.
 */
public abstract class CIFARInputProvider extends ImageInputProvider {

    private static final long serialVersionUID = 1L;

    // Open handles to the dataset's batch files; records are addressed across all of them.
    protected RandomAccessFile files[];
    // Number of label bytes per record: 1 for CIFAR-10, 2 for CIFAR-100.
    protected int labelSize;
    // Total number of records across all files.
    protected int inputSize;
    // Raw pixel bytes of the record loaded by beforeSample().
    protected byte[] nextInputRaw;
    // One-hot target vector for the record loaded by beforeSample().
    protected float[] nextTarget;
    // Remaining record indices for the current epoch; refilled by resetOrder().
    private List<Integer> elementsOrder;

    private CIFARInputProvider() {
        super();
        this.elementsOrder = new ArrayList<>();
        this.nextInputRaw = new byte[3072];
    }

    @Override
    public int getInputSize() {
        return inputSize;
    }

    @Override
    public float[] getNextTarget() {
        return nextTarget;
    }

    /**
     * Converts the raw bytes to floats directly (optionally scaled to [0,1])
     * when no augmentation or channel regrouping is needed; otherwise defers
     * to the image-based pipeline in the superclass.
     */
    @Override
    public float[] getNextInput() {
        // if no transformations are required and the data is grouped by color
        // channel the code can be optimized
        if (!requireAugmentation() && getProperties().getGroupByChannel()) {
            if (nextInput == null) {
                nextInput = new float[3072];
            }

            float scaleColors = getProperties().getScaleColors() ? 255 : 1;
            for (int i = 0; i < nextInput.length; i++) {
                // & 0xFF converts the signed byte to its unsigned 0..255 value.
                nextInput[i] = (nextInputRaw[i] & 0xFF) / scaleColors;
            }

            return nextInput;
        }

        return super.getNextInput();
    }

    /**
     * Builds a BufferedImage from the raw planar RGB bytes. TYPE_3BYTE_BGR
     * stores pixels as B,G,R, hence the reversed plane order below.
     */
    @Override
    protected BufferedImage getNextImage() {
        BufferedImage image = new BufferedImage(32, 32, BufferedImage.TYPE_3BYTE_BGR);
        byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();

        for (int i = 0; i < 1024; i++) {
            pixels[i * 3] = nextInputRaw[1024 * 2 + i];
            pixels[i * 3 + 1] = nextInputRaw[1024 + i];
            pixels[i * 3 + 2] = nextInputRaw[i];
        }

        return image;
    }

    /**
     * Picks the next record (random or sequential order), seeks to it in the
     * owning batch file, and fills nextTarget/nextInputRaw.
     */
    @Override
    public void beforeSample() {
        if (elementsOrder.size() == 0) {
            resetOrder();
        }

        int currentEl = elementsOrder.remove(getProperties().getUseRandomOrder() ? getProperties().getRandom().nextInt(elementsOrder.size()) : 0);
        // Records are distributed evenly across the files; map the global index
        // to a (file, offset-within-file) pair.
        int id = currentEl % (getInputSize() / files.length);
        RandomAccessFile f = files[currentEl / (getInputSize() / files.length)];

        try {
            f.seek(id * (3072 + labelSize));
            if (labelSize > 1) {
                // Skip the first label byte; the second one is used as the target index
                // (for CIFAR-100 this is presumably coarse vs. fine label — TODO confirm).
                f.readUnsignedByte();
            }

            Util.fillArray(nextTarget, 0);
            nextTarget[f.readUnsignedByte()] = 1;
            f.readFully(nextInputRaw);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void reset() {
        super.reset();
        resetOrder();
    }

    /** Refills the epoch order list with all record indices 0..inputSize-1. */
    public void resetOrder() {
        elementsOrder = new ArrayList<Integer>(getInputSize());
        for (int i = 0; i < getInputSize(); i++) {
            elementsOrder.add(i);
        }
    }

    public static class CIFAR10TrainingInputProvider extends CIFARInputProvider {

        private static final long serialVersionUID = 1L;

        /**
         * @param directory - the folder where the CIFAR files are located
         */
        public CIFAR10TrainingInputProvider(String directory) {
            super();

            this.labelSize = 1;
            this.inputSize = 50000;
            this.nextTarget = new float[10];
            this.files = new RandomAccessFile[5];

            try {
                if (!directory.endsWith(File.separator)) {
                    directory += File.separator;
                }

                files[0] = new RandomAccessFile(directory + "data_batch_1.bin", "r");
                files[1] = new RandomAccessFile(directory + "data_batch_2.bin", "r");
                files[2] = new RandomAccessFile(directory + "data_batch_3.bin", "r");
                files[3] = new RandomAccessFile(directory + "data_batch_4.bin", "r");
                files[4] = new RandomAccessFile(directory + "data_batch_5.bin", "r");
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
    }

    public static class CIFAR10TestingInputProvider extends CIFARInputProvider {

        private static final long serialVersionUID = 1L;

        /**
         * @param directory - the folder where the CIFAR files are located
         */
        public CIFAR10TestingInputProvider(String directory) {
            super();

            this.labelSize = 1;
            this.inputSize = 10000;
            this.nextTarget = new float[10];
            this.files = new RandomAccessFile[1];

            try {
                if (!directory.endsWith(File.separator)) {
                    directory += File.separator;
                }

                files[0] = new RandomAccessFile(directory + "test_batch.bin", "r");
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
    }

    public static class CIFAR100TrainingInputProvider extends CIFARInputProvider {

        private static final long serialVersionUID = 1L;

        /**
         * @param directory - the folder where the CIFAR files are located
         */
        // NOTE(review): the standard CIFAR-100 binary distribution ships a single
        // train.bin (50000 records), not data_batch_1..5.bin — confirm these file
        // names against the dataset actually used here.
        public CIFAR100TrainingInputProvider(String directory) {
            super();

            this.labelSize = 2;
            this.inputSize = 50000;
            this.nextTarget = new float[100];
            this.files = new RandomAccessFile[5];

            try {
                if (!directory.endsWith(File.separator)) {
                    directory += File.separator;
                }

                files[0] = new RandomAccessFile(directory + "data_batch_1.bin", "r");
                files[1] = new RandomAccessFile(directory + "data_batch_2.bin", "r");
                files[2] = new RandomAccessFile(directory + "data_batch_3.bin", "r");
                files[3] = new RandomAccessFile(directory + "data_batch_4.bin", "r");
                files[4] = new RandomAccessFile(directory + "data_batch_5.bin", "r");
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
    }

    public static class CIFAR100TestingInputProvider extends CIFARInputProvider {

        private static final long serialVersionUID = 1L;

        /**
         * @param directory - the folder where the CIFAR files are located
         */
        // NOTE(review): the standard CIFAR-100 binary distribution names this file
        // test.bin, not test_batch.bin — confirm against the dataset used here.
        public CIFAR100TestingInputProvider(String directory) {
            super();

            this.labelSize = 2;
            this.inputSize = 10000;
            this.nextTarget = new float[100];
            this.files = new RandomAccessFile[1];

            try {
                if (!directory.endsWith(File.separator)) {
                    directory += File.separator;
                }

                files[0] = new RandomAccessFile(directory + "test_batch.bin", "r");
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
    }
}
package it.poliba.sisinflab.simlib.datamodel;

import it.poliba.sisinflab.simlib.input.Statement;
import it.poliba.sisinflab.simlib.input.triplefile.Triple;

import java.util.*;
import java.util.stream.Collectors;

import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.summingInt;

/**
 * Represents a graph structure.
 *
 * It represents a graph through its {@link #nodes} and keeps track of which ones are also graph {@link #items}.
 * {@link #items} are nodes for which we want to calculate similarity or relatedness values
 * (i.e. a movie, a song, a book etc.)
 *
 * @author Giorgio Basile
 * @since 1.0
 */
public class Graph{

    /**
     * It keeps track of all the graph nodes, using as keys their IDs in order to have a fast retrieval
     * through the {@link HashMap} indexing
     */
    protected HashMap<String, Node> nodes;

    // subset of {@link #nodes} that are "items" (targets for similarity computation),
    // keyed by node ID; entries reference the same Node objects held in nodes
    protected HashMap<String, Node> items;

    // nodes that appear in predicate position of at least one triple
    protected HashSet<Node> properties;

    // the raw statements this graph was built from (set by build())
    protected List<Statement> statements;

    protected Graph(){
        nodes = new HashMap<>();
        items = new HashMap<>();
        properties = new HashSet<>();
    }

    /*************** FACTORY METHODS ****************/

    /**
     * Returns the node with the given ID, creating and registering it first if absent.
     * Guarantees a single Node instance per ID within this graph.
     */
    public Node createNode(String id){
        if(!nodes.containsKey(id)){
            nodes.put(id, new Node(id, this));
        }
        return nodes.get(id);
    }

    /** Creates a directed arrow (edge label + direction) for property node p. */
    public Arrow createArrow(Node p, String direction){
        return new Arrow(p, direction);
    }

    /** Wraps an ordered list of arrows into a Path. */
    public Path createPath(LinkedList<Arrow> arrowsList){
        return new Path(arrowsList);
    }

    /** Convenience overload: builds a Path from a varargs sequence of arrows. */
    public Path createPath(Arrow... arrows){
        LinkedList<Arrow> arrowsList = new LinkedList<>(Arrays.asList(arrows));
        return createPath(arrowsList);
    }

    /**
     * Builds a Path of outgoing arrows from property IDs; each property node is
     * created in (or fetched from) this graph via {@link #createNode(String)}.
     */
    public Path createPath(String... properties){
        LinkedList<Arrow> arrowsList = new LinkedList<>();
        for(String property : properties){
            Node p = createNode(property);
            arrowsList.add(createArrow(p, Arrow.DIR_OUT));
        }
        return createPath(arrowsList);
    }

    /** Builds a one-arrow Path for a single property with an explicit direction. */
    public Path propertyStringtoPath(String property, String direction) {
        return createPath(createArrow(createNode(property), direction));
    }

    /** @return the node with the given ID, or null if not present. */
    public Node getNode(String id){
        return nodes.get(id);
    }

    /** @return the item node with the given ID, or null if not an item. */
    public Node getItem(String id) {
        return items.get(id);
    }

    public HashMap<String, Node> getNodes() {
        return nodes;
    }

    /**
     * Returns the subset of graph nodes whose IDs are in targetNodes.
     * Runs as a parallel stream over all entries.
     */
    public Map<String,Node> getNodes(Collection<String> targetNodes) {
        return this.getNodes().entrySet().parallelStream()
                .filter(e -> targetNodes.contains(e.getKey()))
                .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue() ));
    }

    /**
     * Returns the distinct nodes reachable from any of sourceNodes by following
     * the given property one step (outgoing), keyed by node ID.
     */
    public Map<String,Node> getNodesFrom(Set<String> sourceNodes, String property) {
        return this.getNodes().entrySet().stream()
                .filter(e -> sourceNodes.contains(e.getKey()))
                .map(e -> e.getValue().getNeighbors(this.createPath(property)))
                .flatMap(Set::stream)
                .distinct()
                .collect(Collectors.toMap(n -> n.getId(), n -> n));
    }

    public void setNodes(HashMap<String, Node> nodes) {
        this.nodes = nodes;
    }

    public HashMap<String, Node> getItems() {
        return items;
    }

    public void setItems(HashMap<String, Node> items) {
        this.items = items;
    }

    public HashSet<Node> getProperties() {
        return properties;
    }

    /**
     * Collects the distinct properties used by outgoing arrows of the given items.
     */
    public Set<Node> getPropertiesStartingFrom(Map<String, Node> items) {
        return items.values().stream()
                .flatMap(n -> n.getArrows(Arrow.DIR_OUT).keySet().stream())
                .map(n -> n.getProperty())
                .collect(Collectors.toSet());
    }

    public void setProperties(HashSet<Node> properties) {
        this.properties = properties;
    }

    public List<Statement> getStatements() {
        return statements;
    }

    public void setStatements(List<Statement> statements) {
        this.statements = statements;
    }

    /**
     * Builds the graph from a list of statements; each statement is processed
     * as a Triple (see {@link #processTriple(Statement)}).
     */
    protected void build(List<Statement> tripleList){
        statements = tripleList;
        System.out.println("Building graph...");
        tripleList.stream()
                .forEach(this::processTriple);
        System.out.println("Graph building complete");
        System.out.println("Nodes: " + nodes.size());
    }

    /** Builds the graph, then marks the given IDs as items. */
    protected void build(List<Statement> statements, List<String> itemsIDs){
        build(statements);
        markItems(itemsIDs);
        System.out.println("Items: " + items.size());
    }

    /**
     * Registers existing nodes as items; IDs not found in the graph are only
     * reported on stdout, not created.
     */
    public void markItems(List<String> itemsIDs){
        for(String itemID : itemsIDs){
            Node n = nodes.get(itemID);
            if(n != null){
                items.put(itemID, n);
            }else{
                System.out.println("Node " + itemID + " not found in the graph");
            }
        }
    }

    /**
     * Processes a Triple adding its components to the Graph data model
     *
     * @param s a given statement
     */
    private void processTriple(Statement s){
        Triple t = (Triple) s;
        // NOTE(review): these containsKey/put guards duplicate what createNode()
        // already does; they are redundant but harmless.
        if(!nodes.containsKey(t.getSubject())){
            nodes.put(t.getSubject(), createNode(t.getSubject()));
        }
        if(!nodes.containsKey(t.getObject())){
            nodes.put(t.getObject(), createNode(t.getObject()));
        }
        if(!nodes.containsKey(t.getPredicate())){
            nodes.put(t.getPredicate(), createNode(t.getPredicate()));
        }
        /*
         * this should be able to avoid creation of new nodes when already existing
         * moreover, Nodes object in the arrows list, in the nodes and items HashMaps should have consistent reference
         */
        Node tail = nodes.get(t.getSubject());
        Node head = nodes.get(t.getObject());
        Node p = nodes.get(t.getPredicate());
        // store the edge in both directions so it can be traversed either way
        tail.addArrow(new Arrow(p, Arrow.DIR_OUT), head); //make sure that the triple extractor uses UNIQUE identifiers for each resource in the graph (it's not happening in metadata.txt)
        head.addArrow(new Arrow(p, Arrow.DIR_IN), tail);
        properties.add(p);
    }

    /**
     * Returns the nodes reachable following a given set of paths. For each path, there's the associated set of nodes
     * with the number of times they are reached.
     *
     * @param paths the given set of paths
     * @return a map which entries are each of the given paths with the associated reachable nodes with their frequencies
     */
    public Map<Path, Map<Node, Integer>> getPathsObjects(Set<Path> paths) {
        Map<Path, Map<Node, Integer>> pathsObjects = new HashMap<>();
        for (Path p : paths) {
            pathsObjects.put(p, getPathObjectsStream(p));
        }
        return pathsObjects;
    }

    /**
     * Returns the set of nodes reachable following a given path (starting from anyone of the graph items),
     * giving also the number of times they are reached.
     *
     * @param path the given path
     * @return a map which entries are the reachable nodes with their associated frequency
     * @TODO I think the name is wrong: the method does not return a stream.
     */
    public Map<Node, Integer> getPathObjectsStream(Path path) {
        return items.entrySet().parallelStream()
                .map(e -> {
                    //for the one-step transitivity purpose, we need to get the neighbors using each possible subpath
                    //this is different from the collectPathObjects methods in the Node class, because in that case
                    //we don't collect nodes from which we can't follow the full path
                    HashSet<Node> nodes = new HashSet<>();
                    // iterate over every prefix of the path by removing the last arrow each round
                    Path subPath = new Path(path.getArrowsList());
                    while(subPath.getArrowsList().size() > 0){
                        nodes.addAll(e.getValue().getNeighbors(subPath));
                        subPath.getArrowsList().removeLast();
                    }
                    return nodes;
                }) //for each Node, get HashSet of Nodes that are objects for "path"
                .flatMap(HashSet::stream) //create a single Stream with all the Nodes contained in the previous HashSets
                .collect(groupingBy(n -> n, summingInt(c -> 1)));
    }

    /** Counts, over all nodes, how many objects are attached via the given arrow. */
    public int getArrowOccurrences(Arrow arrow){
        return getNodes().entrySet().parallelStream()
                .collect(summingInt(e -> e.getValue().getArrowObjects(arrow).size()));
    }

    public int getStatementsCount() {
        return getStatements().size();
    }

    /** Delegates top-k shortest path search to the start node. */
    public List<Path> findTopKShortestPaths(Node start, Node destination, int k, int maxDepth){
        return start.findTopKShortestPaths(destination, k, maxDepth);
    }

    /** Delegates top-k shortest path search (multiple destinations) to the start node. */
    public HashMap<Node, List<Path>> findTopKShortestPaths(Node start, Collection<Node> destinations, int k, int maxDepth){
        return start.findTopKShortestPaths(destinations, k, maxDepth);
    }

    public boolean contains(String nodeID){
        return this.nodes.containsKey(nodeID);
    }

    /**
     * Removes the given properties' arrows (both directions) from the given items,
     * then drops nodes left with no links.
     */
    public void removeProperties(Set<String> s, Map<String, Node> items){
        List<Arrow> p_out = s.stream().map(p -> this.createArrow(this.createNode(p), Arrow.DIR_OUT)).collect(Collectors.toList());
        List<Arrow> p_in = s.stream().map(p -> this.createArrow(this.createNode(p), Arrow.DIR_IN)).collect(Collectors.toList());
        items.values().parallelStream().forEach(n -> {
            p_out.stream().forEach(p -> n.removeArrow(p));
            p_in.stream().forEach(p -> n.removeArrow(p));
        });
//        s.stream().forEach(this::removeProperty);
        removeNodesWithNoLinks();
    }

    /** Drops non-property nodes that have no remaining arrows. */
    private void removeNodesWithNoLinks() {
        List<String> nodeIDs = nodes.entrySet().stream()
                .filter(e -> !properties.contains(e.getValue())) //ATTENTION! properties have often no link
                .filter(e -> e.getValue().arrows.size() == 0)
                .map(e -> e.getKey())
                .collect(Collectors.toList());
        // collected first, then removed, to avoid mutating nodes during iteration
        nodeIDs.stream()
                .forEach(this::removeNode);
    }

    /** Removes a property node from both the properties set and the node index. */
    public void removeProperty(String propID){
        if(nodes.containsKey(propID)){
            properties.remove(nodes.get(propID));
            this.nodes.remove(propID);
        }
    }

    /** Removes a node from both the node index and (if present) the item index. */
    public void removeNode(String nodeID){
        if(nodes.containsKey(nodeID)){
            this.nodes.remove(nodeID);
            this.items.remove(nodeID);
        }
    }

    /**
     * Returns, for each depth 1..maxDepth, the neighbors (with repetitions) of n
     * in the given direction.
     */
    public HashMap<Integer, Collection<Node>> getNeighborsDifferentDepths(Node n, int maxDepth, String direction){
        HashMap<Integer, Collection<Node>> neighbors = new HashMap<>();
        for (int i = 1 ; i < maxDepth + 1 ; i++){
//            HashSet<Node> neighs = n.getNeighbors(i, direction);
//            neighs.remove(n);
            neighbors.put(i, n.getNeighborsWithRepetition(i, direction));
        }
        return neighbors;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.heron.metricsmgr.sink;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;

import org.apache.heron.spi.metricsmgr.metrics.ExceptionInfo;
import org.apache.heron.spi.metricsmgr.metrics.MetricsInfo;
import org.apache.heron.spi.metricsmgr.metrics.MetricsRecord;
import org.apache.heron.spi.metricsmgr.sink.SinkContext;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for {@link PrometheusSink}: metric grouping, text exposition output,
 * instance-id extraction from metric names, Storm-Kafka offset rule rewriting and
 * component-type detection.
 */
public class PrometheusSinkTests {

  // fixed timestamp so generated exposition lines are deterministic
  private static final long NOW = System.currentTimeMillis();

  /** Test double that skips the HTTP server and pins the timestamp to NOW. */
  private final class PrometheusTestSink extends PrometheusSink {

    private PrometheusTestSink() {
    }

    @Override
    protected void startHttpServer(String path, int port) {
      // no need to start the server for tests
    }

    public Map<String, Map<String, Double>> getMetrics() {
      return getMetricsCache().asMap();
    }

    long currentTimeMillis() {
      return NOW;
    }
  }

  private Map<String, Object> defaultConf;
  private SinkContext context;
  private List<MetricsRecord> records;

  /**
   * Builds a default sink configuration (flat metrics, no topology name in keys),
   * a mocked SinkContext, two Kafka-offset rewrite rules and two sample records.
   */
  @Before
  public void before() throws IOException {

    defaultConf = new HashMap<>();
    defaultConf.put("port", "9999");
    defaultConf.put("path", "test");
    defaultConf.put("flat-metrics", "true");
    defaultConf.put("include-topology-name", "false");

    context = Mockito.mock(SinkContext.class);
    Mockito.when(context.getTopologyName()).thenReturn("testTopology");
    Mockito.when(context.getSinkId()).thenReturn("testId");

    /*
      # example: metrics.yaml
      rules:
      - pattern: kafka.(\w+)<type=(.+), name=(.+)PerSec\w*, (.+)=(.+)><>Count
        name: kafka_$1_$2_$3_total
        attrNameSnakeCase: true
        type: COUNTER
        labels:
          "$4": "$5"
        type: COUNTER
    */
    /*
     example: metrics
      kafkaOffset/nginx-lfp-beacon/totalSpoutLag
      kafkaOffset/lads_event_meta_backfill_data/partition_10/spoutLag
     */
    List<Map<String, Object>> rules = Lists.newArrayList();
    defaultConf.put("rules", rules);

    // rule 1: totals per topic -> kafka_offset_<metric>{topic="..."}
    Map<String, Object> rule1 = Maps.newHashMap();
    Map<String, Object> labels1 = Maps.newHashMap();
    rules.add(rule1);
    rule1.put("pattern", "kafkaOffset/(.+)/(.+)");
    rule1.put("name", "kafka_offset_$2");
    rule1.put("type", "COUNTER");
    rule1.put("attrNameSnakeCase", true);
    rule1.put("labels", labels1);
    labels1.put("topic", "$1");

    // rule 2: per-partition metrics -> kafka_offset_partition_<metric>{topic,partition}
    Map<String, Object> rule2 = Maps.newHashMap();
    Map<String, Object> labels2 = Maps.newHashMap();
    rules.add(rule2);
    rule2.put("pattern", "kafkaOffset/(.+)/partition_(\\d+)/(.+)");
    rule2.put("name", "kafka_offset_partition_$3");
    rule2.put("type", "COUNTER");
    rule2.put("labels", labels2);
    rule2.put("attrNameSnakeCase", true);
    labels2.put("topic", "$1");
    labels2.put("partition", "$2");

    Iterable<MetricsInfo> infos = Arrays.asList(new MetricsInfo("metric_1", "1.0"),
        new MetricsInfo("metric_2", "2.0"));

    records = Arrays.asList(
        newRecord("machine/component/instance_1", infos, Collections.emptyList()),
        newRecord("machine/component/instance_2", infos, Collections.emptyList()));
  }

  /** Records from different instances must be cached under distinct keys. */
  @Test
  public void testMetricsGrouping() {
    PrometheusTestSink sink = new PrometheusTestSink();
    sink.init(defaultConf, context);
    for (MetricsRecord r : records) {
      sink.processRecord(r);
    }

    final Map<String, Map<String, Double>> metrics = sink.getMetrics();
    assertTrue(metrics.containsKey("testTopology/component/instance_1"));
    assertTrue(metrics.containsKey("testTopology/component/instance_2"));
  }

  /** The generated exposition text must contain one line per instance per metric. */
  @Test
  public void testResponse() throws IOException {
    PrometheusTestSink sink = new PrometheusTestSink();
    sink.init(defaultConf, context);
    for (MetricsRecord r : records) {
      sink.processRecord(r);
    }

    final String topology = "testTopology";
    // FIX: the last two expected lines previously duplicated instance_1, so the
    // instance_2 output was never actually verified (sizes happened to match
    // because generatedLines is a Set of 4 distinct lines).
    final List<String> expectedLines = Arrays.asList(
        createMetric(topology, "component", "instance_1", "metric_1", "1.0"),
        createMetric(topology, "component", "instance_1", "metric_2", "2.0"),
        createMetric(topology, "component", "instance_2", "metric_1", "1.0"),
        createMetric(topology, "component", "instance_2", "metric_2", "2.0")
    );

    final Set<String> generatedLines =
        new HashSet<>(Arrays.asList(new String(sink.generateResponse()).split("\n")));

    assertEquals(expectedLines.size(), generatedLines.size());

    expectedLines.forEach((String line) -> {
      assertTrue(generatedLines.contains(line));
    });
  }

  /**
   * Metric names embedding an instance/component id must be split into a base
   * metric name plus a metric_instance_id label.
   */
  @Test
  public void testResponseWhenMetricNamesHaveAnInstanceId() throws IOException {
    Iterable<MetricsInfo> infos = Arrays.asList(
        new MetricsInfo("__connection_buffer_by_instanceid/container_1_word_5/packets", "1.0"),
        new MetricsInfo("__time_spent_back_pressure_by_compid/container_1_exclaim1_1", "1.0"),
        new MetricsInfo("__client_stmgr-92/__ack_tuples_to_stmgrs", "1.0"),
        new MetricsInfo("__instance_bytes_received/1", "1.0")
    );

    records = Arrays.asList(
        newRecord("machine/__stmgr__/stmgr-1", infos, Collections.emptyList())
    );

    PrometheusTestSink sink = new PrometheusTestSink();
    sink.init(defaultConf, context);
    for (MetricsRecord r : records) {
      sink.processRecord(r);
    }

    final String topology = "testTopology";
    final List<String> expectedLines = Arrays.asList(
        createMetric(topology, "__stmgr__", "stmgr-1",
            "connection_buffer_by_instanceid_packets", "container_1_word_5", "1.0"),
        createMetric(topology, "__stmgr__", "stmgr-1",
            "time_spent_back_pressure_by_compid", "container_1_exclaim1_1", "1.0"),
        createMetric(topology, "__stmgr__", "stmgr-1",
            "client_stmgr_ack_tuples_to_stmgrs", "stmgr-92", "1.0"),
        createMetric(topology, "__stmgr__", "stmgr-1",
            "instance_bytes_received", "1", "1.0")
    );

    final Set<String> generatedLines =
        new HashSet<>(Arrays.asList(new String(sink.generateResponse()).split("\n")));

    assertEquals(expectedLines.size(), generatedLines.size());

    expectedLines.forEach((String line) -> {
      assertTrue(generatedLines.contains(line));
    });
  }

  /** Kafka offset metrics must be rewritten by the configured regex rules. */
  @Test
  public void testApacheStormKafkaMetrics() throws IOException {
    Iterable<MetricsInfo> infos = Arrays.asList(
        new MetricsInfo("kafkaOffset/event_data/partition_0/spoutLag", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/partition_10/spoutLag", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/partition_0/earliestTimeOffset", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/totalRecordsInPartitions", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/totalSpoutLag", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/partition_2/spoutLag", "1.0")
    );

    records = Arrays.asList(
        newRecord("shared-aurora-036:31/spout-release-1/container_1_spout-release-1_31",
            infos, Collections.emptyList())
    );

    PrometheusTestSink sink = new PrometheusTestSink();
    sink.init(defaultConf, context);
    for (MetricsRecord r : records) {
      sink.processRecord(r);
    }

    final String topology = "testTopology";
    final List<String> expectedLines = Arrays.asList(
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_spout_lag", "event_data", "0", "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_spout_lag", "event_data", "10", "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_earliest_time_offset", "event_data", "0", "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_total_records_in_partitions", "event_data", null, "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_total_spout_lag", "event_data", null, "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_spout_lag", "event_data", "2", "1.0")
    );

    final Set<String> generatedLines =
        new HashSet<>(Arrays.asList(new String(sink.generateResponse()).split("\n")));

    assertEquals(expectedLines.size(), generatedLines.size());

    expectedLines.forEach((String line) -> {
      assertTrue(generatedLines.contains(line));
    });
  }

  /** Component type is inferred from characteristic metric names. */
  @Test
  public void testComponentType() {
    Map<String, Double> metrics = new HashMap<>();
    metrics.put("__execute-time-ns/default", 1d);
    assertEquals("bolt", PrometheusSink.getComponentType(metrics));

    metrics = new HashMap<>();
    metrics.put("__execute-time-ns/stream1", 1d);
    assertEquals("bolt", PrometheusSink.getComponentType(metrics));

    metrics = new HashMap<>();
    metrics.put("__next-tuple-count", 1d);
    assertEquals("spout", PrometheusSink.getComponentType(metrics));

    metrics = new HashMap<>();
    assertNull(PrometheusSink.getComponentType(metrics));
  }

  /** Formats an expected exposition line without a metric_instance_id label. */
  private String createMetric(String topology, String component, String instance,
        String metric, String value) {
    return createMetric(topology, component, instance, metric, null, value);
  }

  /**
   * Formats an expected exposition line; labels are emitted in alphabetical
   * order, matching the sink's output.
   */
  private String createMetric(String topology, String component, String instance,
      String metric, String metricNameInstanceId, String value) {
    if (metricNameInstanceId != null) {
      return String.format("heron_%s"
              + "{component=\"%s\",instance_id=\"%s\",metric_instance_id=\"%s\",topology=\"%s\"}"
              + " %s %d",
          metric, component, instance, metricNameInstanceId, topology, value, NOW);
    } else {
      return String.format("heron_%s{component=\"%s\",instance_id=\"%s\",topology=\"%s\"} %s %d",
          metric, component, instance, topology, value, NOW);
    }
  }

  /** Formats an expected Kafka-offset line; partition label is optional. */
  private String createOffsetMetric(String topology, String component, String instance,
      String metric, String topic, String partition, String value) {
    if (partition != null) {
      return String.format("heron_%s"
              + "{component=\"%s\",instance_id=\"%s\",partition=\"%s\","
              + "topic=\"%s\",topology=\"%s\"}"
              + " %s %d",
          metric, component, instance, partition, topic, topology, value, NOW);
    } else {
      return String.format("heron_%s"
              + "{component=\"%s\",instance_id=\"%s\",topic=\"%s\",topology=\"%s\"} %s %d",
          metric, component, instance, topic, topology, value, NOW);
    }
  }

  private MetricsRecord newRecord(String source, Iterable<MetricsInfo> metrics,
      Iterable<ExceptionInfo> exceptions) {
    return new MetricsRecord(source, metrics, exceptions);
  }
}
package com.dao;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.Buffer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import javax.swing.event.TreeExpansionEvent;

import com.datastruct.BST;
import com.datastruct.BST2;
import com.datastruct.HashManager;
import com.datastruct.Node2;
import com.datastruct.TreeNode;
import com.datastruct.TreeNode2;
import com.model.Course;
import com.model.Teacher;
import com.util.Config;
import com.util.FileUtil;
import com.util.HashUtil;
import com.util.JsonUtil;

/**
 * DAO for {@code Course} records. Courses are indexed by a BST keyed on a hash
 * of the course name; each BST node points at one of 20 bucket files under
 * {@code .\course_data\} holding JSON-serialized courses, one per line.
 *
 * FIX over previous version: all readers/writers are now opened with
 * try-with-resources, so they are closed on every path (the reader in
 * {@link #getCoursesByString(String)} previously leaked unconditionally, and
 * the others leaked whenever an IOException was thrown mid-stream).
 */
public class CourseDao {

    private BST bst;

    /**
     * Adds a course, keyed by a hash of its name.
     *
     * @return false if a course with the same name hash already exists
     */
    public boolean add(Course course) {
        bst = HashManager.getBST(Config.COU_INFORMASTION_CODEINDEX_FILENAME);
        int key = HashUtil.computeKey(course.getCou_name());
        TreeNode treeNode = bst.getNode(bst.getRoot(), key);
        if (treeNode != null) {
            return false;
        }
        treeNode = new TreeNode();
        treeNode.setKey(key);
        // 20 bucket files; the key modulo 20 picks the bucket
        treeNode.setFilename(".\\course_data\\cou_" + treeNode.getKey() % 20 + ".txt");
        bst.setRoot(bst.add(bst.getRoot(), treeNode));
        HashManager.saveBST(Config.COU_INFORMASTION_CODEINDEX_FILENAME, bst);
        FileUtil.append(JsonUtil.courseToString(course), treeNode.getFilename());
        return true;
    }

    /**
     * Removes the course with the given name from its bucket file and drops its
     * key from the index.
     *
     * @return false if no entry exists for the course name's key
     */
    public boolean remove(Course course) {
        bst = HashManager.getBST(Config.COU_INFORMASTION_CODEINDEX_FILENAME);
        int key = HashUtil.computeKey(course.getCou_name());
        TreeNode treeNode = bst.getNode(bst.getRoot(), key);
        if (treeNode == null) {
            return false;
        }
        File file = new File(treeNode.getFilename());
        List<String> data = new ArrayList<>();
        try {
            // keep every line except the one matching this course name
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String tmp;
                while ((tmp = bufferedReader.readLine()) != null) {
                    Course course2 = JsonUtil.stringToCourse(tmp);
                    if (!course2.getCou_name().equals(course.getCou_name())) {
                        data.add(tmp);
                    }
                }
            }
            writeLines(file, data);
        } catch (IOException e) {
            // NOTE(review): best-effort behavior preserved — the error is logged
            // and the index is still updated below
            e.printStackTrace();
        }
        bst.setRoot(bst.remove(bst.getRoot(), key));
        HashManager.saveBST(Config.COU_INFORMASTION_CODEINDEX_FILENAME, bst);
        return true;
    }

    /**
     * Replaces the stored JSON line for the given course (matched by name).
     *
     * @return false if no entry exists for the course name's key
     */
    public boolean update(Course course) {
        bst = HashManager.getBST(Config.COU_INFORMASTION_CODEINDEX_FILENAME);
        int key = HashUtil.computeKey(course.getCou_name());
        TreeNode treeNode = bst.getNode(bst.getRoot(), key);
        if (treeNode == null) {
            return false;
        }
        File file = new File(treeNode.getFilename());
        List<String> data = new ArrayList<>();
        try {
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String tmp;
                while ((tmp = bufferedReader.readLine()) != null) {
                    Course course2 = JsonUtil.stringToCourse(tmp);
                    if (course.getCou_name().equals(course2.getCou_name())) {
                        // swap in the updated serialization, keep all other lines
                        tmp = JsonUtil.courseToString(course);
                    }
                    data.add(tmp);
                }
            }
            writeLines(file, data);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return true;
    }

    /**
     * Looks up a course by exact name.
     *
     * @return the matching course, or null if the key is unknown; NOTE: if the
     *         key exists but no line matches, the last course read from the
     *         bucket file is returned (pre-existing behavior, preserved)
     */
    public Course getCourseByname(String name) {
        Course course = null;
        bst = HashManager.getBST(Config.COU_INFORMASTION_CODEINDEX_FILENAME);
        int key = HashUtil.computeKey(name);
        TreeNode treeNode = bst.getNode(bst.getRoot(), key);
        if (treeNode == null) {
            return course;
        }
        File file = new File(treeNode.getFilename());
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
            String tmp;
            while ((tmp = bufferedReader.readLine()) != null) {
                course = JsonUtil.stringToCourse(tmp);
                if (course.getCou_name().equals(name)) {
                    break;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return course;
    }

    /**
     * Returns every course whose name contains the given substring, scanning
     * all bucket files referenced by the index.
     *
     * @return null when the index references no files, otherwise the matches
     */
    public List<Course> getCoursesByString(String str) {
        List<Course> courses = null;
        bst = HashManager.getBST(Config.COU_INFORMASTION_CODEINDEX_FILENAME);
        Set<String> filenames = new HashSet<>();
        bst.getFilenames(bst.getRoot(), filenames);
        if (filenames.size() == 0) {
            return courses;
        }
        courses = new ArrayList<>();
        for (String filename : filenames) {
            // FIX: reader is now closed via try-with-resources (previously leaked)
            try (BufferedReader bufferedReader =
                    new BufferedReader(new FileReader(new File(filename)))) {
                String tmp;
                while ((tmp = bufferedReader.readLine()) != null) {
                    Course course = JsonUtil.stringToCourse(tmp);
                    if (course.getCou_name().contains(str)) {
                        courses.add(course);
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return courses;
    }

    /** Rewrites the bucket file with the given lines, one per line. */
    private static void writeLines(File file, List<String> data) throws IOException {
        try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file))) {
            for (String line : data) {
                bufferedWriter.write(line);
                bufferedWriter.newLine();
            }
            bufferedWriter.flush();
        }
    }

    public static void main(String args[]) {
        Course course = new Course();
        course.setCou_name("hahada8");
        course.setCou_startWeek("3");
        CourseDao courseDao = new CourseDao();
        courseDao.add(course);
        List<Course> tmp = courseDao.getCoursesByString("hahada");
        System.out.println(tmp);
    }
}
package org.pocketcampus.plugin.food.android;

import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.pocketcampus.platform.android.core.PluginController;
import org.pocketcampus.platform.android.core.PluginModel;
import org.pocketcampus.plugin.food.R;
import org.pocketcampus.plugin.food.android.iface.IFoodController;
import org.pocketcampus.plugin.food.android.iface.IFoodView;
import org.pocketcampus.plugin.food.android.req.CastVoteRequest;
import org.pocketcampus.plugin.food.android.req.GetFoodRequest;
import org.pocketcampus.plugin.food.shared.EpflMeal;
import org.pocketcampus.plugin.food.shared.EpflRating;
import org.pocketcampus.plugin.food.shared.EpflRestaurant;
import org.pocketcampus.plugin.food.shared.FoodRequest;
import org.pocketcampus.plugin.food.shared.MealTime;
import org.pocketcampus.plugin.food.shared.MealType;
import org.pocketcampus.plugin.food.shared.PriceTarget;
import org.pocketcampus.plugin.food.shared.FoodService.Client;
import org.pocketcampus.plugin.food.shared.FoodService.Iface;
import org.pocketcampus.plugin.food.shared.VoteRequest;
import org.pocketcampus.plugin.map.shared.MapItem;

import android.annotation.SuppressLint;
import android.content.SharedPreferences;
import android.provider.Settings.Secure;

import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.ImageLoaderConfiguration;

/**
 * Controller for the food plugin. Takes care of interactions between the model
 * and the view and gets information from the server.
 *
 * @author Amer <amer@accandme.com>
 */
public class FoodController extends PluginController implements IFoodController {

    /** The plugin's model. */
    private FoodModel mModel;

    /** Interface to the plugin's server client */
    private Iface mClient;

    /** The name of the plugin */
    private String mPluginName = "food";

    /**
     * Some Private Vars
     */
    // device identifier sent with votes to de-duplicate them per device
    private String androidId;
    private Map<MealType, String> iMealTypePicUrls;
    private Map<Long, AResto> iRestos;
    private Map<Long, AMeal> iMeals;
    private PriceTarget iServerDetectedPriceTarget;
    private Map<Long, String> iRestoNames;
    private Map<MealType, String> iTypeNames;

    /**
     * Initializes the plugin with a model and a client.
     */
    @Override
    public void onCreate() {
        // Initializing the model is part of the controller's job...
        mModel = new FoodModel(getApplicationContext());

        // ...as well as initializing the client.
        // The "client" is the connection we use to access the service.
        mClient = (Iface) getClient(new Client.Factory(), mPluginName);

        // initialize ImageLoader
        ImageLoader.getInstance().init(ImageLoaderConfiguration.createDefault(getApplicationContext()));

        androidId = Secure.getString(getApplicationContext().getContentResolver(), Secure.ANDROID_ID);
    }

    /**
     * Returns the model for which this controller works.
     */
    @Override
    public PluginModel getModel() {
        return mModel;
    }

    /**
     * Setter and getter for iMealTypePicUrls, iRestos, iRestoNames, iTypeNames
     */
    public Map<MealType, String> getMealTypePicUrls() {
        return iMealTypePicUrls;
    }

    public void setMealTypePicUrls(Map<MealType, String> obj) {
        iMealTypePicUrls = obj;
    }

    public Map<Long, AResto> getRestos() {
        return iRestos;
    }

    public void setRestos(Map<Long, AResto> obj) {
        iRestos = obj;
    }

    public Map<Long, AMeal> getMeals() {
        return iMeals;
    }

    public void setMeals(Map<Long, AMeal> obj) {
        iMeals = obj;
        // setting meals is what triggers the views to refresh
        mModel.getListenersToNotify().foodUpdated();
    }

    public Map<Long, String> getRestoNames() {
        return iRestoNames;
    }

    public void setRestoNames(Map<Long, String> obj) {
        iRestoNames = obj;
    }

    public Map<MealType, String> getTypeNames() {
        return iTypeNames;
    }

    public void setTypeNames(Map<MealType, String> obj) {
        iTypeNames = obj;
    }

    public PriceTarget getServerDetectedPriceTarget() {
        return iServerDetectedPriceTarget;
    }

    public void setServerDetectedPriceTarget(PriceTarget val) {
        iServerDetectedPriceTarget = val;
    }

    /**
     * Flattens the server's restaurant/meal hierarchy into the lookup maps used
     * by the views; called with the payload of a successful food request.
     * Order matters: names are set before restos/meals so listeners notified by
     * setMeals() see consistent state.
     */
    @SuppressLint("UseSparseArrays")
    public void setEpflMenus(List<EpflRestaurant> menus) {
        Map<Long, AResto> restos = new HashMap<Long, AResto>();
        Map<Long, AMeal> meals = new HashMap<Long, AMeal>();
        for(EpflRestaurant r : menus) {
            restos.put(r.getRId(), new AResto(r.getRId(), r.getRName(), getSatisfaction(r.getRRating()), r.getRLocation()));
            for(EpflMeal m : r.getRMeals()) {
                meals.put(m.getMId(), new AMeal(m.getMId(), m.getMName(), getDescription(m.getMDescription()), m.getMPrices(), getSatisfaction(m.getMRating()), m.getMTypes(), r.getRId()));
            }
        }
        Map<Long, String> restoNames = new HashMap<Long, String>();
        Map<MealType, String> typeNames = new HashMap<MealType, String>();
        for(AResto r : restos.values())
            restoNames.put(r.id, r.name);
        for(MealType t : MealType.values())
            typeNames.put(t, translateEnum(t.name()));
        setRestoNames(restoNames);
        setTypeNames(typeNames);
        setRestos(restos);
        setMeals(meals);
    }

    /**
     * Initiates a request to the server to get food.
     */
    public void refreshFood(IFoodView caller, Long foodDay, MealTime foodTime, boolean useCache) {
        FoodRequest req = new FoodRequest();
        req.setDeviceLanguage(Locale.getDefault().getLanguage());
        // day/time are optional filters; unset fields mean "server default"
        if(foodDay != null)
            req.setMealDate(foodDay);
        if(foodTime != null)
            req.setMealTime(foodTime);
        req.setUserGaspar(getUserGaspar());
        new GetFoodRequest(caller).setBypassCache(!useCache).start(this, mClient, req);
    }

    /**
     * Initiates a request to vote.
     */
    public void sendVoteReq(IFoodView caller, long mealId, double rating) {
        VoteRequest req = new VoteRequest(mealId, rating, androidId);
        new CastVoteRequest(caller).start(this, mClient, req);
    }

    /*********
     * SUPER DUPER HACK !!!
     */
    public String getUserGaspar() {
        // we just read the private files of the auth plugin :-(
        SharedPreferences iStorage = getSharedPreferences("AUTH_STORAGE_NAME", 0);
        return iStorage.getString("GASPAR_USERNAME_KEY", null);
    }

    /**
     * HELPERS CLASSES
     */
    // lightweight view model for a restaurant row
    public static class AResto {
        public AResto(long id, String name, String satisfaction, MapItem location) {
            this.id = id;
            this.name = name;
            this.satisfaction = satisfaction;
            this.location = location;
        }
        long id;
        String name;
        String satisfaction;
        MapItem location;
    }

    // lightweight view model for a meal row; resto links back to its AResto id
    public static class AMeal {
        public AMeal(long id, String name, String desc, Map<PriceTarget, Double> price, String satisfaction, List<MealType> types, long resto) {
            this.id = id;
            this.name = name;
            this.desc = desc;
            this.prices = price;
            this.satisfaction = satisfaction;
            this.types = types;
            this.resto = resto;
        }
        long id;
        String name;
        String desc;
        Map<PriceTarget, Double> prices;
        String satisfaction;
        List<MealType> types;
        long resto;

        // one-line summary for list display; <br> tags become spaces
        String getSummary() {
            return name + " " + (desc == null ? "" : desc.replaceAll("<br>", " "));
        }
    }

    // formats a rating as e.g. "85% satisfaction (12 votes)", or a "no votes" string
    private String getSatisfaction(EpflRating rating) {
        if(rating.getVoteCount() == 0)
            return getString(R.string.food_text_novotes);
        String satiscation = Math.round(rating.getRatingValue() * 100) + "% " + getString(R.string.food_text_satisfaction);
        String votes = rating.getVoteCount() + " " + getString(rating.getVoteCount() > 1 ? R.string.food_text_votes : R.string.food_text_vote);
        return satiscation + " (" + votes + ")";
    }

    // normalizes a description: null for blank, newlines converted to <br>
    private String getDescription(String desc) {
        desc = desc.trim();
        if(desc.length() == 0)
            return null;
        return desc.replaceAll("\\n", "<br>");
    }

    // looks up the localized string resource "food_enum_<enumVal>"; falls back
    // to the raw enum name when no resource exists
    public String translateEnum (String enumVal) {
        int resId = getResources().getIdentifier("food_enum_" + enumVal, "string", getPackageName());
        if(resId == 0)
            return enumVal;
        return getString(resId);
    }
}
/*
 * Copyright 2017 Dmitriy Ponomarenko
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.dimowner.tastycocktails.cocktails.list;

import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.util.DiffUtil;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;

import com.bumptech.glide.Glide;
import com.bumptech.glide.load.DataSource;
import com.bumptech.glide.load.engine.GlideException;
import com.bumptech.glide.request.RequestListener;
import com.bumptech.glide.request.target.Target;

import java.util.ArrayList;
import java.util.List;

import com.dimowner.tastycocktails.R;
import com.dimowner.tastycocktails.cocktails.CocktailsListFragment;
import com.dimowner.tastycocktails.util.TimeUtils;

import static com.dimowner.tastycocktails.cocktails.CocktailsListFragment.TYPE_HISTORY;

/**
 * Adapter for the cocktails list. Holds an unfiltered base data set plus a
 * "showing" list (base data after an optional name filter), and dispatches
 * fine-grained change notifications via {@link DiffUtil}. Supports up to two
 * optional footer rows.
 * <p>
 * Created on 26.07.2017.
 *
 * @author Dimowner
 */
public class CocktailsRecyclerAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {

	/** Regular cocktail row. */
	private final static int VIEW_TYPE_NORMAL = 1;
	/** Indeterminate progress row. */
	private final static int VIEW_TYPE_PROGRESS = 2;
	/** Standard list footer. */
	private final static int VIEW_TYPE_FOOTER = 3;
	/** Extra footer used as a bottom-panel margin. */
	private static final int VIEW_TYPE_FOOTER2 = 4;

	private boolean showFooter;

	/** Unfiltered backing data, replaced wholesale by {@link #setData(List)}. */
	private List<ListItem> mBaseData = new ArrayList<>();

	/** Current name filter; empty string means "not filtered". */
	private String filterStr = "";

	/** Items currently displayed (mBaseData after applying filterStr). */
	private List<ListItem> mShowingData;

	private ItemClickListener itemClickListener;
	private OnFavoriteClickListener onFavoriteClickListener;
	private ItemLongClickListener itemLongClickListener;

	/** Layout resource inflated for normal rows. */
	private int itemLayoutResId;

	private boolean showFooter2 = false;

	//In what type of list is adapter use: normal, fav, history;
	private int type = CocktailsListFragment.TYPE_UNKNOWN;

	/** Holder for a normal cocktail row. */
	public class ItemViewHolder extends RecyclerView.ViewHolder {
		TextView name;
		TextView description;
		ImageView image;
		ImageView btnFev;
		View view;
		LinearLayout container;

		public ItemViewHolder(View itemView) {
			super(itemView);
			this.view = itemView;
			this.name = itemView.findViewById(R.id.list_item_name);
			this.description = itemView.findViewById(R.id.list_item_description);
			this.image = itemView.findViewById(R.id.list_item_image);
			this.btnFev = itemView.findViewById(R.id.list_item_btn_favorite);
			this.container = itemView.findViewById(R.id.container);
		}

		public LinearLayout getContainer() {
			return container;
		}
	}

	/** Holder for the progress row. */
	static class LoadingViewHolder extends RecyclerView.ViewHolder {
		ProgressBar progressBar;

		LoadingViewHolder(View itemView) {
			super(itemView);
			progressBar = itemView.findViewById(R.id.list_item_progress);
		}
	}

	/** Holder for either footer row. */
	public static class FooterViewHolder extends RecyclerView.ViewHolder {
		final View view;

		FooterViewHolder(View itemView) {
			super(itemView);
			view = itemView;
		}
	}

	/**
	 * @param layoutResId layout inflated for normal rows.
	 */
	public CocktailsRecyclerAdapter(int layoutResId) {
		this.mShowingData = new ArrayList<>();
		this.itemLayoutResId = layoutResId;
	}

	/**
	 * @param type        list flavor (normal/favorites/history), see CocktailsListFragment.
	 * @param layoutResId layout inflated for normal rows.
	 */
	public CocktailsRecyclerAdapter(int type, int layoutResId) {
		this.mShowingData = new ArrayList<>();
		this.itemLayoutResId = layoutResId;
		this.type = type;
	}

	/**
	 * Toggles the standard footer flag.
	 * NOTE(review): this only flips the flag without notifying the adapter;
	 * presumably a notify call elsewhere follows — confirm against callers.
	 */
	public void showFooter(boolean show) {
		if (showFooter == show) return;
		showFooter = show;
	}

	@NonNull
	@Override
	public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
		LayoutInflater inflater = LayoutInflater.from(parent.getContext());
		switch (viewType) {
			case VIEW_TYPE_NORMAL:
				return new ItemViewHolder(inflater.inflate(itemLayoutResId, parent, false));
			case VIEW_TYPE_PROGRESS:
				return new LoadingViewHolder(inflater.inflate(R.layout.list_item_progress, parent, false));
			case VIEW_TYPE_FOOTER:
				return new FooterViewHolder(inflater.inflate(R.layout.list_footer, parent, false));
			case VIEW_TYPE_FOOTER2:
				return new FooterViewHolder(inflater.inflate(R.layout.list_item_footer3, parent, false));
			default:
				// Previously returned null here, which violates the @NonNull
				// contract and crashes inside RecyclerView with an opaque NPE.
				// Fail fast with a descriptive message instead.
				throw new IllegalArgumentException("Unknown view type: " + viewType);
		}
	}

	@Override
	public void onBindViewHolder(final RecyclerView.ViewHolder h, final int position1) {
		if (h.getItemViewType() == VIEW_TYPE_NORMAL) {
			final int pos = position1;
			ItemViewHolder holder = (ItemViewHolder) h;
			holder.name.setText(mShowingData.get(pos).getName());
			if (type == TYPE_HISTORY) {
				// History rows show the visit timestamp instead of the category.
				holder.description.setText(TimeUtils.formatTime(mShowingData.get(pos).getHistory()));
			} else {
				holder.description.setText(mShowingData.get(pos).getCategory());
			}
			if (mShowingData.get(pos).getAvatar_url() != null) {
				Glide.with(holder.view.getContext())
						.load(mShowingData.get(pos).getAvatar_url())
						.listener(new RequestListener<Drawable>() {
							@Override
							public boolean onLoadFailed(@Nullable GlideException e, Object model,
									Target<Drawable> target, boolean isFirstResource) {
								// Show the placeholder; returning false lets Glide
								// run its own error handling as well.
								holder.image.setImageResource(R.drawable.no_image);
								return false;
							}

							@Override
							public boolean onResourceReady(Drawable resource, Object model,
									Target<Drawable> target, DataSource dataSource, boolean isFirstResource) {
								return false;
							}
						})
						.into(holder.image);
			} else {
				holder.image.setImageResource(R.drawable.no_image);
			}
			// Click listeners resolve the position by item id at click time,
			// because a position captured at bind time can go stale after
			// diff-based updates.
			final int id = (int) mShowingData.get(pos).getId();
			holder.btnFev.setOnClickListener(v -> {
				if (onFavoriteClickListener != null) {
					onFavoriteClickListener.onFavoriteClick(
							holder.btnFev, findPositionForId(id), id, -1);
				}
			});
			if (mShowingData.get(pos).isFavorite()) {
				holder.btnFev.setImageResource(R.drawable.round_heart_grey);
			} else {
				holder.btnFev.setImageResource(R.drawable.round_heart_border_grey);
			}
			holder.view.setOnClickListener(v -> {
				if (itemClickListener != null) {
					itemClickListener.onItemClick(v, findPositionForId(id));
				}
			});
			holder.view.setOnLongClickListener(v -> {
				if (itemLongClickListener != null) {
					itemLongClickListener.onItemLongClick(v, id, findPositionForId(id));
				}
				return true;
			});
		} else if (h.getItemViewType() == VIEW_TYPE_PROGRESS) {
			//Do nothing
		}
	}

	/**
	 * Returns the current showing-list position of the item with the given id.
	 * NOTE(review): returns 0 (not -1) when the id is absent, so callers may
	 * act on the first row for a vanished item — confirm this is intended.
	 */
	private int findPositionForId(int id) {
		for (int i = 0; i < mShowingData.size(); i++) {
			if (id == mShowingData.get(i).getId()) {
				return i;
			}
		}
		return 0;
	}

	@Override
	public int getItemCount() {
		return mShowingData.size() + (showFooter ? 1 : 0) + (showFooter2 ? 1 : 0);
	}

	@Override
	public int getItemViewType(int position) {
		// Positions past the data are footers; which footer depends on flags.
		// NOTE(review): VIEW_TYPE_PROGRESS is never returned here, so the
		// progress row is currently unreachable — confirm whether that is dead code.
		if (showFooter) {
			if (position == mShowingData.size()) {
				return VIEW_TYPE_FOOTER;
			} else if (position == mShowingData.size() + 1) {
				return VIEW_TYPE_FOOTER2;
			}
		} else {
			if (position == mShowingData.size()) {
				return VIEW_TYPE_FOOTER2;
			}
		}
		return VIEW_TYPE_NORMAL;
	}

	/**
	 * Shows or hides the bottom-panel margin footer, notifying the adapter of
	 * the inserted/removed row (its position depends on whether the standard
	 * footer is visible).
	 */
	public void showBottomPanelMargin(boolean b) {
		showFooter2 = b;
		int footerPos = showFooter ? mShowingData.size() + 1 : mShowingData.size();
		if (b) {
			notifyItemInserted(footerPos);
		} else {
			notifyItemRemoved(footerPos);
		}
	}

	/** Returns the showing item at {@code pos}, or null when out of range. */
	public ListItem getItem(int pos) {
		if (pos >= 0 && pos < mShowingData.size()) {
			return mShowingData.get(pos);
		} else {
			return null;
		}
	}

	/**
	 * Replaces the base data set. When a filter is active the showing list is
	 * rebuilt through the filter; otherwise the base data is displayed directly.
	 */
	public void setData(List<ListItem> data) {
		mBaseData = data;
		if (isFiltered()) {
			updateShowingDataWithFilter();
		} else {
			showBaseData();
		}
	}

	public List<ListItem> getData() {
		return mShowingData;
	}

	/** Replaces the showing list with the full base list, dispatching a diff. */
	private void showBaseData() {
		CocktailsDiffUtilCallback callback =
				new CocktailsDiffUtilCallback(mShowingData, mBaseData);
		DiffUtil.DiffResult diffResult = DiffUtil.calculateDiff(callback);
		mShowingData.clear();
		mShowingData.addAll(mBaseData);
		diffResult.dispatchUpdatesTo(this);
	}

	/**
	 * Update showing data by applying search filter (case-insensitive
	 * substring match on the item name), dispatching a diff against the
	 * previously shown items.
	 */
	private void updateShowingDataWithFilter() {
		if (isFiltered()) {
			List<ListItem> oldData = new ArrayList<>(mShowingData);
			mShowingData.clear();
			for (int i = 0; i < mBaseData.size(); i++) {
				if (mBaseData.get(i).getName().toLowerCase().contains(filterStr.toLowerCase())) {
					mShowingData.add(mBaseData.get(i));
				}
			}
			CocktailsDiffUtilCallback callback =
					new CocktailsDiffUtilCallback(oldData, mShowingData);
			DiffUtil.DiffResult diffResult = DiffUtil.calculateDiff(callback);
			diffResult.dispatchUpdatesTo(this);
		}
	}

	/**
	 * Applies a name filter; null or empty clears the filter and restores the
	 * full base data set.
	 */
	public void applyFilter(String str) {
		if (str == null || str.isEmpty()) {
			filterStr = "";
			showBaseData();
		} else {
			filterStr = str;
			updateShowingDataWithFilter();
		}
	}

	private boolean isFiltered() {
		return (filterStr != null && !filterStr.isEmpty());
	}

	public void setItemClickListener(ItemClickListener itemClickListener) {
		this.itemClickListener = itemClickListener;
	}

	public void setOnFavoriteClickListener(OnFavoriteClickListener onFavoriteClickListener) {
		this.onFavoriteClickListener = onFavoriteClickListener;
	}

	public void setItemLongClickListener(ItemLongClickListener itemLongClickListener) {
		this.itemLongClickListener = itemLongClickListener;
	}

	public interface ItemClickListener {
		void onItemClick(View view, int position);
	}

	public interface ItemLongClickListener {
		void onItemLongClick(View view, long id, int position);
	}

	public interface OnFavoriteClickListener {
		void onFavoriteClick(ImageView view, int position, int id, int action);
	}
}
package org.springframework.cloud.netflix.zuul.filters; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.test.IntegrationTest; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.boot.test.TestRestTemplate; import org.springframework.cloud.netflix.zuul.EnableZuulProxy; import org.springframework.cloud.netflix.zuul.RoutesEndpoint; import org.springframework.cloud.netflix.zuul.ZuulProxyConfiguration; import org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator; import org.springframework.cloud.netflix.zuul.filters.route.SimpleHostRoutingFilter; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.http.HttpEntity; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import 
org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.client.RestTemplate; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import static junit.framework.TestCase.assertFalse; import static junit.framework.TestCase.assertTrue; import static org.junit.Assert.assertEquals; @RunWith(SpringJUnit4ClassRunner.class) @SpringApplicationConfiguration(classes = SampleCustomZuulProxyApplication.class) @WebAppConfiguration @IntegrationTest({"server.port: 0", "server.contextPath: /app"}) @DirtiesContext public class CustomHostRoutingFilterTests { @Value("${local.server.port}") private int port; @Autowired private DiscoveryClientRouteLocator routes; @Autowired private RoutesEndpoint endpoint; @Test public void getOnSelfViaCustomHostRoutingFilter() { this.routes.addRoute("/self/**", "http://localhost:" + this.port + "/app"); this.endpoint.reset(); ResponseEntity<String> result = new TestRestTemplate().getForEntity( "http://localhost:" + this.port + "/app/self/get/1", String.class); assertEquals(HttpStatus.OK, result.getStatusCode()); assertEquals("Get 1", result.getBody()); } @Test public void postOnSelfViaCustomHostRoutingFilter() { this.routes.addRoute("/self/**", "http://localhost:" + this.port + "/app"); this.endpoint.reset(); MultiValueMap<String, Object> params = new LinkedMultiValueMap<>(); params.add("id", "2"); ResponseEntity<String> result = new TestRestTemplate().postForEntity( "http://localhost:" + this.port + "/app/self/post", params, String.class); assertEquals(HttpStatus.OK, result.getStatusCode()); assertEquals("Post 2", result.getBody()); } @Test public void putOnSelfViaCustomHostRoutingFilter() { this.routes.addRoute("/self/**", "http://localhost:" + this.port + "/app"); this.endpoint.reset(); ResponseEntity<String> result = new TestRestTemplate().exchange( "http://localhost:" + this.port + "/app/self/put/3", HttpMethod.PUT, new 
HttpEntity<>((Void) null), String.class); assertEquals(HttpStatus.OK, result.getStatusCode()); assertEquals("Put 3", result.getBody()); } @Test public void patchOnSelfViaCustomHostRoutingFilter() { this.routes.addRoute("/self/**", "http://localhost:" + this.port + "/app"); this.endpoint.reset(); MultiValueMap<String, Object> params = new LinkedMultiValueMap<>(); params.add("patch", "5"); ResponseEntity<String> result = new TestRestTemplate().exchange( "http://localhost:" + this.port + "/app/self/patch/4", HttpMethod.PATCH, new HttpEntity<>(params), String.class); assertEquals(HttpStatus.OK, result.getStatusCode()); assertEquals("Patch 45", result.getBody()); } @Test public void getOnSelfIgnoredHeaders() { this.routes.addRoute("/self/**", "http://localhost:" + this.port + "/app"); this.endpoint.reset(); ResponseEntity<String> result = new TestRestTemplate().getForEntity( "http://localhost:" + this.port + "/app/self/get/1", String.class); assertEquals(HttpStatus.OK, result.getStatusCode()); assertTrue(result.getHeaders().containsKey("X-NotIgnored")); assertFalse(result.getHeaders().containsKey("X-Ignored")); } @Test public void getOnSelfWithSessionCookie() { this.routes.addRoute("/self/**", "http://localhost:" + this.port + "/app"); this.endpoint.reset(); RestTemplate restTemplate = new RestTemplate(); ResponseEntity<String> result1 = restTemplate.getForEntity( "http://localhost:" + this.port + "/app/self/cookie/1", String.class); ResponseEntity<String> result2 = restTemplate.getForEntity( "http://localhost:" + this.port + "/app/self/cookie/2", String.class); assertEquals("SetCookie 1", result1.getBody()); assertEquals("GetCookie 1", result2.getBody()); } } @Configuration @EnableAutoConfiguration @RestController class SampleCustomZuulProxyApplication { @RequestMapping(value = "/get/{id}", method = RequestMethod.GET) public String get(@PathVariable String id, HttpServletResponse response) { response.setHeader("X-Ignored", "foo"); response.setHeader("X-NotIgnored", 
"bar"); return "Get " + id; } @RequestMapping(value = "/cookie/{id}", method = RequestMethod.GET) public String getWithCookie(@PathVariable String id, HttpSession session) { Object testCookie = session.getAttribute("testCookie"); if (testCookie != null) { return "GetCookie " + testCookie; } session.setAttribute("testCookie", id); return "SetCookie " + id; } @RequestMapping(value = "/post", method = RequestMethod.POST) public String post(@RequestParam("id") String id) { return "Post " + id; } @RequestMapping(value = "/put/{id}", method = RequestMethod.PUT) public String put(@PathVariable String id) { return "Put " + id; } @RequestMapping(value = "/patch/{id}", method = RequestMethod.PATCH) public String patch(@PathVariable String id, @RequestParam("patch") String patch) { return "Patch " + id + patch; } public static void main(String[] args) { SpringApplication.run(SampleCustomZuulProxyApplication.class, args); } @Configuration @EnableZuulProxy protected static class CustomZuulProxyConfig extends ZuulProxyConfiguration { @Bean @Override public SimpleHostRoutingFilter simpleHostRoutingFilter() { return new CustomHostRoutingFilter(); } private class CustomHostRoutingFilter extends SimpleHostRoutingFilter { @Override public Object run() { super.addIgnoredHeaders("X-Ignored"); return super.run(); } @Override protected CloseableHttpClient newClient() { // Custom client with cookie support. // In practice, we would want a custom cookie store using a multimap with a user key. return HttpClients.custom() .setConnectionManager(newConnectionManager()) .setDefaultCookieStore(new BasicCookieStore()) .setDefaultRequestConfig(RequestConfig.custom() .setCookieSpec(CookieSpecs.DEFAULT) .build()) .build(); } } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.redshiftdataapi.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * The properties of a table. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-data-2019-12-20/TableMember" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class TableMember implements Serializable, Cloneable, StructuredPojo { /** * <p> * The name of the table. * </p> */ private String name; /** * <p> * The schema containing the table. * </p> */ private String schema; /** * <p> * The type of the table. Possible values include TABLE, VIEW, SYSTEM TABLE, GLOBAL TEMPORARY, LOCAL TEMPORARY, * ALIAS, and SYNONYM. * </p> */ private String type; /** * <p> * The name of the table. * </p> * * @param name * The name of the table. */ public void setName(String name) { this.name = name; } /** * <p> * The name of the table. * </p> * * @return The name of the table. */ public String getName() { return this.name; } /** * <p> * The name of the table. * </p> * * @param name * The name of the table. * @return Returns a reference to this object so that method calls can be chained together. */ public TableMember withName(String name) { setName(name); return this; } /** * <p> * The schema containing the table. 
* </p> * * @param schema * The schema containing the table. */ public void setSchema(String schema) { this.schema = schema; } /** * <p> * The schema containing the table. * </p> * * @return The schema containing the table. */ public String getSchema() { return this.schema; } /** * <p> * The schema containing the table. * </p> * * @param schema * The schema containing the table. * @return Returns a reference to this object so that method calls can be chained together. */ public TableMember withSchema(String schema) { setSchema(schema); return this; } /** * <p> * The type of the table. Possible values include TABLE, VIEW, SYSTEM TABLE, GLOBAL TEMPORARY, LOCAL TEMPORARY, * ALIAS, and SYNONYM. * </p> * * @param type * The type of the table. Possible values include TABLE, VIEW, SYSTEM TABLE, GLOBAL TEMPORARY, LOCAL * TEMPORARY, ALIAS, and SYNONYM. */ public void setType(String type) { this.type = type; } /** * <p> * The type of the table. Possible values include TABLE, VIEW, SYSTEM TABLE, GLOBAL TEMPORARY, LOCAL TEMPORARY, * ALIAS, and SYNONYM. * </p> * * @return The type of the table. Possible values include TABLE, VIEW, SYSTEM TABLE, GLOBAL TEMPORARY, LOCAL * TEMPORARY, ALIAS, and SYNONYM. */ public String getType() { return this.type; } /** * <p> * The type of the table. Possible values include TABLE, VIEW, SYSTEM TABLE, GLOBAL TEMPORARY, LOCAL TEMPORARY, * ALIAS, and SYNONYM. * </p> * * @param type * The type of the table. Possible values include TABLE, VIEW, SYSTEM TABLE, GLOBAL TEMPORARY, LOCAL * TEMPORARY, ALIAS, and SYNONYM. * @return Returns a reference to this object so that method calls can be chained together. */ public TableMember withType(String type) { setType(type); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getSchema() != null) sb.append("Schema: ").append(getSchema()).append(","); if (getType() != null) sb.append("Type: ").append(getType()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof TableMember == false) return false; TableMember other = (TableMember) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getSchema() == null ^ this.getSchema() == null) return false; if (other.getSchema() != null && other.getSchema().equals(this.getSchema()) == false) return false; if (other.getType() == null ^ this.getType() == null) return false; if (other.getType() != null && other.getType().equals(this.getType()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getSchema() == null) ? 0 : getSchema().hashCode()); hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode()); return hashCode; } @Override public TableMember clone() { try { return (TableMember) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.redshiftdataapi.model.transform.TableMemberMarshaller.getInstance().marshall(this, protocolMarshaller); } }
package org.apache.lucene.search.suggest.analyzing; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // TODO // - test w/ syns // - add pruning of low-freq ngrams? import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; //import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.AnalyzerWrapper; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import 
org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.MultiFields; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.suggest.InputIterator; import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.search.suggest.Sort; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.InputStreamDataInput; import org.apache.lucene.store.OutputStreamDataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.Version; import org.apache.lucene.util.fst.Builder; import org.apache.lucene.util.fst.FST.Arc; import org.apache.lucene.util.fst.FST.BytesReader; import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.Outputs; import org.apache.lucene.util.fst.PositiveIntOutputs; import org.apache.lucene.util.fst.Util.MinResult; import org.apache.lucene.util.fst.Util; /** * Builds an ngram model from the text sent to {@link * #build} and predicts based on the last grams-1 tokens in * the request sent to {@link #lookup}. This tries to * handle the "long tail" of suggestions for when the * incoming query is a never before seen query string. * * <p>Likely this suggester would only be used as a * fallback, when the primary suggester fails to find * any suggestions. * * <p>Note that the weight for each suggestion is unused, * and the suggestions are the analyzed forms (so your * analysis process should normally be very "light"). 
* * <p>This uses the stupid backoff language model to smooth * scores across ngram models; see * "Large language models in machine translation", * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.76.1126 * for details. * * <p> From {@link #lookup}, the key of each result is the * ngram token; the value is Long.MAX_VALUE * score (fixed * point, cast to long). Divide by Long.MAX_VALUE to get * the score back, which ranges from 0.0 to 1.0. * * onlyMorePopular is unused. * * @lucene.experimental */ public class FreeTextSuggester extends Lookup { /** Codec name used in the header for the saved model. */ public final static String CODEC_NAME = "freetextsuggest"; /** Initial version of the the saved model file format. */ public final static int VERSION_START = 0; /** Current version of the the saved model file format. */ public final static int VERSION_CURRENT = VERSION_START; /** By default we use a bigram model. */ public static final int DEFAULT_GRAMS = 2; // In general this could vary with gram, but the // original paper seems to use this constant: /** The constant used for backoff smoothing; during * lookup, this means that if a given trigram did not * occur, and we backoff to the bigram, the overall score * will be 0.4 times what the bigram model would have * assigned. */ public final static double ALPHA = 0.4; /** Holds 1gram, 2gram, 3gram models as a single FST. */ private FST<Long> fst; /** * Analyzer that will be used for analyzing suggestions at * index time. */ private final Analyzer indexAnalyzer; private long totTokens; /** * Analyzer that will be used for analyzing suggestions at * query time. */ private final Analyzer queryAnalyzer; // 2 = bigram, 3 = trigram private final int grams; private final byte separator; /** The default character used to join multiple tokens * into a single ngram token. The input tokens produced * by the analyzer must not contain this character. 
*/
  public static final byte DEFAULT_SEPARATOR = 0x1e;

  /** Instantiate, using the provided analyzer for both
   *  indexing and lookup, using bigram model by default. */
  public FreeTextSuggester(Analyzer analyzer) {
    this(analyzer, analyzer, DEFAULT_GRAMS);
  }

  /** Instantiate, using the provided indexing and lookup
   *  analyzers, using bigram model by default. */
  public FreeTextSuggester(Analyzer indexAnalyzer, Analyzer queryAnalyzer) {
    this(indexAnalyzer, queryAnalyzer, DEFAULT_GRAMS);
  }

  /** Instantiate, using the provided indexing and lookup
   *  analyzers, with the specified model (2
   *  = bigram, 3 = trigram, etc.). */
  public FreeTextSuggester(Analyzer indexAnalyzer, Analyzer queryAnalyzer, int grams) {
    this(indexAnalyzer, queryAnalyzer, grams, DEFAULT_SEPARATOR);
  }

  /** Instantiate, using the provided indexing and lookup
   *  analyzers, and specified model (2 = bigram, 3 =
   *  trigram ,etc.).  The separator is passed to {@link
   *  ShingleFilter#setTokenSeparator} to join multiple
   *  tokens into a single ngram token; it must be an ascii
   *  (7-bit-clean) byte.  No input tokens should have this
   *  byte, otherwise {@code IllegalArgumentException} is
   *  thrown. */
  public FreeTextSuggester(Analyzer indexAnalyzer, Analyzer queryAnalyzer, int grams, byte separator) {
    this.grams = grams;
    // Both analyzers are wrapped so they emit shingles (token ngrams) up to `grams`.
    this.indexAnalyzer = addShingles(indexAnalyzer);
    this.queryAnalyzer = addShingles(queryAnalyzer);
    // NOTE(review): these argument checks run after `grams` has already been
    // consumed by addShingles above; invalid values still throw, just later
    // than ideal — consider validating first.
    if (grams < 1) {
      throw new IllegalArgumentException("grams must be >= 1");
    }
    // High bit set means the separator is not 7-bit-clean ascii.
    if ((separator & 0x80) != 0) {
      throw new IllegalArgumentException("separator must be simple ascii character");
    }
    this.separator = separator;
  }

  /** Returns byte size of the underlying FST.
   */
  @Override
  public long sizeInBytes() {
    if (fst == null) {
      return 0;
    }
    return fst.sizeInBytes();
  }

  /**
   * Compares serialized entries: first by the length-prefixed token bytes,
   * then by total length (shorter surface forms first), then by the remaining
   * surface-form bytes. Entry layout (as read here): a short token length,
   * the token bytes, then the surface form to the end of the record.
   * Not thread-safe: reuses per-instance reader/scratch state.
   */
  private static class AnalyzingComparator implements Comparator<BytesRef> {

    private final ByteArrayDataInput readerA = new ByteArrayDataInput();
    private final ByteArrayDataInput readerB = new ByteArrayDataInput();
    private final BytesRef scratchA = new BytesRef();
    private final BytesRef scratchB = new BytesRef();

    @Override
    public int compare(BytesRef a, BytesRef b) {
      readerA.reset(a.bytes, a.offset, a.length);
      readerB.reset(b.bytes, b.offset, b.length);

      // By token:
      // scratch refs are pointed directly into a/b's backing arrays — no copying.
      scratchA.length = readerA.readShort();
      scratchA.bytes = a.bytes;
      scratchA.offset = readerA.getPosition();

      scratchB.bytes = b.bytes;
      scratchB.length = readerB.readShort();
      scratchB.offset = readerB.getPosition();

      int cmp = scratchA.compareTo(scratchB);
      if (cmp != 0) {
        return cmp;
      }
      readerA.skipBytes(scratchA.length);
      readerB.skipBytes(scratchB.length);

      // By length (smaller surface forms sorted first):
      cmp = a.length - b.length;
      if (cmp != 0) {
        return cmp;
      }

      // By surface form:
      scratchA.offset = readerA.getPosition();
      scratchA.length = a.length - scratchA.offset;
      scratchB.offset = readerB.getPosition();
      scratchB.length = b.length - scratchB.offset;

      return scratchA.compareTo(scratchB);
    }
  }

  // Wraps `other` so its output is shingled into ngrams when grams > 1;
  // a unigram model needs no wrapping.
  private Analyzer addShingles(final Analyzer other) {
    if (grams == 1) {
      return other;
    } else {
      // TODO: use ShingleAnalyzerWrapper?
// Tack on ShingleFilter to the end, to generate token ngrams: return new AnalyzerWrapper(other.getReuseStrategy()) { @Override protected Analyzer getWrappedAnalyzer(String fieldName) { return other; } @Override protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) { ShingleFilter shingles = new ShingleFilter(components.getTokenStream(), 2, grams); shingles.setTokenSeparator(Character.toString((char) separator)); return new TokenStreamComponents(components.getTokenizer(), shingles); } }; } } @Override public void build(InputIterator iterator) throws IOException { build(iterator, IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB); } /** Build the suggest index, using up to the specified * amount of temporary RAM while building. Note that * the weights for the suggestions are ignored. */ public void build(InputIterator iterator, double ramBufferSizeMB) throws IOException { if (iterator.hasPayloads()) { throw new IllegalArgumentException("payloads are not supported"); } String prefix = getClass().getSimpleName(); File directory = Sort.defaultTempDir(); // TODO: messy ... java7 has Files.createTempDirectory // ... but 4.x is java6: File tempIndexPath = null; Random random = new Random(); while (true) { tempIndexPath = new File(directory, prefix + ".index." + random.nextInt(Integer.MAX_VALUE)); if (tempIndexPath.mkdir()) { break; } } Directory dir = FSDirectory.open(tempIndexPath); IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_CURRENT, indexAnalyzer); iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE); iwc.setRAMBufferSizeMB(ramBufferSizeMB); IndexWriter writer = new IndexWriter(dir, iwc); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); // TODO: if only we had IndexOptions.TERMS_ONLY... 
ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
  ft.setOmitNorms(true);
  ft.freeze();

  Document doc = new Document();
  Field field = new Field("body", "", ft);
  doc.add(field);

  totTokens = 0;
  IndexReader reader = null;

  boolean success = false;
  try {
    // Feed every suggestion through the (shingled) index analyzer into
    // a throwaway on-disk index; its terms become the ngrams.
    while (true) {
      BytesRef surfaceForm = iterator.next();
      if (surfaceForm == null) {
        break;
      }
      field.setStringValue(surfaceForm.utf8ToString());
      writer.addDocument(doc);
    }
    reader = DirectoryReader.open(writer, false);

    Terms terms = MultiFields.getTerms(reader, "body");
    if (terms == null) {
      throw new IllegalArgumentException("need at least one suggestion");
    }

    // Move all ngrams into an FST:
    TermsEnum termsEnum = terms.iterator(null);
    Outputs<Long> outputs = PositiveIntOutputs.getSingleton();
    Builder<Long> builder = new Builder<Long>(FST.INPUT_TYPE.BYTE1, outputs);

    IntsRef scratchInts = new IntsRef();
    while (true) {
      BytesRef term = termsEnum.next();
      if (term == null) {
        break;
      }
      int ngramCount = countGrams(term);
      if (ngramCount > grams) {
        // More separators than the model order can only happen when an
        // input token itself contained the separator byte.
        throw new IllegalArgumentException("tokens must not contain separator byte; got token=" + term + " but gramCount=" + ngramCount + ", which is greater than expected max ngram size=" + grams);
      }
      if (ngramCount == 1) {
        // Sum of unigram frequencies; used as the top-level denominator
        // when scoring lookups.
        totTokens += termsEnum.totalTermFreq();
      }

      builder.add(Util.toIntsRef(term, scratchInts), encodeWeight(termsEnum.totalTermFreq()));
    }

    fst = builder.finish();
    if (fst == null) {
      throw new IllegalArgumentException("need at least one suggestion");
    }
    //System.out.println("FST: " + fst.getNodeCount() + " nodes");

    /*
    PrintWriter pw = new PrintWriter("/x/tmp/out.dot");
    Util.toDot(fst, pw, true, true);
    pw.close();
    */
    success = true;
  } finally {
    try {
      if (success) {
        IOUtils.close(writer, reader);
      } else {
        IOUtils.closeWhileHandlingException(writer, reader);
      }
    } finally {
      // The temp index is only scaffolding; remove every file and the
      // directory itself.  A failed delete is a hard error so stale
      // temp dirs don't silently accumulate.
      for(String file : dir.listAll()) {
        File path = new File(tempIndexPath, file);
        if (path.delete() == false) {
          throw new IllegalStateException("failed to remove " + path);
        }
      }
      if (tempIndexPath.delete() == false) {
        throw new IllegalStateException("failed to remove " + tempIndexPath);
      }
      dir.close();
    }
  }
}

/** Serializes the model: codec header, separator, grams,
 *  total token count, then the FST itself. */
@Override
public boolean store(OutputStream output) throws IOException {
  DataOutput out = new OutputStreamDataOutput(output);
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeByte(separator);
  out.writeVInt(grams);
  out.writeVLong(totTokens);
  fst.save(out);
  return true;
}

/** Deserializes a model previously written by {@link #store};
 *  the stored separator and grams must match this instance's
 *  configuration. */
@Override
public boolean load(InputStream input) throws IOException {
  DataInput in = new InputStreamDataInput(input);
  CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_START);
  byte separatorOrig = in.readByte();
  if (separatorOrig != separator) {
    throw new IllegalStateException("separator=" + separator + " is incorrect: original model was built with separator=" + separatorOrig);
  }
  int gramsOrig = in.readVInt();
  if (gramsOrig != grams) {
    throw new IllegalStateException("grams=" + grams + " is incorrect: original model was built with grams=" + gramsOrig);
  }
  totTokens = in.readVLong();

  fst = new FST<Long>(in, PositiveIntOutputs.getSingleton());

  return true;
}

/** Lookup entry point; {@code onlyMorePopular} is ignored. */
@Override
public List<LookupResult> lookup(final CharSequence key, /* ignored */ boolean onlyMorePopular, int num) {
  try {
    return lookup(key, num);
  } catch (IOException ioe) {
    // bogus:
    throw new RuntimeException(ioe);
  }
}

/** Returns 1 + the number of separator bytes in {@code token},
 *  i.e. the gram order of an ngram token. */
private int countGrams(BytesRef token) {
  int count = 1;
  for(int i=0;i<token.length;i++) {
    if (token.bytes[token.offset + i] == separator) {
      count++;
    }
  }
  return count;
}

/** Retrieve suggestions.
*/
public List<LookupResult> lookup(final CharSequence key, int num) throws IOException {
  try (TokenStream ts = queryAnalyzer.tokenStream("", key.toString())) {
    TermToBytesRefAttribute termBytesAtt = ts.addAttribute(TermToBytesRefAttribute.class);
    OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
    PositionLengthAttribute posLenAtt = ts.addAttribute(PositionLengthAttribute.class);
    PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);
    ts.reset();

    BytesRef[] lastTokens = new BytesRef[grams];
    //System.out.println("lookup: key='" + key + "'");

    // Run full analysis, but save only the
    // last 1gram, last 2gram, etc.:
    BytesRef tokenBytes = termBytesAtt.getBytesRef();
    int maxEndOffset = -1;
    boolean sawRealToken = false;
    while(ts.incrementToken()) {
      termBytesAtt.fillBytesRef();
      sawRealToken |= tokenBytes.length > 0;
      // TODO: this is somewhat iffy; today, ShingleFilter
      // sets posLen to the gram count; maybe we should make
      // a separate dedicated att for this?
      int gramCount = posLenAtt.getPositionLength();

      assert gramCount <= grams;

      // Safety: make sure the recalculated count "agrees":
      if (countGrams(tokenBytes) != gramCount) {
        throw new IllegalArgumentException("tokens must not contain separator byte; got token=" + tokenBytes + " but gramCount=" + gramCount + " does not match recalculated count=" + countGrams(tokenBytes));
      }
      maxEndOffset = Math.max(maxEndOffset, offsetAtt.endOffset());
      lastTokens[gramCount-1] = BytesRef.deepCopyOf(tokenBytes);
    }
    ts.end();

    if (!sawRealToken) {
      throw new IllegalArgumentException("no tokens produced by analyzer, or the only tokens were empty strings");
    }

    // Carefully fill last tokens with _ tokens;
    // ShingleFilter apparently won't emit "only hole"
    // tokens:
    int endPosInc = posIncAtt.getPositionIncrement();

    // Note this will also be true if input is the empty
    // string (in which case we saw no tokens and
    // maxEndOffset is still -1), which in fact works out OK
    // because we fill the unigram with an empty BytesRef
    // below:
    boolean lastTokenEnded = offsetAtt.endOffset() > maxEndOffset || endPosInc > 0;
    //System.out.println("maxEndOffset=" + maxEndOffset + " vs " + offsetAtt.endOffset());

    if (lastTokenEnded) {
      //System.out.println("  lastTokenEnded");
      // If user hit space after the last token, then
      // "upgrade" all tokens.  This way "foo " will suggest
      // all bigrams starting w/ foo, and not any unigrams
      // starting with "foo":
      for(int i=grams-1;i>0;i--) {
        BytesRef token = lastTokens[i-1];
        if (token == null) {
          continue;
        }
        token.grow(token.length+1);
        token.bytes[token.length] = separator;
        token.length++;
        lastTokens[i] = token;
      }
      lastTokens[0] = new BytesRef();
    }

    Arc<Long> arc = new Arc<Long>();

    BytesReader bytesReader = fst.getBytesReader();

    // Try highest order models first, and if they return
    // results, return that; else, fallback:
    double backoff = 1.0;

    List<LookupResult> results = new ArrayList<LookupResult>(num);

    // We only add a given suffix once, from the highest
    // order model that saw it; for subsequent lower order
    // models we skip it:
    final Set<BytesRef> seen = new HashSet<BytesRef>();

    for(int gram=grams-1;gram>=0;gram--) {
      BytesRef token = lastTokens[gram];
      // Don't make unigram predictions from empty string:
      if (token == null || (token.length == 0 && key.length() > 0)) {
        // Input didn't have enough tokens:
        //System.out.println("  gram=" + gram + ": skip: not enough input");
        continue;
      }

      if (endPosInc > 0 && gram <= endPosInc) {
        // Skip hole-only predictions; in theory we
        // shouldn't have to do this, but we'd need to fix
        // ShingleFilter to produce only-hole tokens:
        //System.out.println("  break: only holes now");
        break;
      }

      //System.out.println("try " + (gram+1) + " gram token=" + token.utf8ToString());

      // TODO: we could add fuzziness here
      // match the prefix portion exactly
      //Pair<Long,BytesRef> prefixOutput = null;
      Long prefixOutput = null;
      try {
        prefixOutput = lookupPrefix(fst, bytesReader, token, arc);
      } catch (IOException bogus) {
        throw new RuntimeException(bogus);
      }
      //System.out.println("  prefixOutput=" + prefixOutput);

      if (prefixOutput == null) {
        // This model never saw this prefix, e.g. the
        // trigram model never saw context "purple mushroom"
        backoff *= ALPHA;
        continue;
      }

      // TODO: we could do this division at build time, and
      // bake it into the FST?

      // Denominator for computing scores from current
      // model's predictions:
      long contextCount = totTokens;

      BytesRef lastTokenFragment = null;

      // Split off the last token of the ngram: the leading context's
      // count (from the FST) becomes the denominator instead of totTokens.
      for(int i=token.length-1;i>=0;i--) {
        if (token.bytes[token.offset+i] == separator) {
          BytesRef context = new BytesRef(token.bytes, token.offset, i);
          Long output = Util.get(fst, Util.toIntsRef(context, new IntsRef()));
          assert output != null;
          contextCount = decodeWeight(output);
          lastTokenFragment = new BytesRef(token.bytes, token.offset + i + 1, token.length - i - 1);
          break;
        }
      }

      final BytesRef finalLastToken;

      if (lastTokenFragment == null) {
        finalLastToken = BytesRef.deepCopyOf(token);
      } else {
        finalLastToken = BytesRef.deepCopyOf(lastTokenFragment);
      }
      assert finalLastToken.offset == 0;

      CharsRef spare = new CharsRef();

      // complete top-N
      MinResult<Long> completions[] = null;
      try {
        // Because we store multiple models in one FST
        // (1gram, 2gram, 3gram), we must restrict the
        // search so that it only considers the current
        // model.  For highest order model, this is not
        // necessary since all completions in the FST
        // must be from this model, but for lower order
        // models we have to filter out the higher order
        // ones:

        // Must do num+seen.size() for queue depth because we may
        // reject up to seen.size() paths in acceptResult():
        Util.TopNSearcher<Long> searcher = new Util.TopNSearcher<Long>(fst, num, num+seen.size(), weightComparator) {
          BytesRef scratchBytes = new BytesRef();

          @Override
          protected void addIfCompetitive(Util.FSTPath<Long> path) {
            // Refuse to cross a separator: keeps the completion inside
            // the current gram order.
            if (path.arc.label != separator) {
              //System.out.println("    keep path: " + Util.toBytesRef(path.input, new BytesRef()).utf8ToString() + "; " + path + "; arc=" + path.arc);
              super.addIfCompetitive(path);
            } else {
              //System.out.println("    prevent path: " + Util.toBytesRef(path.input, new BytesRef()).utf8ToString() + "; " + path + "; arc=" + path.arc);
            }
          }

          @Override
          protected boolean acceptResult(IntsRef input, Long output) {
            Util.toBytesRef(input, scratchBytes);
            // Temporarily append the completed suffix to the last token
            // to test it against 'seen', then restore the length.
            finalLastToken.grow(finalLastToken.length + scratchBytes.length);
            int lenSav = finalLastToken.length;
            finalLastToken.append(scratchBytes);
            //System.out.println("    accept? input='" + scratchBytes.utf8ToString() + "'; lastToken='" + finalLastToken.utf8ToString() + "'; return " + (seen.contains(finalLastToken) == false));
            boolean ret = seen.contains(finalLastToken) == false;
            finalLastToken.length = lenSav;
            return ret;
          }
        };

        // since this search is initialized with a single start node
        // it is okay to start with an empty input path here
        searcher.addStartPaths(arc, prefixOutput, true, new IntsRef());

        completions = searcher.search();
      } catch (IOException bogus) {
        throw new RuntimeException(bogus);
      }

      int prefixLength = token.length;

      BytesRef suffix = new BytesRef(8);
      //System.out.println("    " + completions.length + " completions");

      nextCompletion:
      for (MinResult<Long> completion : completions) {
        token.length = prefixLength;
        // append suffix
        Util.toBytesRef(completion.input, suffix);
        token.append(suffix);

        //System.out.println("    completion " + token.utf8ToString());

        // Skip this path if a higher-order model already
        // saw/predicted its last token:
        BytesRef lastToken = token;
        for(int i=token.length-1;i>=0;i--) {
          if (token.bytes[token.offset+i] == separator) {
            assert token.length-i-1 > 0;
            lastToken = new BytesRef(token.bytes, token.offset+i+1, token.length-i-1);
            break;
          }
        }
        if (seen.contains(lastToken)) {
          //System.out.println("      skip dup " + lastToken.utf8ToString());
          continue nextCompletion;
        }
        seen.add(BytesRef.deepCopyOf(lastToken));
        spare.grow(token.length);
        UnicodeUtil.UTF8toUTF16(token, spare);
        // Score = backoff * (ngram count / context count), scaled into
        // the long range for LookupResult.
        LookupResult result = new LookupResult(spare.toString(), (long) (Long.MAX_VALUE * backoff * ((double) decodeWeight(completion.output)) / contextCount));
        results.add(result);
        assert results.size() == seen.size();
        //System.out.println("  add result=" + result);
      }
      backoff *= ALPHA;
    }

    // Highest score first; tie break on the suggestion text.
    Collections.sort(results, new Comparator<LookupResult>() {
      @Override
      public int compare(LookupResult a, LookupResult b) {
        if (a.value > b.value) {
          return -1;
        } else if (a.value < b.value) {
          return 1;
        } else {
          // Tie break by UTF16 sort order:
          return ((String) a.key).compareTo((String) b.key);
        }
      }
    });

    if (results.size() > num) {
      // Keep only the top num results.
      results.subList(num, results.size()).clear();
    }

    return results;
  }
}

/** weight -> cost */
private long encodeWeight(long ngramCount) {
  return Long.MAX_VALUE - ngramCount;
}

/** cost -> weight */
//private long decodeWeight(Pair<Long,BytesRef> output) {
private long decodeWeight(Long output) {
  assert output != null;
  // NOTE(review): the (int) cast truncates counts above
  // Integer.MAX_VALUE; presumably term frequencies stay far below
  // that — confirm before relying on huge corpora.
  return (int)(Long.MAX_VALUE - output);
}

// NOTE: copied from WFSTCompletionLookup & tweaked
// Walks the FST one byte of 'scratch' at a time, accumulating the
// output; returns null if some byte has no matching arc (prefix unknown).
private Long lookupPrefix(FST<Long> fst, FST.BytesReader bytesReader, BytesRef scratch, Arc<Long> arc) throws /*Bogus*/IOException {
  Long output = fst.outputs.getNoOutput();

  fst.getFirstArc(arc);

  byte[] bytes = scratch.bytes;
  int pos = scratch.offset;
  int end = pos + scratch.length;
  while (pos < end) {
    if (fst.findTargetArc(bytes[pos++] & 0xff, arc, arc, bytesReader) == null) {
      return null;
    } else {
      output = fst.outputs.add(output, arc.output);
    }
  }

  return output;
}

// Natural Long order; costs are Long.MAX_VALUE - count, so smaller
// cost = more frequent ngram.
static final Comparator<Long> weightComparator = new Comparator<Long> () {
  @Override
  public int compare(Long left, Long right) {
    return left.compareTo(right);
  }
};

/**
 * Returns the weight associated with an input string,
 * or null if it does not exist.
 */
public Object get(CharSequence key) {
  throw new UnsupportedOperationException();
}
}
/*
 * Copyright 2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.powermock.modules.junit4.common.internal.impl;

import junit.framework.TestCase;
import org.junit.Test;
import org.junit.runner.Description;
import org.junit.runner.manipulation.*;
import org.junit.runner.notification.RunNotifier;
import org.powermock.core.spi.PowerMockTestListener;
import org.powermock.core.spi.testresult.TestSuiteResult;
import org.powermock.core.spi.testresult.impl.TestSuiteResultImpl;
import org.powermock.modules.junit4.common.internal.JUnit4TestSuiteChunker;
import org.powermock.modules.junit4.common.internal.PowerMockJUnitRunnerDelegate;
import org.powermock.tests.utils.PowerMockTestNotifier;
import org.powermock.tests.utils.TestChunk;
import org.powermock.tests.utils.impl.AbstractTestSuiteChunkerImpl;
import org.powermock.tests.utils.impl.PowerMockTestNotifierImpl;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;

/**
 * Chunks a JUnit4 test class into test chunks (each with its own
 * classloader) and runs each chunk through a delegate runner of the
 * configured {@code runnerDelegateImplementationType}.
 */
public class JUnit4TestSuiteChunkerImpl extends AbstractTestSuiteChunkerImpl<PowerMockJUnitRunnerDelegate> implements JUnit4TestSuiteChunker, Filterable, Sortable {

  // Lazily built combined description of all chunked delegates.
  private Description description;
  private final Class<? extends PowerMockJUnitRunnerDelegate> runnerDelegateImplementationType;

  /**
   * Creates delegate runners for every chunk of {@code testClass}.
   *
   * @param testClass the test class to chunk; must not be null
   * @param runnerDelegateImplementationType the delegate runner type
   *        instantiated (per chunk) in each chunk's classloader
   * @throws Exception the original failure raised while creating the
   *         delegates (reflective wrappers are unwrapped below)
   */
  public JUnit4TestSuiteChunkerImpl(Class<?> testClass, Class<? extends PowerMockJUnitRunnerDelegate> runnerDelegateImplementationType) throws Exception {
    super(testClass);
    if (testClass == null) {
      throw new IllegalArgumentException("You must supply a test class");
    }

    if (runnerDelegateImplementationType == null) {
      throw new IllegalArgumentException("Runner delegate type cannot be null.");
    }

    this.runnerDelegateImplementationType = runnerDelegateImplementationType;
    try {
      createTestDelegators(testClass, getTestChunksEntries(testClass));
    } catch (InvocationTargetException e) {
      // Unwrap the reflective wrapper so callers see the real failure.
      final Throwable cause = e.getCause();
      if (cause instanceof Exception) {
        throw (Exception) cause;
      } else if (cause instanceof Error) {
        throw (Error) cause;
      } else {
        throw new RuntimeException(cause);
      }
    }
  }

  /**
   * Runs every chunk through its delegate, swapping the thread's context
   * classloader to the chunk's classloader around each run, and reports
   * aggregated suite results to the PowerMock listeners.
   */
  public void run(RunNotifier notifier) {
    List<TestChunk> chunkEntries = getTestChunks();
    Iterator<TestChunk> iterator = chunkEntries.iterator();

    if (delegates.size() != getChunkSize()) {
      throw new IllegalStateException("Internal error: There must be an equal number of suites and delegates.");
    }

    final Class<?> testClass = getTestClasses()[0];
    final PowerMockTestListener[] powerMockTestListeners = (PowerMockTestListener[]) getPowerMockTestListenersLoadedByASpecificClassLoader(testClass, this.getClass().getClassLoader());
    final Set<Method> allMethods = new LinkedHashSet<Method>();
    for (TestChunk testChunk : getTestChunks()) {
      allMethods.addAll(testChunk.getTestMethodsToBeExecutedByThisClassloader());
    }

    final Method[] allMethodsAsArray = allMethods.toArray(new Method[0]);
    final PowerMockTestNotifier powerMockTestNotifier = new PowerMockTestNotifierImpl(powerMockTestListeners);
    powerMockTestNotifier.notifyBeforeTestSuiteStarted(testClass, allMethodsAsArray);

    int failureCount = 0;
    int successCount = 0;
    int ignoreCount = 0;

    for (PowerMockJUnitRunnerDelegate delegate : delegates) {
      TestChunk next = iterator.next();
      final ClassLoader key = next.getClassLoader();
      // The listener tracks failures/ignores for this chunk only.
      PowerMockJUnit4RunListener powerMockListener = new PowerMockJUnit4RunListener(key, powerMockTestNotifier);
      notifier.addListener(powerMockListener);
      final ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(key);
      try {
        delegate.run(notifier);
      } finally {
        // Always restore, even if the delegate throws.
        Thread.currentThread().setContextClassLoader(originalClassLoader);
      }
      final int failureCountForThisPowerMockListener = powerMockListener.getFailureCount();
      final int ignoreCountForThisPowerMockListener = powerMockListener.getIgnoreCount();
      failureCount += failureCountForThisPowerMockListener;
      ignoreCount += ignoreCountForThisPowerMockListener;
      // Successes are whatever the chunk ran that neither failed nor
      // was ignored.
      successCount += delegate.getTestCount() - failureCountForThisPowerMockListener - ignoreCountForThisPowerMockListener;
      notifier.removeListener(powerMockListener);
    }

    final TestSuiteResult testSuiteResult = new TestSuiteResultImpl(failureCount, successCount, getTestCount(), ignoreCount);
    powerMockTestNotifier.notifyAfterTestSuiteEnded(testClass, allMethodsAsArray, testSuiteResult);
  }

  /**
   * A method is executed if it is a JUnit3-style test (public, void
   * return, name starts with "test", declared in a TestCase subclass)
   * OR carries the JUnit4 {@code @Test} annotation.  Note {@code &&}
   * binds tighter than {@code ||}, so the annotation check stands alone.
   */
  public boolean shouldExecuteTestForMethod(Class<?> testClass, Method potentialTestMethod) {
    return (potentialTestMethod.getName().startsWith("test") && Modifier.isPublic(potentialTestMethod.getModifiers()) && potentialTestMethod.getReturnType().equals(Void.TYPE) && TestCase.class.isAssignableFrom(testClass) || potentialTestMethod.isAnnotationPresent(Test.class));
  }

  /**
   * Reflectively instantiates the delegate runner inside {@code
   * classLoader}, handing it the test class and method names as seen by
   * that classloader.
   */
  @Override
  protected PowerMockJUnitRunnerDelegate createDelegatorFromClassloader(ClassLoader classLoader, Class<?> testClass, final List<Method> methodsToTest) throws Exception {
    Set<String> methodNames = new HashSet<String>();
    for (Method method : methodsToTest) {
      methodNames.add(method.getName());
    }

    final Class<?> testClassLoadedByMockedClassLoader = Class.forName(testClass.getName(), false, classLoader);

    /*
     * Array classes cannot be loaded be classloader.loadClass(..) in JDK 6.
     * See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6500212.
     */
    final Class<?> powerMockTestListenerArrayType = Class.forName(PowerMockTestListener[].class.getName(), false, classLoader);
    final Class<?> delegateClass = Class.forName(runnerDelegateImplementationType.getName(), false, classLoader);
    Constructor<?> con = delegateClass.getConstructor(new Class[] { Class.class, String[].class, powerMockTestListenerArrayType });
    final PowerMockJUnitRunnerDelegate newInstance = (PowerMockJUnitRunnerDelegate) con.newInstance(new Object[] { testClassLoadedByMockedClassLoader, methodNames.toArray(new String[0]), getPowerMockTestListenersLoadedByASpecificClassLoader(testClass, classLoader) });
    return newInstance;
  }

  /** Sums the delegates' test counts once, then caches the total. */
  public synchronized int getTestCount() {
    if (testCount == NOT_INITIALIZED) {
      testCount = 0;
      for (PowerMockJUnitRunnerDelegate delegate : delegates) {
        testCount += delegate.getTestCount();
      }
    }
    return testCount;
  }

  public Description getDescription() {
    if (description == null) {
      if (delegates.size() == 0) {
        /*
         * This happens if Test A extends Test B and B uses the @RunWith
         * annotation and there are no tests defined in class B.
         */
        return Description.createTestDescription(this.getClass(), "no tests in this class");
      }

      // Use the first delegator as the base for the description.
      PowerMockJUnitRunnerDelegate delegate = delegates.get(0);
      description = delegate.getDescription();

      /*
       * Add the remaining descriptions of all the chunked delegators. We
       * do this to make sure that we avoid adding chunks as "Unrooted
       * tests".
       */
      for (int i = 1; i < delegates.size(); i++) {
        // Get the method-level descriptions
        ArrayList<Description> children = delegates.get(i).getDescription().getChildren();
        // Add all method-level descriptions to the main description.
        for (Description methodDescription : children) {
          description.addChild(methodDescription);
        }
      }
    }
    return description;
  }

  /** Forwards the filter to every delegate that supports filtering. */
  public void filter(Filter filter) throws NoTestsRemainException {
    for (Object delegate : delegates) {
      if (delegate instanceof Filterable) {
        ((Filterable) delegate).filter(filter);
      }
    }
  }

  /** Forwards the sorter to every delegate that supports sorting. */
  public void sort(Sorter sorter) {
    for (Object delegate : delegates) {
      if (delegate instanceof Sortable) {
        ((Sortable) delegate).sort(sorter);
      }
    }
  }
}
package net.sf.jsqlparser.statement.select; /** * A limit clause in the form [LIMIT {[offset,] row_count) | (row_count | ALL) OFFSET offset}] */ public class Limit { private long offset; private long rowCount; private boolean rowCountJdbcParameter = false; private boolean offsetJdbcParameter = false; private boolean limitAll; private boolean comma = false; private String commentLimit; private String commentOffset; private String commentOffsetValue; private String commentLimitValue; private String commentAll; private String commentComma; private String commentAfterCommaValue; public long getOffset() { return offset; } public long getRowCount() { return rowCount; } public void setOffset(long l) { offset = l; } public void setRowCount(long l) { rowCount = l; } public boolean isOffsetJdbcParameter() { return offsetJdbcParameter; } public boolean isRowCountJdbcParameter() { return rowCountJdbcParameter; } public void setOffsetJdbcParameter(boolean b) { offsetJdbcParameter = b; } public void setRowCountJdbcParameter(boolean b) { rowCountJdbcParameter = b; } /** * @return true if the limit is "LIMIT ALL [OFFSET ...]) */ public boolean isLimitAll() { return limitAll; } public void setLimitAll(boolean b) { limitAll = b; } public String toString() { String retVal = ""; if (rowCount > 0 || rowCountJdbcParameter || limitAll) { if (limitAll) { retVal += (getCommentLimit() != null ? " "+getCommentLimit() : "") + " LIMIT " + (getCommentAll() != null ? getCommentAll()+" " : "") + "ALL"; } else { if (isComma()) { retVal +=(getCommentLimit() != null ? " "+getCommentLimit() : "") + " LIMIT " + (getCommentLimitValue() != null ? getCommentLimitValue()+" " : "") + (offsetJdbcParameter ? "?" : offset + ""); retVal +=(getCommentComma() != null ? " "+getCommentComma()+" " : "") + ", " + (getCommentAfterCommaValue() != null ? getCommentAfterCommaValue()+" " : "") + (rowCountJdbcParameter ? "?" : rowCount + ""); return retVal; } else { retVal += (getCommentLimit() != null ? 
" "+getCommentLimit() : "") + " LIMIT " + (getCommentLimitValue() != null ? getCommentLimitValue()+" " : "") + (rowCountJdbcParameter ? "?" : rowCount + ""); } } } if (offset > 0 || offsetJdbcParameter) { retVal += (getCommentOffset() != null ? " "+getCommentOffset() : "") + " OFFSET " + (getCommentOffsetValue() != null ? getCommentOffsetValue()+" " : "") + (offsetJdbcParameter ? "?" : offset + ""); } return retVal; } /** * @return the commentLimit */ public String getCommentLimit() { return commentLimit; } /** * @param commentLimit the commentLimit to set */ public void setCommentLimit(String commentLimit) { this.commentLimit = commentLimit; } /** * @return the commentOffset */ public String getCommentOffset() { return commentOffset; } /** * @param commentOffset the commentOffset to set */ public void setCommentOffset(String commentOffset) { this.commentOffset = commentOffset; } /** * @return the commentOffsetValue */ public String getCommentOffsetValue() { return commentOffsetValue; } /** * @param commentOffsetValue the commentOffsetValue to set */ public void setCommentOffsetValue(String commentOffsetValue) { this.commentOffsetValue = commentOffsetValue; } /** * @return the commentLimitValue */ public String getCommentLimitValue() { return commentLimitValue; } /** * @param commentLimitValue the commentLimitValue to set */ public void setCommentLimitValue(String commentLimitValue) { this.commentLimitValue = commentLimitValue; } /** * @return the comma */ public boolean isComma() { return comma; } /** * @param comma the comma to set */ public void setComma(boolean comma) { this.comma = comma; } /** * @return the commentAll */ public String getCommentAll() { return commentAll; } /** * @param commentAll the commentAll to set */ public void setCommentAll(String commentAll) { this.commentAll = commentAll; } /** * @return the commentComma */ public String getCommentComma() { return commentComma; } /** * @param commentComma the commentComma to set */ public void 
setCommentComma(String commentComma) { this.commentComma = commentComma; } /** * @return the commentAfterCommaValue */ public String getCommentAfterCommaValue() { return commentAfterCommaValue; } /** * @param commentAfterCommaValue the commentAfterCommaValue to set */ public void setCommentAfterCommaValue(String commentAfterCommaValue) { this.commentAfterCommaValue = commentAfterCommaValue; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.direct;

import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkState;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isA;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.theInstance;
import static org.junit.Assert.assertThat;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.util.UserCodeException;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Tests for {@link DoFnLifecycleManager}. */
@RunWith(JUnit4.class)
public class DoFnLifecycleManagerTest {
  @Rule public ExpectedException thrown = ExpectedException.none();

  private TestFn fn = new TestFn();
  private DoFnLifecycleManager mgr = DoFnLifecycleManager.of(fn);

  @Test
  public void setupOnGet() throws Exception {
    TestFn obtained = (TestFn) mgr.get();

    // The manager hands out a copy (not the original fn), already set up.
    assertThat(obtained, not(theInstance(fn)));
    assertThat(obtained.setupCalled, is(true));
    assertThat(obtained.teardownCalled, is(false));
  }

  @Test
  public void getMultipleCallsSingleSetupCall() throws Exception {
    // Repeated get() on the same thread returns the same instance.
    TestFn obtained = (TestFn) mgr.get();
    TestFn secondObtained = (TestFn) mgr.get();

    assertThat(obtained, theInstance(secondObtained));
    assertThat(obtained.setupCalled, is(true));
    assertThat(obtained.teardownCalled, is(false));
  }

  @Test
  public void getMultipleThreadsDifferentInstances() throws Exception {
    // 10 threads all call get() at once; each must see its own
    // set-up instance (sameInstances == 1 means no sharing).
    CountDownLatch startSignal = new CountDownLatch(1);
    ExecutorService executor = Executors.newCachedThreadPool();
    List<Future<TestFn>> futures = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
      futures.add(executor.submit(new GetFnCallable(mgr, startSignal)));
    }
    startSignal.countDown();
    List<TestFn> fns = new ArrayList<>();
    for (Future<TestFn> future : futures) {
      fns.add(future.get(1L, TimeUnit.SECONDS));
    }

    for (TestFn fn : fns) {
      assertThat(fn.setupCalled, is(true));
      int sameInstances = 0;
      for (TestFn otherFn : fns) {
        if (otherFn == fn) {
          sameInstances++;
        }
      }
      assertThat(sameInstances, equalTo(1));
    }
  }

  @Test
  public void teardownOnRemove() throws Exception {
    TestFn obtained = (TestFn) mgr.get();
    mgr.remove();

    assertThat(obtained, not(theInstance(fn)));
    assertThat(obtained.setupCalled, is(true));
    assertThat(obtained.teardownCalled, is(true));

    // After removal, a subsequent get() yields a fresh instance.
    assertThat(mgr.get(), not(Matchers.<DoFn<?, ?>>theInstance(obtained)));
  }

  @Test
  public void teardownThrowsRemoveThrows() throws Exception {
    TestFn obtained = (TestFn) mgr.get();
    // Tear down manually so the manager's own teardown call fails.
    obtained.teardown();

    thrown.expect(UserCodeException.class);
    thrown.expectCause(isA(IllegalStateException.class));
    thrown.expectMessage("Cannot call teardown: already torn down");
    mgr.remove();
  }

  @Test
  public void teardownAllOnRemoveAll() throws Exception {
    // Instances created on 10 threads must all be torn down by removeAll().
    CountDownLatch startSignal = new CountDownLatch(1);
    ExecutorService executor = Executors.newCachedThreadPool();
    List<Future<TestFn>> futures = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
      futures.add(executor.submit(new GetFnCallable(mgr, startSignal)));
    }
    startSignal.countDown();
    List<TestFn> fns = new ArrayList<>();
    for (Future<TestFn> future : futures) {
      fns.add(future.get(1L, TimeUnit.SECONDS));
    }
    mgr.removeAll();

    for (TestFn fn : fns) {
      assertThat(fn.setupCalled, is(true));
      assertThat(fn.teardownCalled, is(true));
    }
  }

  @Test
  public void removeAndRemoveAllConcurrent() throws Exception {
    CountDownLatch startSignal = new CountDownLatch(1);
    ExecutorService executor = Executors.newCachedThreadPool();
    List<Future<TestFn>> futures = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
      futures.add(executor.submit(new GetFnCallable(mgr, startSignal)));
    }
    startSignal.countDown();
    List<TestFn> fns = new ArrayList<>();
    for (Future<TestFn> future : futures) {
      fns.add(future.get(1L, TimeUnit.SECONDS));
    }

    CountDownLatch removeSignal = new CountDownLatch(1);
    List<Future<Void>> removeFutures = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
      // These will reuse the threads used in the GetFns
      removeFutures.add(executor.submit(new TeardownFnCallable(mgr, removeSignal)));
    }
    removeSignal.countDown();
    // Everything was already removed per-thread, so nothing remains.
    assertThat(mgr.removeAll(), Matchers.emptyIterable());
    for (Future<Void> removed : removeFutures) {
      // Should not have thrown an exception.
      removed.get();
    }

    for (TestFn fn : fns) {
      assertThat(fn.setupCalled, is(true));
      assertThat(fn.teardownCalled, is(true));
    }
  }

  /** Waits on the latch, then fetches this thread's fn from the manager. */
  private static class GetFnCallable implements Callable<TestFn> {
    private final DoFnLifecycleManager mgr;
    private final CountDownLatch startSignal;

    private GetFnCallable(DoFnLifecycleManager mgr, CountDownLatch startSignal) {
      this.mgr = mgr;
      this.startSignal = startSignal;
    }

    @Override
    public TestFn call() throws Exception {
      startSignal.await();
      return (TestFn) mgr.get();
    }
  }

  /** Waits on the latch, then removes this thread's fn from the manager. */
  private static class TeardownFnCallable implements Callable<Void> {
    private final DoFnLifecycleManager mgr;
    private final CountDownLatch startSignal;

    private TeardownFnCallable(DoFnLifecycleManager mgr, CountDownLatch startSignal) {
      this.mgr = mgr;
      this.startSignal = startSignal;
    }

    @Override
    public Void call() throws Exception {
      startSignal.await();
      // Will throw an exception if the TestFn has already been removed from this thread
      mgr.remove();
      return null;
    }
  }

  /** Records lifecycle calls and rejects out-of-order setup/teardown. */
  private static class TestFn extends DoFn<Object, Object> {
    boolean setupCalled = false;
    boolean teardownCalled = false;

    @Setup
    public void setup() {
      checkState(!setupCalled, "Cannot call setup: already set up");
      checkState(!teardownCalled, "Cannot call setup: already torn down");

      setupCalled = true;
    }

    @ProcessElement
    public void processElement(ProcessContext c) throws Exception {}

    @Teardown
    public void teardown() {
      checkState(setupCalled, "Cannot call teardown: not set up");
      checkState(!teardownCalled, "Cannot call teardown: already torn down");

      teardownCalled = true;
    }
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intellij.historyIntegrTests;

import static com.intellij.history.core.LocalVcsTestCase.list;

import com.intellij.history.core.Paths;
import com.intellij.history.core.revisions.Revision;
import com.intellij.history.utils.RunnableAdapter;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.*;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.Semaphore;

/**
 * Integration tests verifying that Local History records file system changes made
 * externally (outside the IDE) when they are picked up by a VFS refresh, both
 * synchronously and asynchronously, and that refreshes interleaved with commands
 * or listeners do not throw.
 */
public class ExternalChangesAndRefreshingTest extends IntegrationTestCase {
  public void testRefreshingSynchronously() throws Exception {
    doTestRefreshing(false);
  }

  public void testRefreshingAsynchronously() throws Exception {
    doTestRefreshing(true);
  }

  @Override
  protected void runBareRunnable(Runnable r) throws Throwable {
    if (getName().equals("testRefreshingAsynchronously")) {
      // This test waits for another thread to finish, which leads to a deadlock
      // on the Swing thread. Therefore we have to run this test outside of the
      // Swing thread.
      r.run();
    }
    else {
      super.runBareRunnable(r);
    }
  }

  /**
   * Creates two files externally (invisible to the VFS until refresh), then checks
   * that both gain Local History entries only after the refresh completes.
   */
  private void doTestRefreshing(boolean async) throws Exception {
    String path1 = createFileExternally("f1.txt");
    String path2 = createFileExternally("f2.txt");

    assertFalse(hasVcsEntry(path1));
    assertFalse(hasVcsEntry(path2));

    refreshVFS(async);

    assertTrue(hasVcsEntry(path1));
    assertTrue(hasVcsEntry(path2));

    assertEquals(2, getVcsRevisionsFor(root).size());
  }

  public void testChangeSetName() throws Exception {
    createFileExternally("f.txt");
    refreshVFS();
    // The refresh-produced change set is labeled "External change".
    Revision r = getVcsRevisionsFor(root).get(0);
    assertEquals("External change", r.getCauseChangeName());
  }

  public void testRefreshDuringCommand() {
    // shouldn't throw
    CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
      public void run() {
        refreshVFS();
      }
    }, "", null);
  }

  public void testRefreshingSpecifiedFiles() throws Exception {
    String f1 = createFileExternally("f1.txt");
    String f2 = createFileExternally("f2.txt");

    // Refresh only the two given files rather than the whole VFS.
    LocalFileSystem.getInstance().refreshIoFiles(list(new File(f1), new File(f2)));

    assertTrue(hasVcsEntry(f1));
    assertTrue(hasVcsEntry(f2));
  }

  public void testCommandDuringRefresh() throws Exception {
    createFileExternally("f.txt");
    // Executes a command from within the fileCreated notification fired by the refresh.
    VirtualFileListener l = new VirtualFileAdapter() {
      @Override
      public void fileCreated(VirtualFileEvent e) {
        executeSomeCommand();
      }
    };

    // shouldn't throw
    addFileListenerDuring(l, new Runnable() {
      public void run() {
        refreshVFS();
      }
    });
  }

  private void executeSomeCommand() {
    CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
      public void run() {
      }
    }, "", null);
  }

  public void testContentOfFileChangedDuringRefresh() throws Exception {
    final VirtualFile f = root.createChildData(null, "file.txt");
    f.setBinaryContent("before".getBytes());
    performAllPendingJobs();

    ContentChangesListener l = new ContentChangesListener(f);
    addFileListenerDuring(l, new RunnableAdapter() {
      @Override
      public void doRun() throws IOException {
        changeFileExternally(f.getPath(), "after");
        refreshVFS();
      }
    });

    // todo: unreliable test because content is recorded before LvcsFileListener does its job
    assertEquals("before", l.getContentBefore());
    assertEquals("after", l.getContentAfter());
  }

  private void performAllPendingJobs() {
    refreshVFS();
  }

  public void testFileCreationDuringRefresh() throws Exception {
    final String path = createFileExternally("f.txt");
    changeFileExternally(path, "content");

    // Captures the file's content as seen from inside the fileCreated event.
    final String[] content = new String[1];
    VirtualFileListener l = new VirtualFileAdapter() {
      @Override
      public void fileCreated(VirtualFileEvent e) {
        try {
          if (!e.getFile().getPath().equals(path)) return;
          content[0] = new String(e.getFile().contentsToByteArray());
        }
        catch (IOException ex) {
          throw new RuntimeException(ex);
        }
      }
    };
    addFileListenerDuring(l, new Runnable() {
      public void run() {
        refreshVFS();
      }
    });
    assertEquals("content", content[0]);
  }

  // Named ignoreTest... instead of test... so JUnit does not pick it up.
  public void ignoreTestCreationOfExcludedDirectoryDuringRefresh() throws Exception {
    // todo does not work due to FileListener order. FileIndex gets event later than Lvcs.
    VirtualFile dir = root.createChildDirectory(null, "EXCLUDED");
    String p = dir.getPath();
    assertTrue(hasVcsEntry(p));

    ModifiableRootModel m = ModuleRootManager.getInstance(myModule).getModifiableModel();
    m.getContentEntries()[0].addExcludeFolder(dir);
    m.commit();
    assertFalse(hasVcsEntry(p));

    dir.delete(null);
    createDirectoryExternally("EXCLUDED");
    refreshVFS();

    assertFalse(hasVcsEntry(p));
  }

  public void testDeletionOfFilteredDirectoryExternallyDoesNotThrowExceptionDuringRefresh() throws Exception {
    VirtualFile f = root.createChildDirectory(null, FILTERED_DIR_NAME);
    String path = Paths.appended(root.getPath(), FILTERED_DIR_NAME);
    assertFalse(hasVcsEntry(path));

    new File(path).delete();
    refreshVFS();
    assertFalse(hasVcsEntry(path));
  }

  public void testCreationOfExcludedDirWithFilesDuringRefreshShouldNotThrowException() throws Exception {
    // there was a problem with the DirectoryIndex - the files that were created during the refresh
    // were not correctly excluded, thereby causing the LocalHistory to fail during addition of
    // files under the excluded dir.
    File targetDir = createTargetDir();

    FileUtil.copyDir(targetDir, new File(root.getPath(), "target"));
    VirtualFileManager.getInstance().refresh(false);

    VirtualFile classes = root.findFileByRelativePath("target/classes");
    addExcludedDir(classes);
    classes.getParent().delete(null);

    FileUtil.copyDir(targetDir, new File(root.getPath(), "target"));
    VirtualFileManager.getInstance().refresh(false); // shouldn't throw
  }

  /** Builds a temp directory shaped like a build output: {@code <tmp>/classes/bak.txt}. */
  private File createTargetDir() throws IOException {
    File result = createTempDirectory();

    File classes = new File(result, "classes");
    classes.mkdir();
    new File(classes, "bak.txt").createNewFile();

    return result;
  }

  private void refreshVFS() {
    refreshVFS(false);
  }

  /**
   * Runs a VFS refresh and blocks until it completes: the semaphore's single permit
   * is taken up front, and the second acquire() only succeeds once the post-refresh
   * runnable releases it.
   */
  private void refreshVFS(boolean async) {
    try {
      final Semaphore s = new Semaphore(1);
      s.acquire();
      VirtualFileManager.getInstance().refresh(async, new Runnable() {
        public void run() {
          s.release();
        }
      });
      s.acquire();
    }
    catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
  }
}
/* * Copyright 2009-2013 by The Regents of the University of California * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * you may obtain a copy of the License from * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hyracks.storage.am.lsm.btree.impls; import java.io.File; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.io.FileReference; import org.apache.hyracks.data.std.primitive.IntegerPointable; import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference; import org.apache.hyracks.storage.am.bloomfilter.impls.BloomCalculations; import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilter; import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory; import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilterSpecification; import org.apache.hyracks.storage.am.btree.impls.BTree; import org.apache.hyracks.storage.am.btree.impls.BTree.BTreeAccessor; import org.apache.hyracks.storage.am.btree.impls.BTree.BTreeBulkLoader; import org.apache.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor; import org.apache.hyracks.storage.am.btree.impls.RangePredicate; import org.apache.hyracks.storage.am.common.api.IFreePageManager; import org.apache.hyracks.storage.am.common.api.IIndexAccessor; import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader; import 
org.apache.hyracks.storage.am.common.api.IIndexCursor; import org.apache.hyracks.storage.am.common.api.IIndexOperationContext; import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback; import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback; import org.apache.hyracks.storage.am.common.api.ISearchPredicate; import org.apache.hyracks.storage.am.common.api.ITreeIndex; import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor; import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory; import org.apache.hyracks.storage.am.common.api.IndexException; import org.apache.hyracks.storage.am.common.api.TreeIndexException; import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException; import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate; import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback; import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation; import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator; import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference; import org.apache.hyracks.storage.am.lsm.btree.tuples.LSMBTreeTupleReference; import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent; import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory; import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory; import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness; import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation; import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback; import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler; import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor; import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal; import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager; import 
org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext; import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy; import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker; import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache; import org.apache.hyracks.storage.am.lsm.common.freepage.VirtualFreePageManager; import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex; import org.apache.hyracks.storage.am.lsm.common.impls.BlockingIOOperationCallbackWrapper; import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences; import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFilterManager; import org.apache.hyracks.storage.am.lsm.common.impls.LSMIndexSearchCursor; import org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor; import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory; import org.apache.hyracks.storage.common.buffercache.IBufferCache; import org.apache.hyracks.storage.common.file.IFileMapProvider; public class LSMBTree extends AbstractLSMIndex implements ITreeIndex { // For creating BTree's used in flush and merge. protected final LSMBTreeDiskComponentFactory componentFactory; // For creating BTree's used in bulk load. Different from diskBTreeFactory // because it should have a different tuple writer in it's leaf frames. protected final LSMBTreeDiskComponentFactory bulkLoadComponentFactory; // Common for in-memory and on-disk components. 
protected final ITreeIndexFrameFactory insertLeafFrameFactory; protected final ITreeIndexFrameFactory deleteLeafFrameFactory; protected final IBinaryComparatorFactory[] cmpFactories; private final boolean needKeyDupCheck; private final int[] btreeFields; public LSMBTree(List<IVirtualBufferCache> virtualBufferCaches, ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory insertLeafFrameFactory, ITreeIndexFrameFactory deleteLeafFrameFactory, ILSMIndexFileManager fileManager, TreeIndexFactory<BTree> diskBTreeFactory, TreeIndexFactory<BTree> bulkLoadBTreeFactory, BloomFilterFactory bloomFilterFactory, ILSMComponentFilterFactory filterFactory, ILSMComponentFilterFrameFactory filterFrameFactory, LSMComponentFilterManager filterManager, double bloomFilterFalsePositiveRate, IFileMapProvider diskFileMapProvider, int fieldCount, IBinaryComparatorFactory[] cmpFactories, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, boolean needKeyDupCheck, int[] btreeFields, int[] filterFields, boolean durable) { super(virtualBufferCaches, diskBTreeFactory.getBufferCache(), fileManager, diskFileMapProvider, bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback, filterFrameFactory, filterManager, filterFields, durable); int i = 0; for (IVirtualBufferCache virtualBufferCache : virtualBufferCaches) { LSMBTreeMemoryComponent mutableComponent = new LSMBTreeMemoryComponent(new BTree(virtualBufferCache, virtualBufferCache.getFileMapProvider(), new VirtualFreePageManager( virtualBufferCache.getNumPages()), interiorFrameFactory, insertLeafFrameFactory, cmpFactories, fieldCount, new FileReference(new File(fileManager.getBaseDir() + "_virtual_" + i))), virtualBufferCache, i == 0 ? true : false, filterFactory == null ? 
null : filterFactory.createLSMComponentFilter()); memoryComponents.add(mutableComponent); ++i; } this.insertLeafFrameFactory = insertLeafFrameFactory; this.deleteLeafFrameFactory = deleteLeafFrameFactory; this.cmpFactories = cmpFactories; componentFactory = new LSMBTreeDiskComponentFactory(diskBTreeFactory, bloomFilterFactory, filterFactory); bulkLoadComponentFactory = new LSMBTreeDiskComponentFactory(bulkLoadBTreeFactory, bloomFilterFactory, filterFactory); this.needKeyDupCheck = needKeyDupCheck; this.btreeFields = btreeFields; } // Without memory components public LSMBTree(ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory insertLeafFrameFactory, ITreeIndexFrameFactory deleteLeafFrameFactory, ILSMIndexFileManager fileManager, TreeIndexFactory<BTree> diskBTreeFactory, TreeIndexFactory<BTree> bulkLoadBTreeFactory, BloomFilterFactory bloomFilterFactory, double bloomFilterFalsePositiveRate, IFileMapProvider diskFileMapProvider, int fieldCount, IBinaryComparatorFactory[] cmpFactories, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, boolean needKeyDupCheck, boolean durable) { super(diskBTreeFactory.getBufferCache(), fileManager, diskFileMapProvider, bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback, durable); this.insertLeafFrameFactory = insertLeafFrameFactory; this.deleteLeafFrameFactory = deleteLeafFrameFactory; this.cmpFactories = cmpFactories; componentFactory = new LSMBTreeDiskComponentFactory(diskBTreeFactory, bloomFilterFactory, null); bulkLoadComponentFactory = new LSMBTreeDiskComponentFactory(bulkLoadBTreeFactory, bloomFilterFactory, null); this.needKeyDupCheck = needKeyDupCheck; this.btreeFields = null; } @Override public synchronized void create() throws HyracksDataException { if (isActivated) { throw new HyracksDataException("Failed to create the index since it is activated."); } fileManager.deleteDirs(); 
fileManager.createDirs(); diskComponents.clear(); } @Override public synchronized void activate() throws HyracksDataException { if (isActivated) { throw new HyracksDataException("Failed to activate the index since it is already activated."); } for (ILSMComponent c : memoryComponents) { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; ((IVirtualBufferCache) mutableComponent.getBTree().getBufferCache()).open(); mutableComponent.getBTree().create(); mutableComponent.getBTree().activate(); } List<ILSMComponent> immutableComponents = diskComponents; immutableComponents.clear(); List<LSMComponentFileReferences> validFileReferences; try { validFileReferences = fileManager.cleanupAndGetValidFiles(); } catch (IndexException e) { throw new HyracksDataException(e); } for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) { LSMBTreeDiskComponent component; try { component = createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(), lsmComonentFileReference.getBloomFilterFileReference(), false); } catch (IndexException e) { throw new HyracksDataException(e); } immutableComponents.add(component); } isActivated = true; } @Override public synchronized void deactivate(boolean flushOnExit) throws HyracksDataException { if (!isActivated) { throw new HyracksDataException("Failed to deactivate the index since it is already deactivated."); } if (flushOnExit) { BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(ioOpCallback); ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE); accessor.scheduleFlush(cb); try { cb.waitForIO(); } catch (InterruptedException e) { throw new HyracksDataException(e); } } List<ILSMComponent> immutableComponents = diskComponents; for (ILSMComponent c : immutableComponents) { LSMBTreeDiskComponent component = (LSMBTreeDiskComponent) c; BTree btree = component.getBTree(); BloomFilter bloomFilter = 
component.getBloomFilter(); btree.deactivate(); bloomFilter.deactivate(); } for (ILSMComponent c : memoryComponents) { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; mutableComponent.getBTree().deactivate(); mutableComponent.getBTree().destroy(); ((IVirtualBufferCache) mutableComponent.getBTree().getBufferCache()).close(); } isActivated = false; } @Override public synchronized void deactivate() throws HyracksDataException { deactivate(true); } @Override public void destroy() throws HyracksDataException { if (isActivated) { throw new HyracksDataException("Failed to destroy the index since it is activated."); } List<ILSMComponent> immutableComponents = diskComponents; for (ILSMComponent c : immutableComponents) { LSMBTreeDiskComponent component = (LSMBTreeDiskComponent) c; component.getBTree().destroy(); component.getBloomFilter().destroy(); } for (ILSMComponent c : memoryComponents) { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; mutableComponent.getBTree().destroy(); } fileManager.deleteDirs(); } @Override public void clear() throws HyracksDataException { if (!isActivated) { throw new HyracksDataException("Failed to clear the index since it is not activated."); } List<ILSMComponent> immutableComponents = diskComponents; for (ILSMComponent c : memoryComponents) { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; mutableComponent.getBTree().clear(); mutableComponent.reset(); } for (ILSMComponent c : immutableComponents) { LSMBTreeDiskComponent component = (LSMBTreeDiskComponent) c; component.getBloomFilter().deactivate(); component.getBTree().deactivate(); component.getBloomFilter().destroy(); component.getBTree().destroy(); } immutableComponents.clear(); } @Override public void getOperationalComponents(ILSMIndexOperationContext ctx) throws HyracksDataException { List<ILSMComponent> immutableComponents = diskComponents; List<ILSMComponent> operationalComponents = ctx.getComponentHolder(); int 
cmc = currentMutableComponentId.get(); ctx.setCurrentMutableComponentId(cmc); int numMutableComponents = memoryComponents.size(); operationalComponents.clear(); switch (ctx.getOperation()) { case UPDATE: case UPSERT: case PHYSICALDELETE: case FLUSH: case DELETE: operationalComponents.add(memoryComponents.get(cmc)); break; case INSERT: for (int i = 0; i < numMutableComponents - 1; i++) { ILSMComponent c = memoryComponents.get((cmc + i + 1) % numMutableComponents); LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; if (mutableComponent.isReadable()) { // Make sure newest components are added first operationalComponents.add(0, mutableComponent); } } // The current mutable component is always added operationalComponents.add(0, memoryComponents.get(cmc)); operationalComponents.addAll(immutableComponents); break; case SEARCH: for (int i = 0; i < numMutableComponents - 1; i++) { ILSMComponent c = memoryComponents.get((cmc + i + 1) % numMutableComponents); LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; if (mutableComponent.isReadable()) { // Make sure newest components are added first operationalComponents.add(0, mutableComponent); } } // The current mutable component is always added operationalComponents.add(0, memoryComponents.get(cmc)); if (filterManager != null) { for (ILSMComponent c : immutableComponents) { if (c.getLSMComponentFilter().satisfy( ((AbstractSearchPredicate) ctx.getSearchPredicate()).getMinFilterTuple(), ((AbstractSearchPredicate) ctx.getSearchPredicate()).getMaxFilterTuple(), ((LSMBTreeOpContext) ctx).filterCmp)) { operationalComponents.add(c); } } } else { operationalComponents.addAll(immutableComponents); } break; case MERGE: operationalComponents.addAll(ctx.getComponentsToBeMerged()); break; case FULL_MERGE: operationalComponents.addAll(immutableComponents); break; case REPLICATE: operationalComponents.addAll(ctx.getComponentsToBeReplicated()); break; default: throw new 
UnsupportedOperationException("Operation " + ctx.getOperation() + " not supported."); } } @Override public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException, IndexException { LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx; ITupleReference indexTuple; if (ctx.indexTuple != null) { ctx.indexTuple.reset(tuple); indexTuple = ctx.indexTuple; } else { indexTuple = tuple; } switch (ctx.getOperation()) { case PHYSICALDELETE: ctx.currentMutableBTreeAccessor.delete(indexTuple); break; case INSERT: insert(indexTuple, ctx); break; default: ctx.currentMutableBTreeAccessor.upsert(indexTuple); break; } if (ctx.filterTuple != null) { ctx.filterTuple.reset(tuple); memoryComponents.get(currentMutableComponentId.get()).getLSMComponentFilter() .update(ctx.filterTuple, ctx.filterCmp); } } private boolean insert(ITupleReference tuple, LSMBTreeOpContext ctx) throws HyracksDataException, IndexException { ILSMComponent c = ctx.getComponentHolder().get(0); LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; MultiComparator comparator = MultiComparator.create(mutableComponent.getBTree().getComparatorFactories()); LSMBTreePointSearchCursor searchCursor = new LSMBTreePointSearchCursor(ctx); IIndexCursor memCursor = new BTreeRangeSearchCursor(ctx.currentMutableBTreeOpCtx.leafFrame, false); RangePredicate predicate = new RangePredicate(tuple, tuple, true, true, comparator, comparator); if (needKeyDupCheck) { // first check the inmemory component ctx.currentMutableBTreeAccessor.search(memCursor, predicate); try { if (memCursor.hasNext()) { memCursor.next(); LSMBTreeTupleReference lsmbtreeTuple = (LSMBTreeTupleReference) memCursor.getTuple(); if (!lsmbtreeTuple.isAntimatter()) { throw new TreeIndexDuplicateKeyException("Failed to insert key since key already exists."); } else { memCursor.close(); ctx.currentMutableBTreeAccessor.upsertIfConditionElseInsert(tuple, AntimatterAwareTupleAcceptor.INSTANCE); return true; } } } finally { 
memCursor.close(); } // TODO: Can we just remove the above code that search the mutable // component and do it together with the search call below? i.e. instead // of passing false to the lsmHarness.search(), we pass true to include // the mutable component? // the key was not in the inmemory component, so check the disk // components // This is a hack to avoid searching the current active mutable component twice. It is critical to add it back once the search is over. ILSMComponent firstComponent = ctx.getComponentHolder().remove(0); search(ctx, searchCursor, predicate); try { if (searchCursor.hasNext()) { throw new TreeIndexDuplicateKeyException("Failed to insert key since key already exists."); } } finally { searchCursor.close(); // Add the current active mutable component back ctx.getComponentHolder().add(0, firstComponent); } } ctx.currentMutableBTreeAccessor.upsertIfConditionElseInsert(tuple, AntimatterAwareTupleAcceptor.INSTANCE); return true; } @Override public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred) throws HyracksDataException, IndexException { LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx; List<ILSMComponent> operationalComponents = ctx.getComponentHolder(); LSMBTreeCursorInitialState initialState = new LSMBTreeCursorInitialState(insertLeafFrameFactory, ctx.cmp, ctx.bloomFilterCmp, lsmHarness, pred, ctx.searchCallback, operationalComponents); cursor.open(initialState, pred); } @Override public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException { ILSMComponent flushingComponent = ctx.getComponentHolder().get(0); LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference(); LSMBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE); assert ctx.getComponentHolder().size() == 1; opCtx.setOperation(IndexOperation.FLUSH); opCtx.getComponentHolder().add(flushingComponent); 
ILSMIndexAccessorInternal flushAccessor = new LSMBTreeAccessor(lsmHarness, opCtx); ioScheduler.scheduleOperation(new LSMBTreeFlushOperation(flushAccessor, flushingComponent, componentFileRefs .getInsertIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), callback, fileManager .getBaseDir())); } @Override public ILSMComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException { LSMBTreeFlushOperation flushOp = (LSMBTreeFlushOperation) operation; LSMBTreeMemoryComponent flushingComponent = (LSMBTreeMemoryComponent) flushOp.getFlushingComponent(); IIndexAccessor accessor = flushingComponent.getBTree().createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE); RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null); IIndexCursor countingCursor = ((BTreeAccessor) accessor).createCountingSearchCursor(); accessor.search(countingCursor, nullPred); long numElements = 0L; try { while (countingCursor.hasNext()) { countingCursor.next(); ITupleReference countTuple = countingCursor.getTuple(); numElements = IntegerPointable.getInteger(countTuple.getFieldData(0), countTuple.getFieldStart(0)); } } finally { countingCursor.close(); } int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements); BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement, bloomFilterFalsePositiveRate); LSMBTreeDiskComponent component = createDiskComponent(componentFactory, flushOp.getBTreeFlushTarget(), flushOp.getBloomFilterFlushTarget(), true); IIndexBulkLoader bulkLoader = component.getBTree().createBulkLoader(1.0f, false, numElements, false); IIndexBulkLoader builder = component.getBloomFilter().createBuilder(numElements, bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements()); IIndexCursor scanCursor = accessor.createSearchCursor(false); accessor.search(scanCursor, nullPred); try { while (scanCursor.hasNext()) { scanCursor.next(); 
builder.add(scanCursor.getTuple()); bulkLoader.add(scanCursor.getTuple()); } } finally { scanCursor.close(); builder.end(); } bulkLoader.end(); if (component.getLSMComponentFilter() != null) { List<ITupleReference> filterTuples = new ArrayList<ITupleReference>(); filterTuples.add(flushingComponent.getLSMComponentFilter().getMinTuple()); filterTuples.add(flushingComponent.getLSMComponentFilter().getMaxTuple()); filterManager.updateFilterInfo(component.getLSMComponentFilter(), filterTuples); filterManager.writeFilterInfo(component.getLSMComponentFilter(), component.getBTree()); } return component; } @Override public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException, IndexException { LSMBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE); opCtx.setOperation(IndexOperation.MERGE); List<ILSMComponent> mergingComponents = ctx.getComponentHolder(); boolean returnDeletedTuples = false; if (ctx.getComponentHolder().get(ctx.getComponentHolder().size() - 1) != diskComponents.get(diskComponents .size() - 1)) { returnDeletedTuples = true; } ITreeIndexCursor cursor = new LSMBTreeRangeSearchCursor(opCtx, returnDeletedTuples); BTree firstBTree = ((LSMBTreeDiskComponent) mergingComponents.get(0)).getBTree(); BTree lastBTree = ((LSMBTreeDiskComponent) mergingComponents.get(mergingComponents.size() - 1)).getBTree(); FileReference firstFile = diskFileMapProvider.lookupFileName(firstBTree.getFileId()); FileReference lastFile = diskFileMapProvider.lookupFileName(lastBTree.getFileId()); LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(firstFile.getFile() .getName(), lastFile.getFile().getName()); ILSMIndexAccessorInternal accessor = new LSMBTreeAccessor(lsmHarness, opCtx); ioScheduler.scheduleOperation(new LSMBTreeMergeOperation(accessor, mergingComponents, cursor, relMergeFileRefs .getInsertIndexFileReference(), 
relMergeFileRefs.getBloomFilterFileReference(), callback, fileManager .getBaseDir())); } @Override public ILSMComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException { LSMBTreeMergeOperation mergeOp = (LSMBTreeMergeOperation) operation; ITreeIndexCursor cursor = mergeOp.getCursor(); RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null); ILSMIndexOperationContext opCtx = ((LSMIndexSearchCursor) cursor).getOpCtx(); opCtx.getComponentHolder().addAll(mergeOp.getMergingComponents()); search(opCtx, cursor, rangePred); List<ILSMComponent> mergedComponents = mergeOp.getMergingComponents(); long numElements = 0L; for (int i = 0; i < mergedComponents.size(); ++i) { numElements += ((LSMBTreeDiskComponent) mergedComponents.get(i)).getBloomFilter().getNumElements(); } int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements); BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement, bloomFilterFalsePositiveRate); LSMBTreeDiskComponent mergedComponent = createDiskComponent(componentFactory, mergeOp.getBTreeMergeTarget(), mergeOp.getBloomFilterMergeTarget(), true); IIndexBulkLoader bulkLoader = mergedComponent.getBTree().createBulkLoader(1.0f, false, numElements, false); IIndexBulkLoader builder = mergedComponent.getBloomFilter().createBuilder(numElements, bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements()); try { while (cursor.hasNext()) { cursor.next(); ITupleReference frameTuple = cursor.getTuple(); builder.add(frameTuple); bulkLoader.add(frameTuple); } } finally { cursor.close(); builder.end(); } bulkLoader.end(); if (mergedComponent.getLSMComponentFilter() != null) { List<ITupleReference> filterTuples = new ArrayList<ITupleReference>(); for (int i = 0; i < mergeOp.getMergingComponents().size(); ++i) { filterTuples.add(mergeOp.getMergingComponents().get(i).getLSMComponentFilter().getMinTuple()); 
filterTuples.add(mergeOp.getMergingComponents().get(i).getLSMComponentFilter().getMaxTuple()); } filterManager.updateFilterInfo(mergedComponent.getLSMComponentFilter(), filterTuples); filterManager.writeFilterInfo(mergedComponent.getLSMComponentFilter(), mergedComponent.getBTree()); } return mergedComponent; } protected LSMBTreeDiskComponent createDiskComponent(LSMBTreeDiskComponentFactory factory, FileReference btreeFileRef, FileReference bloomFilterFileRef, boolean createComponent) throws HyracksDataException, IndexException { // Create new BTree instance. LSMBTreeDiskComponent component = (LSMBTreeDiskComponent) factory .createLSMComponentInstance(new LSMComponentFileReferences(btreeFileRef, null, bloomFilterFileRef)); if (createComponent) { component.getBTree().create(); component.getBloomFilter().create(); } // BTree will be closed during cleanup of merge(). component.getBTree().activate(); component.getBloomFilter().activate(); if (component.getLSMComponentFilter() != null) { filterManager.readFilterInfo(component.getLSMComponentFilter(), component.getBTree()); } return component; } @Override public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint, boolean checkIfEmptyIndex) throws TreeIndexException { try { return new LSMBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex); } catch (HyracksDataException e) { throw new TreeIndexException(e); } } protected ILSMComponent createBulkLoadTarget() throws HyracksDataException, IndexException { LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference(); return createDiskComponent(bulkLoadComponentFactory, componentFileRefs.getInsertIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true); } @Override public void markAsValid(ILSMComponent lsmComponent) throws HyracksDataException { // The order of forcing the dirty page to be flushed is critical. The // bloom filter must be always done first. 
LSMBTreeDiskComponent component = (LSMBTreeDiskComponent) lsmComponent; // Flush the bloom filter first. int fileId = component.getBloomFilter().getFileId(); IBufferCache bufferCache = component.getBTree().getBufferCache(); int startPage = 0; int maxPage = component.getBloomFilter().getNumPages(); forceFlushDirtyPages(bufferCache, fileId, startPage, maxPage); forceFlushDirtyPages(component.getBTree()); markAsValidInternal(component.getBTree()); } public class LSMBTreeBulkLoader implements IIndexBulkLoader { private final ILSMComponent component; private final BTreeBulkLoader bulkLoader; private final IIndexBulkLoader builder; private boolean cleanedUpArtifacts = false; private boolean isEmptyComponent = true; private boolean endedBloomFilterLoad = false; public final PermutingTupleReference indexTuple; public final PermutingTupleReference filterTuple; public final MultiComparator filterCmp; public LSMBTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint, boolean checkIfEmptyIndex) throws TreeIndexException, HyracksDataException { if (checkIfEmptyIndex && !isEmptyIndex()) { throw new TreeIndexException("Cannot load an index that is not empty"); } try { component = createBulkLoadTarget(); } catch (HyracksDataException | IndexException e) { throw new TreeIndexException(e); } bulkLoader = (BTreeBulkLoader) ((LSMBTreeDiskComponent) component).getBTree().createBulkLoader(fillFactor, verifyInput, numElementsHint, false); int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElementsHint); BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement, bloomFilterFalsePositiveRate); builder = ((LSMBTreeDiskComponent) component).getBloomFilter().createBuilder(numElementsHint, bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements()); if (filterFields != null) { indexTuple = new PermutingTupleReference(btreeFields); filterCmp = 
MultiComparator.create(component.getLSMComponentFilter().getFilterCmpFactories()); filterTuple = new PermutingTupleReference(filterFields); } else { indexTuple = null; filterCmp = null; filterTuple = null; } } @Override public void add(ITupleReference tuple) throws IndexException, HyracksDataException { try { ITupleReference t; if (indexTuple != null) { indexTuple.reset(tuple); t = indexTuple; } else { t = tuple; } bulkLoader.add(t); builder.add(t); if (filterTuple != null) { filterTuple.reset(tuple); component.getLSMComponentFilter().update(filterTuple, filterCmp); } } catch (IndexException | HyracksDataException | RuntimeException e) { cleanupArtifacts(); throw e; } if (isEmptyComponent) { isEmptyComponent = false; } } protected void cleanupArtifacts() throws HyracksDataException, IndexException { if (!cleanedUpArtifacts) { cleanedUpArtifacts = true; // We make sure to end the bloom filter load to release latches. if (!endedBloomFilterLoad) { builder.end(); endedBloomFilterLoad = true; } ((LSMBTreeDiskComponent) component).getBTree().deactivate(); ((LSMBTreeDiskComponent) component).getBTree().destroy(); ((LSMBTreeDiskComponent) component).getBloomFilter().deactivate(); ((LSMBTreeDiskComponent) component).getBloomFilter().destroy(); } } @Override public void end() throws HyracksDataException, IndexException { if (!cleanedUpArtifacts) { if (!endedBloomFilterLoad) { builder.end(); endedBloomFilterLoad = true; } bulkLoader.end(); if (component.getLSMComponentFilter() != null) { filterManager.writeFilterInfo(component.getLSMComponentFilter(), ((LSMBTreeDiskComponent) component).getBTree()); } if (isEmptyComponent) { cleanupArtifacts(); } else { lsmHarness.addBulkLoadedComponent(component); } } } } public LSMBTreeOpContext createOpContext(IModificationOperationCallback modificationCallback, ISearchOperationCallback searchCallback) { return new LSMBTreeOpContext(memoryComponents, insertLeafFrameFactory, deleteLeafFrameFactory, modificationCallback, searchCallback, 
componentFactory.getBloomFilterKeyFields().length, btreeFields, filterFields); } @Override public ILSMIndexAccessorInternal createAccessor(IModificationOperationCallback modificationCallback, ISearchOperationCallback searchCallback) { return new LSMBTreeAccessor(lsmHarness, createOpContext(modificationCallback, searchCallback)); } public class LSMBTreeAccessor extends LSMTreeIndexAccessor { public LSMBTreeAccessor(ILSMHarness lsmHarness, ILSMIndexOperationContext ctx) { super(lsmHarness, ctx); } @Override public IIndexCursor createSearchCursor(boolean exclusive) { return new LSMBTreeSearchCursor(ctx); } public MultiComparator getMultiComparator() { LSMBTreeOpContext concreteCtx = (LSMBTreeOpContext) ctx; return concreteCtx.cmp; } } @Override public IBufferCache getBufferCache() { return diskBufferCache; } @Override public IBinaryComparatorFactory[] getComparatorFactories() { return cmpFactories; } @Override public ITreeIndexFrameFactory getInteriorFrameFactory() { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) memoryComponents .get(currentMutableComponentId.get()); return mutableComponent.getBTree().getInteriorFrameFactory(); } @Override public int getFieldCount() { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) memoryComponents .get(currentMutableComponentId.get()); return mutableComponent.getBTree().getFieldCount(); } @Override public int getFileId() { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) memoryComponents .get(currentMutableComponentId.get()); return mutableComponent.getBTree().getFileId(); } @Override public IFreePageManager getFreePageManager() { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) memoryComponents .get(currentMutableComponentId.get()); return mutableComponent.getBTree().getFreePageManager(); } @Override public ITreeIndexFrameFactory getLeafFrameFactory() { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) memoryComponents 
.get(currentMutableComponentId.get()); return mutableComponent.getBTree().getLeafFrameFactory(); } @Override public long getMemoryAllocationSize() { long size = 0; for (ILSMComponent c : memoryComponents) { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; IBufferCache virtualBufferCache = mutableComponent.getBTree().getBufferCache(); size += virtualBufferCache.getNumPages() * virtualBufferCache.getPageSize(); } return size; } @Override public int getRootPageId() { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) memoryComponents .get(currentMutableComponentId.get()); return mutableComponent.getBTree().getRootPageId(); } @Override public void validate() throws HyracksDataException { for (ILSMComponent c : memoryComponents) { LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) c; mutableComponent.getBTree().validate(); } List<ILSMComponent> immutableComponents = diskComponents; for (ILSMComponent c : immutableComponents) { BTree btree = ((LSMBTreeDiskComponent) c).getBTree(); btree.validate(); } } @Override public String toString() { return "LSMBTree [" + fileManager.getBaseDir() + "]"; } @Override public boolean isPrimaryIndex() { return needKeyDupCheck; } @Override public Set<String> getLSMComponentPhysicalFiles(ILSMComponent lsmComponent) { Set<String> files = new HashSet<String>(); LSMBTreeDiskComponent component = (LSMBTreeDiskComponent) lsmComponent; files.add(component.getBTree().getFileReference().toString()); files.add(component.getBloomFilter().getFileReference().toString()); return files; } }
// File generated by OpenXava: Wed Sep 11 11:56:52 CEST 2013
// Archivo generado por OpenXava: Wed Sep 11 11:56:52 CEST 2013
// WARNING: NO EDIT
// OJO: NO EDITAR
// Component: Office Entity/Entidad

package org.openxava.test.model;

import java.util.*;
import java.math.*;
import java.rmi.RemoteException;

import org.openxava.component.MetaComponent;
import org.openxava.model.meta.MetaModel;
import org.openxava.util.*;

/**
 * Generated persistent POJO for the {@code Office} entity.
 *
 * <p>Plain properties ({@code number}, {@code zoneNumber}, {@code name},
 * {@code receptionist}) are held in their database representation and converted
 * on every access through lazily-resolved OpenXava converters. Reference
 * properties simulate Hibernate's {@code not-found='ignore'} behaviour by
 * probing the proxy with {@code toString()} and returning {@code null} on failure.
 *
 * <p>This file is produced by the OpenXava generator — do not edit by hand.
 *
 * @author MCarmen Gimeno
 */
public class Office implements java.io.Serializable, org.openxava.test.model.IOffice {

    // Constructor

    public Office() {
        initMembers();
    }

    // Gives every property its generated default value.
    private void initMembers() {
        setNumber(0);
        setZoneNumber(0);
        setName(null);
        setReceptionist(0);
    }

    // Properties/Propiedades

    // Converter for 'receptionist', lazily resolved from the model mapping.
    // NOTE(review): lazy init of a static field without synchronization — assumed
    // to happen on a single thread; confirm if accessed concurrently.
    private static org.openxava.converters.IConverter receptionistConverter;

    private org.openxava.converters.IConverter getReceptionistConverter() {
        if (receptionistConverter == null) {
            try {
                receptionistConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("receptionist");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "receptionist"));
            }
        }
        return receptionistConverter;
    }

    // Database representation of 'receptionist'.
    private java.lang.Integer receptionist;

    private java.lang.Integer get_Receptionist() {
        return receptionist;
    }

    private void set_Receptionist(java.lang.Integer newReceptionist) {
        this.receptionist = newReceptionist;
    }

    /**
     * Receptionist clerk number, converted from its database representation.
     * Conversion failures are reported and rethrown as RuntimeException.
     */
    public int getReceptionist() {
        try {
            return ((Integer) getReceptionistConverter().toJava(get_Receptionist())).intValue();
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Receptionist", "Office", "int"));
        }
    }

    /**
     * Stores the receptionist number in its database representation.
     */
    public void setReceptionist(int newReceptionist) {
        try {
            set_Receptionist((java.lang.Integer) getReceptionistConverter().toDB(new Integer(newReceptionist)));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Receptionist", "Office", "int"));
        }
    }

    // Converter for 'name', lazily resolved from the model mapping (see note above
    // on receptionistConverter about unsynchronized lazy init).
    private static org.openxava.converters.IConverter nameConverter;

    private org.openxava.converters.IConverter getNameConverter() {
        if (nameConverter == null) {
            try {
                nameConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("name");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "name"));
            }
        }
        return nameConverter;
    }

    // Database representation of 'name'.
    private java.lang.String name;

    private java.lang.String get_Name() {
        return name;
    }

    private void set_Name(java.lang.String newName) {
        this.name = newName;
    }

    /**
     * Office name, converted from its database representation.
     */
    public String getName() {
        try {
            return (String) getNameConverter().toJava(get_Name());
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Name", "Office", "String"));
        }
    }

    /**
     * Stores the office name in its database representation.
     */
    public void setName(String newName) {
        try {
            set_Name((java.lang.String) getNameConverter().toDB(newName));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Name", "Office", "String"));
        }
    }

    // Converter for 'zoneNumber', lazily resolved from the model mapping.
    private static org.openxava.converters.IConverter zoneNumberConverter;

    private org.openxava.converters.IConverter getZoneNumberConverter() {
        if (zoneNumberConverter == null) {
            try {
                zoneNumberConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("zoneNumber");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "zoneNumber"));
            }
        }
        return zoneNumberConverter;
    }

    // Database representation of 'zoneNumber'.
    private java.lang.Integer zoneNumber;

    private java.lang.Integer get_ZoneNumber() {
        return zoneNumber;
    }

    private void set_ZoneNumber(java.lang.Integer newZoneNumber) {
        this.zoneNumber = newZoneNumber;
    }

    /**
     * Zone number, converted from its database representation.
     */
    public int getZoneNumber() {
        try {
            return ((Integer) getZoneNumberConverter().toJava(get_ZoneNumber())).intValue();
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "ZoneNumber", "Office", "int"));
        }
    }

    /**
     * Stores the zone number in its database representation.
     */
    public void setZoneNumber(int newZoneNumber) {
        try {
            set_ZoneNumber((java.lang.Integer) getZoneNumberConverter().toDB(new Integer(newZoneNumber)));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "ZoneNumber", "Office", "int"));
        }
    }

    // Key property: stored directly, no converter involved.
    private int number;

    public int getNumber() {
        return number;
    }

    public void setNumber(int newNumber) {
        this.number = newNumber;
    }

    // References/Referencias

    private org.openxava.test.model.IClerk officeManager;
    // Foreign-key value kept alongside the reference (the clerk's number).
    private java.lang.Integer officeManager_number;

    /**
     * Office manager reference; returns {@code null} when the underlying proxy
     * cannot be resolved (simulating not-found='ignore').
     */
    public org.openxava.test.model.IClerk getOfficeManager() {
        if (officeManager != null) {
            // Because not-found='ignore' annul lazy initialization, we simulate it
            try {
                officeManager.toString();
            } catch (Exception ex) {
                return null;
            }
        }
        return officeManager;
    }

    /**
     * Sets the office manager; only POJO implementations are accepted
     * (an EJB remote reference is rejected). Also records the clerk number
     * as the foreign-key value.
     */
    public void setOfficeManager(org.openxava.test.model.IClerk newClerk) throws RemoteException {
        if (newClerk != null && !(newClerk instanceof org.openxava.test.model.Clerk)) {
            throw new IllegalArgumentException(XavaResources.getString("ejb_to_pojo_illegal"));
        }
        this.officeManager = newClerk;
        this.officeManager_number = newClerk == null?null:new Integer(newClerk.getNumber());
    }

    private org.openxava.test.model.ICarrier defaultCarrier;

    /**
     * Default carrier reference; returns {@code null} when the underlying proxy
     * cannot be resolved (simulating not-found='ignore').
     */
    public org.openxava.test.model.ICarrier getDefaultCarrier() {
        if (defaultCarrier != null) {
            // Because not-found='ignore' annul lazy initialization, we simulate it
            try {
                defaultCarrier.toString();
            } catch (Exception ex) {
                return null;
            }
        }
        return defaultCarrier;
    }

    /**
     * Sets the default carrier; only POJO implementations are accepted.
     * No foreign-key shadow field is generated for this reference.
     */
    public void setDefaultCarrier(org.openxava.test.model.ICarrier newCarrier) {
        if (newCarrier != null && !(newCarrier instanceof org.openxava.test.model.Carrier)) {
            throw new IllegalArgumentException(XavaResources.getString("ejb_to_pojo_illegal"));
        }
        this.defaultCarrier = newCarrier;
    }

    private org.openxava.test.model.IWarehouse mainWarehouse;
    // Foreign-key value kept alongside the reference (the warehouse's number).
    private java.lang.Integer mainWarehouse_number;

    /**
     * Main warehouse reference; returns {@code null} when the underlying proxy
     * cannot be resolved (simulating not-found='ignore').
     */
    public org.openxava.test.model.IWarehouse getMainWarehouse() {
        if (mainWarehouse != null) {
            // Because not-found='ignore' annul lazy initialization, we simulate it
            try {
                mainWarehouse.toString();
            } catch (Exception ex) {
                return null;
            }
        }
        return mainWarehouse;
    }

    /**
     * Sets the main warehouse; only POJO implementations are accepted. Also
     * records the warehouse number as the foreign-key value.
     */
    public void setMainWarehouse(org.openxava.test.model.IWarehouse newWarehouse) throws RemoteException {
        if (newWarehouse != null && !(newWarehouse instanceof org.openxava.test.model.Warehouse)) {
            throw new IllegalArgumentException(XavaResources.getString("ejb_to_pojo_illegal"));
        }
        this.mainWarehouse = newWarehouse;
        this.mainWarehouse_number = newWarehouse == null?null:new Integer(newWarehouse.getNumber());
    }

    // Colecciones/Collections

    // Methods/Metodos

    // User defined finders/Buscadores definidos por el usuario

    /**
     * Finds an Office by its number, using JPA or Hibernate depending on the
     * configured persistence provider.
     *
     * @throws javax.ejb.ObjectNotFoundException  if no Office has that number
     */
    public static Office findByNumber(int number) throws javax.ejb.ObjectNotFoundException {
        if (XavaPreferences.getInstance().isJPAPersistence()) {
            javax.persistence.Query query = org.openxava.jpa.XPersistence.getManager().createQuery("from Office as o where o.number = :arg0");
            query.setParameter("arg0", new Integer(number));
            try {
                return (Office) query.getSingleResult();
            } catch (Exception ex) {
                // In this way in order to work with Java pre 5
                if (ex.getClass().getName().equals("javax.persistence.NoResultException")) {
                    throw new javax.ejb.ObjectNotFoundException(XavaResources.getString("object_not_found", "Office"));
                } else {
                    ex.printStackTrace();
                    throw new RuntimeException(ex.getMessage());
                }
            }
        } else {
            org.hibernate.Query query = org.openxava.hibernate.XHibernate.getSession().createQuery("from Office as o where o.number = :arg0");
            query.setParameter("arg0", new Integer(number));
            Office r = (Office) query.uniqueResult();
            if (r == null) {
                throw new javax.ejb.ObjectNotFoundException(XavaResources.getString("object_not_found", "Office"));
            }
            return r;
        }
    }

    // Cached metadata for the 'Office' component (see lazy-init note above).
    private static MetaModel metaModel;

    public MetaModel getMetaModel() throws XavaException {
        if (metaModel == null) {
            metaModel = MetaComponent.get("Office").getMetaEntity();
        }
        return metaModel;
    }

    /**
     * String form delegated to the metamodel; falls back to the default
     * Object.toString() if the metamodel is unavailable.
     */
    public String toString() {
        try {
            return getMetaModel().toString(this);
        } catch (XavaException ex) {
            System.err.println(XavaResources.getString("toString_warning", "Office"));
            return super.toString();
        }
    }

    // NOTE(review): equality is based solely on toString(); there is no type
    // check, so any object whose string form matches compares equal. hashCode
    // is consistent with this definition.
    public boolean equals(Object other) {
        if (other == null) return false;
        return toString().equals(other.toString());
    }

    public int hashCode() {
        return toString().hashCode();
    }

}
/*
 * Copyright 2013 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.buffer;

import io.netty.util.internal.StringUtil;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ReadOnlyBufferException;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;

/**
 * Read-only ByteBuf which wraps a read-only ByteBuffer.
 *
 * <p>All read accessors delegate to the wrapped buffer (via a slice taken at
 * construction time), and every mutating operation throws
 * {@link ReadOnlyBufferException}.
 */
class ReadOnlyByteBufferBuf extends AbstractReferenceCountedByteBuf {

    // Big-endian slice of the caller's buffer; index 0 of this ByteBuf maps to
    // position 0 of the slice.
    protected final ByteBuffer buffer;
    private final ByteBufAllocator allocator;
    // Lazily created duplicate shared by the bulk-read operations; its
    // position/limit are freely clobbered by them.
    // NOTE(review): the cached duplicate is mutated without synchronization —
    // assumes single-threaded access, as is conventional for ByteBuf; confirm.
    private ByteBuffer tmpNioBuf;

    public ReadOnlyByteBufferBuf(ByteBufAllocator allocator, ByteBuffer buffer) {
        super(buffer.remaining());
        if (!buffer.isReadOnly()) {
            throw new IllegalArgumentException("must be a readonly buffer: " + StringUtil.simpleClassName(buffer));
        }

        this.allocator = allocator;
        // Slice so that this buf's index space starts at the buffer's current
        // position; force big-endian to match order().
        this.buffer = buffer.slice().order(ByteOrder.BIG_ENDIAN);
        // Present the whole wrapped content as already-written, readable bytes.
        writerIndex(this.buffer.limit());
    }

    @Override
    protected void deallocate() {
        // Nothing to release: the wrapped ByteBuffer is owned by the caller.
    }

    @Override
    public byte getByte(int index) {
        ensureAccessible();
        return _getByte(index);
    }

    @Override
    protected byte _getByte(int index) {
        return buffer.get(index);
    }

    @Override
    public short getShort(int index) {
        ensureAccessible();
        return _getShort(index);
    }

    @Override
    protected short _getShort(int index) {
        return buffer.getShort(index);
    }

    @Override
    public int getUnsignedMedium(int index) {
        ensureAccessible();
        return _getUnsignedMedium(index);
    }

    @Override
    protected int _getUnsignedMedium(int index) {
        // Assembles 3 bytes big-endian. Calls the public getByte, so the
        // accessibility check runs once per byte.
        return (getByte(index)     & 0xff) << 16 |
               (getByte(index + 1) & 0xff) <<  8 |
                getByte(index + 2) & 0xff;
    }

    @Override
    public int getInt(int index) {
        ensureAccessible();
        return _getInt(index);
    }

    @Override
    protected int _getInt(int index) {
        return buffer.getInt(index);
    }

    @Override
    public long getLong(int index) {
        ensureAccessible();
        return _getLong(index);
    }

    @Override
    protected long _getLong(int index) {
        return buffer.getLong(index);
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
        checkDstIndex(index, length, dstIndex, dst.capacity());
        // Pick the cheapest transfer path the destination supports.
        if (dst.hasArray()) {
            getBytes(index, dst.array(), dst.arrayOffset() + dstIndex, length);
        } else if (dst.nioBufferCount() > 0) {
            for (ByteBuffer bb: dst.nioBuffers(dstIndex, length)) {
                int bbLen = bb.remaining();
                getBytes(index, bb);
                index += bbLen;
            }
        } else {
            // Fall back to letting the destination pull from this buffer.
            dst.setBytes(dstIndex, this, index, length);
        }
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
        checkDstIndex(index, length, dstIndex, dst.length);
        // NOTE(review): this range check looks redundant — checkDstIndex above
        // already validates dstIndex/length against dst.length; confirm and
        // consider removing upstream.
        if (dstIndex < 0 || dstIndex > dst.length - length) {
            throw new IndexOutOfBoundsException(String.format(
                    "dstIndex: %d, length: %d (expected: range(0, %d))", dstIndex, length, dst.length));
        }

        ByteBuffer tmpBuf = internalNioBuffer();
        tmpBuf.clear().position(index).limit(index + length);
        tmpBuf.get(dst, dstIndex, length);
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuffer dst) {
        checkIndex(index);
        if (dst == null) {
            throw new NullPointerException("dst");
        }

        // Copy as much as fits in the destination, capped by our capacity.
        int bytesToCopy = Math.min(capacity() - index, dst.remaining());
        ByteBuffer tmpBuf = internalNioBuffer();
        tmpBuf.clear().position(index).limit(index + bytesToCopy);
        dst.put(tmpBuf);
        return this;
    }

    // All mutators reject: this buf is read-only by construction.
    @Override
    protected void _setByte(int index, int value) {
        throw new ReadOnlyBufferException();
    }

    @Override
    protected void _setShort(int index, int value) {
        throw new ReadOnlyBufferException();
    }

    @Override
    protected void _setMedium(int index, int value) {
        throw new ReadOnlyBufferException();
    }

    @Override
    protected void _setInt(int index, int value) {
        throw new ReadOnlyBufferException();
    }

    @Override
    protected void _setLong(int index, long value) {
        throw new ReadOnlyBufferException();
    }

    @Override
    public int capacity() {
        // Fixed capacity: the slice's size, captured as maxCapacity in the ctor.
        return maxCapacity();
    }

    @Override
    public ByteBuf capacity(int newCapacity) {
        throw new ReadOnlyBufferException();
    }

    @Override
    public ByteBufAllocator alloc() {
        return allocator;
    }

    @Override
    public ByteOrder order() {
        // The wrapped slice is forced to big-endian in the constructor.
        return ByteOrder.BIG_ENDIAN;
    }

    @Override
    public ByteBuf unwrap() {
        return null;
    }

    @Override
    public boolean isDirect() {
        return buffer.isDirect();
    }

    @Override
    public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
        ensureAccessible();
        if (length == 0) {
            return this;
        }

        if (buffer.hasArray()) {
            // Heap-backed: write straight from the backing array.
            out.write(buffer.array(), index + buffer.arrayOffset(), length);
        } else {
            // Direct/read-only: stage through a temporary heap array.
            byte[] tmp = new byte[length];
            ByteBuffer tmpBuf = internalNioBuffer();
            tmpBuf.clear().position(index);
            tmpBuf.get(tmp);
            out.write(tmp);
        }
        return this;
    }

    @Override
    public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
        ensureAccessible();
        if (length == 0) {
            return 0;
        }

        ByteBuffer tmpBuf = internalNioBuffer();
        tmpBuf.clear().position(index).limit(index + length);
        return out.write(tmpBuf);
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
        throw new ReadOnlyBufferException();
    }

    @Override
    public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
        throw new ReadOnlyBufferException();
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuffer src) {
        throw new ReadOnlyBufferException();
    }

    @Override
    public int setBytes(int index, InputStream in, int length) throws IOException {
        throw new ReadOnlyBufferException();
    }

    @Override
    public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
        throw new ReadOnlyBufferException();
    }

    // Returns the lazily-created shared duplicate used by the bulk operations.
    protected final ByteBuffer internalNioBuffer() {
        ByteBuffer tmpNioBuf = this.tmpNioBuf;
        if (tmpNioBuf == null) {
            this.tmpNioBuf = tmpNioBuf = buffer.duplicate();
        }
        return tmpNioBuf;
    }

    @Override
    public ByteBuf copy(int index, int length) {
        ensureAccessible();
        ByteBuffer src;
        try {
            // limit(index + length) throws IllegalArgumentException when the
            // requested range exceeds the buffer; translate to the ByteBuf
            // contract's IndexOutOfBoundsException.
            src = (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
        } catch (IllegalArgumentException e) {
            throw new IndexOutOfBoundsException("Too many bytes to read - Need " + (index + length));
        }

        // NOTE(review): the copy is always a direct buffer, even when the
        // wrapped buffer is heap-based — confirm this is intended.
        ByteBuffer dst = ByteBuffer.allocateDirect(length);
        dst.put(src);
        dst.order(order());
        dst.clear();
        return new UnpooledDirectByteBuf(alloc(), dst, maxCapacity());
    }

    @Override
    public int nioBufferCount() {
        return 1;
    }

    @Override
    public ByteBuffer[] nioBuffers(int index, int length) {
        return new ByteBuffer[] { nioBuffer(index, length) };
    }

    @Override
    public ByteBuffer nioBuffer(int index, int length) {
        // Fresh duplicate: safe to hand out, does not disturb tmpNioBuf.
        return (ByteBuffer) buffer.duplicate().position(index).limit(index + length);
    }

    @Override
    public ByteBuffer internalNioBuffer(int index, int length) {
        ensureAccessible();
        return (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
    }

    @Override
    public boolean hasArray() {
        return buffer.hasArray();
    }

    @Override
    public byte[] array() {
        return buffer.array();
    }

    @Override
    public int arrayOffset() {
        return buffer.arrayOffset();
    }

    @Override
    public boolean hasMemoryAddress() {
        return false;
    }

    @Override
    public long memoryAddress() {
        throw new UnsupportedOperationException();
    }
}
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.common.util.concurrent; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import com.google.common.annotations.Beta; import com.google.common.base.Supplier; import com.google.j2objc.annotations.WeakOuter; import java.util.concurrent.Callable; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.concurrent.GuardedBy; /** * Base class for services that can implement {@link #startUp} and {@link #shutDown} but while in * the "running" state need to perform a periodic task. Subclasses can implement {@link #startUp}, * {@link #shutDown} and also a {@link #runOneIteration} method that will be executed periodically. 
* * <p>This class uses the {@link ScheduledExecutorService} returned from {@link #executor} to run * the {@link #startUp} and {@link #shutDown} methods and also uses that service to schedule the * {@link #runOneIteration} that will be executed periodically as specified by its {@link * Scheduler}. When this service is asked to stop via {@link #stopAsync} it will cancel the periodic * task (but not interrupt it) and wait for it to stop before running the {@link #shutDown} method. * * <p>Subclasses are guaranteed that the life cycle methods ({@link #runOneIteration}, {@link * #startUp} and {@link #shutDown}) will never run concurrently. Notably, if any execution of {@link * #runOneIteration} takes longer than its schedule defines, then subsequent executions may start * late. Also, all life cycle methods are executed with a lock held, so subclasses can safely * modify shared state without additional synchronization necessary for visibility to later * executions of the life cycle methods. * * <h3>Usage Example</h3> * * <p>Here is a sketch of a service which crawls a website and uses the scheduling capabilities to * rate limit itself. <pre> {@code * class CrawlingService extends AbstractScheduledService { * private Set<Uri> visited; * private Queue<Uri> toCrawl; * protected void startUp() throws Exception { * toCrawl = readStartingUris(); * } * * protected void runOneIteration() throws Exception { * Uri uri = toCrawl.remove(); * Collection<Uri> newUris = crawl(uri); * visited.add(uri); * for (Uri newUri : newUris) { * if (!visited.contains(newUri)) { toCrawl.add(newUri); } * } * } * * protected void shutDown() throws Exception { * saveUris(toCrawl); * } * * protected Scheduler scheduler() { * return Scheduler.newFixedRateSchedule(0, 1, TimeUnit.SECONDS); * } * }}</pre> * * <p>This class uses the life cycle methods to read in a list of starting URIs and save the set of * outstanding URIs when shutting down. 
Also, it takes advantage of the scheduling functionality to
 * rate limit the number of queries we perform.
 *
 * @author Luke Sandberg
 * @since 11.0
 */
@Beta
public abstract class AbstractScheduledService implements Service {
  private static final Logger logger = Logger.getLogger(AbstractScheduledService.class.getName());

  /**
   * A scheduler defines the policy for how the {@link AbstractScheduledService} should run its
   * task.
   *
   * <p>Consider using the {@link #newFixedDelaySchedule} and {@link #newFixedRateSchedule} factory
   * methods, these provide {@link Scheduler} instances for the common use case of running the
   * service with a fixed schedule. If more flexibility is needed then consider subclassing {@link
   * CustomScheduler}.
   *
   * @author Luke Sandberg
   * @since 11.0
   */
  public abstract static class Scheduler {
    /**
     * Returns a {@link Scheduler} that schedules the task using the {@link
     * ScheduledExecutorService#scheduleWithFixedDelay} method.
     *
     * @param initialDelay the time to delay first execution
     * @param delay the delay between the termination of one execution and the commencement of the
     *     next
     * @param unit the time unit of the initialDelay and delay parameters
     */
    public static Scheduler newFixedDelaySchedule(
        final long initialDelay, final long delay, final TimeUnit unit) {
      checkNotNull(unit);
      checkArgument(delay > 0, "delay must be > 0, found %s", delay);
      return new Scheduler() {
        @Override
        public Future<?> schedule(
            AbstractService service, ScheduledExecutorService executor, Runnable task) {
          return executor.scheduleWithFixedDelay(task, initialDelay, delay, unit);
        }
      };
    }

    /**
     * Returns a {@link Scheduler} that schedules the task using the {@link
     * ScheduledExecutorService#scheduleAtFixedRate} method.
     *
     * @param initialDelay the time to delay first execution
     * @param period the period between successive executions of the task
     * @param unit the time unit of the initialDelay and period parameters
     */
    public static Scheduler newFixedRateSchedule(
        final long initialDelay, final long period, final TimeUnit unit) {
      checkNotNull(unit);
      checkArgument(period > 0, "period must be > 0, found %s", period);
      return new Scheduler() {
        @Override
        public Future<?> schedule(
            AbstractService service, ScheduledExecutorService executor, Runnable task) {
          return executor.scheduleAtFixedRate(task, initialDelay, period, unit);
        }
      };
    }

    /** Schedules the task to run on the provided executor on behalf of the service. */
    abstract Future<?> schedule(
        AbstractService service, ScheduledExecutorService executor, Runnable runnable);

    // Private constructor: the only permitted subclasses are the anonymous ones created by the
    // factory methods above and the nested CustomScheduler below.
    private Scheduler() {}
  }

  /* use AbstractService for state management */
  // The delegate owns all Service lifecycle state; this class forwards every Service method to it.
  private final AbstractService delegate = new ServiceDelegate();

  @WeakOuter
  private final class ServiceDelegate extends AbstractService {

    // A handle to the running task so that we can stop it when a shutdown has been requested.
    // These two fields are volatile because their values will be accessed from multiple threads.
    private volatile Future<?> runningTask;
    private volatile ScheduledExecutorService executorService;

    // This lock protects the task so we can ensure that none of the template methods (startUp,
    // shutDown or runOneIteration) run concurrently with one another.
    // TODO(lukes): why don't we use ListenableFuture to sequence things? Then we could drop the
    // lock.
    private final ReentrantLock lock = new ReentrantLock();

    @WeakOuter
    class Task implements Runnable {
      @Override
      public void run() {
        lock.lock();
        try {
          if (runningTask.isCancelled()) {
            // task may have been cancelled while blocked on the lock.
            return;
          }
          AbstractScheduledService.this.runOneIteration();
        } catch (Throwable t) {
          // A failed iteration fails the whole service: best-effort shutDown (still under the
          // lock, so it cannot race with another template method), then notifyFailed and cancel
          // so the executor never invokes this task again.
          try {
            shutDown();
          } catch (Exception ignored) {
            logger.log(
                Level.WARNING,
                "Error while attempting to shut down the service after failure.",
                ignored);
          }
          notifyFailed(t);
          runningTask.cancel(false); // prevent future invocations.
        } finally {
          lock.unlock();
        }
      }
    }

    private final Runnable task = new Task();

    @Override
    protected final void doStart() {
      // Wrap the user-supplied executor so its threads are renamed to "<serviceName> <state>".
      executorService =
          MoreExecutors.renamingDecorator(
              executor(),
              new Supplier<String>() {
                @Override
                public String get() {
                  return serviceName() + " " + state();
                }
              });
      executorService.execute(
          new Runnable() {
            @Override
            public void run() {
              lock.lock();
              try {
                startUp();
                runningTask = scheduler().schedule(delegate, executorService, task);
                notifyStarted();
              } catch (Throwable t) {
                notifyFailed(t);
                if (runningTask != null) {
                  // prevent the task from running if possible
                  runningTask.cancel(false);
                }
              } finally {
                lock.unlock();
              }
            }
          });
    }

    @Override
    protected final void doStop() {
      // Cancel first (outside the lock) so a queued-but-not-started iteration observes the
      // cancellation, then run shutDown on the service's own executor under the lock.
      runningTask.cancel(false);
      executorService.execute(
          new Runnable() {
            @Override
            public void run() {
              try {
                lock.lock();
                try {
                  if (state() != State.STOPPING) {
                    // This means that the state has changed since we were scheduled. This implies
                    // that an execution of runOneIteration has thrown an exception and we have
                    // transitioned to a failed state, also this means that shutDown has already
                    // been called, so we do not want to call it again.
                    return;
                  }
                  shutDown();
                } finally {
                  lock.unlock();
                }
                notifyStopped();
              } catch (Throwable t) {
                notifyFailed(t);
              }
            }
          });
    }

    @Override
    public String toString() {
      // Report the outer service's identity rather than the delegate's.
      return AbstractScheduledService.this.toString();
    }
  }

  /** Constructor for use by subclasses. */
  protected AbstractScheduledService() {}

  /**
   * Run one iteration of the scheduled task. If any invocation of this method throws an exception,
   * the service will transition to the {@link Service.State#FAILED} state and this method will no
   * longer be called.
   */
  protected abstract void runOneIteration() throws Exception;

  /**
   * Start the service.
   *
   * <p>By default this method does nothing.
   */
  protected void startUp() throws Exception {}

  /**
   * Stop the service. This is guaranteed not to run concurrently with {@link #runOneIteration}.
   *
   * <p>By default this method does nothing.
   */
  protected void shutDown() throws Exception {}

  /**
   * Returns the {@link Scheduler} object used to configure this service. This method will only be
   * called once.
   */
  protected abstract Scheduler scheduler();

  /**
   * Returns the {@link ScheduledExecutorService} that will be used to execute the {@link #startUp},
   * {@link #runOneIteration} and {@link #shutDown} methods. If this method is overridden the
   * executor will not be {@linkplain ScheduledExecutorService#shutdown shutdown} when this service
   * {@linkplain Service.State#TERMINATED terminates} or {@linkplain Service.State#TERMINATED
   * fails}. Subclasses may override this method to supply a custom {@link ScheduledExecutorService}
   * instance. This method is guaranteed to only be called once.
   *
   * <p>By default this returns a new {@link ScheduledExecutorService} with a single thread thread
   * pool that sets the name of the thread to the {@linkplain #serviceName() service name}. Also,
   * the pool will be {@linkplain ScheduledExecutorService#shutdown() shut down} when the service
   * {@linkplain Service.State#TERMINATED terminates} or {@linkplain Service.State#TERMINATED
   * fails}.
   */
  protected ScheduledExecutorService executor() {
    @WeakOuter
    class ThreadFactoryImpl implements ThreadFactory {
      @Override
      public Thread newThread(Runnable runnable) {
        return MoreExecutors.newThread(serviceName(), runnable);
      }
    }
    final ScheduledExecutorService executor =
        Executors.newSingleThreadScheduledExecutor(new ThreadFactoryImpl());
    // Add a listener to shutdown the executor after the service is stopped. This ensures that the
    // JVM shutdown will not be prevented from exiting after this service has stopped or failed.
    // Technically this listener is added after start() was called so it is a little gross, but it
    // is called within doStart() so we know that the service cannot terminate or fail concurrently
    // with adding this listener so it is impossible to miss an event that we are interested in.
    addListener(
        new Listener() {
          @Override
          public void terminated(State from) {
            executor.shutdown();
          }

          @Override
          public void failed(State from, Throwable failure) {
            executor.shutdown();
          }
        },
        directExecutor());
    return executor;
  }

  /**
   * Returns the name of this service. {@link AbstractScheduledService} may include the name in
   * debugging output.
   *
   * @since 14.0
   */
  protected String serviceName() {
    return getClass().getSimpleName();
  }

  @Override
  public String toString() {
    return serviceName() + " [" + state() + "]";
  }

  @Override
  public final boolean isRunning() {
    return delegate.isRunning();
  }

  @Override
  public final State state() {
    return delegate.state();
  }

  /**
   * @since 13.0
   */
  @Override
  public final void addListener(Listener listener, Executor executor) {
    delegate.addListener(listener, executor);
  }

  /**
   * @since 14.0
   */
  @Override
  public final Throwable failureCause() {
    return delegate.failureCause();
  }

  /**
   * @since 15.0
   */
  @Override
  public final Service startAsync() {
    delegate.startAsync();
    return this;
  }

  /**
   * @since 15.0
   */
  @Override
  public final Service stopAsync() {
    delegate.stopAsync();
    return this;
  }

  /**
   * @since 15.0
   */
  @Override
  public final void awaitRunning() {
    delegate.awaitRunning();
  }

  /**
   * @since 15.0
   */
  @Override
  public final void awaitRunning(long timeout, TimeUnit unit) throws TimeoutException {
    delegate.awaitRunning(timeout, unit);
  }

  /**
   * @since 15.0
   */
  @Override
  public final void awaitTerminated() {
    delegate.awaitTerminated();
  }

  /**
   * @since 15.0
   */
  @Override
  public final void awaitTerminated(long timeout, TimeUnit unit) throws TimeoutException {
    delegate.awaitTerminated(timeout, unit);
  }

  /**
   * A {@link Scheduler} that provides a convenient way for the {@link AbstractScheduledService} to
   * use a dynamically changing schedule. After every execution of the task, assuming it hasn't been
   * cancelled, the {@link #getNextSchedule} method will be called.
   *
   * @author Luke Sandberg
   * @since 11.0
   */
  @Beta
  public abstract static class CustomScheduler extends Scheduler {

    /** A callable class that can reschedule itself using a {@link CustomScheduler}. */
    private class ReschedulableCallable extends ForwardingFuture<Void> implements Callable<Void> {

      /** The underlying task. */
      private final Runnable wrappedRunnable;

      /** The executor on which this Callable will be scheduled. */
      private final ScheduledExecutorService executor;

      /**
       * The service that is managing this callable. This is used so that failure can be reported
       * properly.
       */
      private final AbstractService service;

      /**
       * This lock is used to ensure safe and correct cancellation, it ensures that a new task is
       * not scheduled while a cancel is ongoing. Also it protects the currentFuture variable to
       * ensure that it is assigned atomically with being scheduled.
       */
      private final ReentrantLock lock = new ReentrantLock();

      /** The future that represents the next execution of this task. */
      @GuardedBy("lock")
      private Future<Void> currentFuture;

      ReschedulableCallable(
          AbstractService service, ScheduledExecutorService executor, Runnable runnable) {
        this.wrappedRunnable = runnable;
        this.executor = executor;
        this.service = service;
      }

      @Override
      public Void call() throws Exception {
        wrappedRunnable.run();
        reschedule();
        return null;
      }

      /** Atomically reschedules this task and assigns the new future to {@link #currentFuture}. */
      public void reschedule() {
        // invoke the callback outside the lock, prevents some shenanigans.
        Schedule schedule;
        try {
          schedule = CustomScheduler.this.getNextSchedule();
        } catch (Throwable t) {
          service.notifyFailed(t);
          return;
        }
        // We reschedule ourselves with a lock held for two reasons. 1. we want to make sure that
        // cancel calls cancel on the correct future. 2. we want to make sure that the assignment
        // to currentFuture doesn't race with itself so that currentFuture is assigned in the
        // correct order.
        Throwable scheduleFailure = null;
        lock.lock();
        try {
          if (currentFuture == null || !currentFuture.isCancelled()) {
            currentFuture = executor.schedule(this, schedule.delay, schedule.unit);
          }
        } catch (Throwable e) {
          // If an exception is thrown by the subclass then we need to make sure that the service
          // notices and transitions to the FAILED state. We do it by calling notifyFailed directly
          // because the service does not monitor the state of the future so if the exception is not
          // caught and forwarded to the service the task would stop executing but the service would
          // have no idea.
          // TODO(lukes): consider building everything in terms of ListenableScheduledFuture then
          // the AbstractService could monitor the future directly. Rescheduling is still hard...
          // but it would help with some of these lock ordering issues.
          scheduleFailure = e;
        } finally {
          lock.unlock();
        }
        // Call notifyFailed outside the lock to avoid lock ordering issues.
        if (scheduleFailure != null) {
          service.notifyFailed(scheduleFailure);
        }
      }

      // N.B. Only protect cancel and isCancelled because those are the only methods that are
      // invoked by the AbstractScheduledService.
      @Override
      public boolean cancel(boolean mayInterruptIfRunning) {
        // Ensure that a task cannot be rescheduled while a cancel is ongoing.
        lock.lock();
        try {
          return currentFuture.cancel(mayInterruptIfRunning);
        } finally {
          lock.unlock();
        }
      }

      @Override
      public boolean isCancelled() {
        lock.lock();
        try {
          return currentFuture.isCancelled();
        } finally {
          lock.unlock();
        }
      }

      @Override
      protected Future<Void> delegate() {
        throw new UnsupportedOperationException(
            "Only cancel and isCancelled is supported by this future");
      }
    }

    @Override
    final Future<?> schedule(
        AbstractService service, ScheduledExecutorService executor, Runnable runnable) {
      ReschedulableCallable task = new ReschedulableCallable(service, executor, runnable);
      task.reschedule();
      return task;
    }

    /**
     * A value object that represents an absolute delay until a task should be invoked.
     *
     * @author Luke Sandberg
     * @since 11.0
     */
    @Beta
    protected static final class Schedule {

      private final long delay;
      private final TimeUnit unit;

      /**
       * @param delay the time from now to delay execution
       * @param unit the time unit of the delay parameter
       */
      public Schedule(long delay, TimeUnit unit) {
        this.delay = delay;
        this.unit = checkNotNull(unit);
      }
    }

    /**
     * Calculates the time at which to next invoke the task.
     *
     * <p>This is guaranteed to be called immediately after the task has completed an iteration and
     * on the same thread as the previous execution of {@link
     * AbstractScheduledService#runOneIteration}.
     *
     * @return a schedule that defines the delay before the next execution.
     */
    protected abstract Schedule getNextSchedule() throws Exception;
  }
}
package org.jbehave.examples.core.steps; import static java.util.Arrays.asList; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Map; import org.jbehave.core.annotations.Alias; import org.jbehave.core.annotations.Aliases; import org.jbehave.core.annotations.AsParameterConverter; import org.jbehave.core.annotations.Given; import org.jbehave.core.annotations.Named; import org.jbehave.core.annotations.Then; import org.jbehave.core.annotations.When; import org.jbehave.core.model.ExamplesTable; import org.jbehave.core.model.OutcomesTable; import org.jbehave.core.steps.CandidateSteps; import org.jbehave.core.steps.InstanceStepsFactory; import org.jbehave.core.steps.Parameters; import org.jbehave.examples.core.CoreStory; import org.jbehave.examples.core.model.Stock; import org.jbehave.examples.core.model.Stock.AlertStatus; import org.jbehave.examples.core.model.Trader; import org.jbehave.examples.core.persistence.TraderPersister; import org.jbehave.examples.core.service.TradingService; /** * POJO holding the candidate steps for the trader example. * The {@link CandidateSteps} instance wrapping this are created via the {@link InstanceStepsFactory} * in the {@link CoreStory}. 
*/ public class TraderSteps { private TradingService service; private ThreadLocal<Stock> stock = new ThreadLocal<>(); private Trader trader; private List<Trader> traders = new ArrayList<>(); private List<Trader> searchedTraders; private Date date; private ExamplesTable ranksTable; private String stockExchange; private String assetClass; private TradeType tradeType; public TraderSteps() { this(new TradingService()); } public TraderSteps(TradingService service) { this.service = service; } public TradingService getService() { return this.service; } @Given("a date of $date") public void aDate(Date date) { this.date = date; } @When("$days days pass") public void daysPass(int days) { Calendar calendar = Calendar.getInstance(); calendar.setTime(date); calendar.add(Calendar.DAY_OF_YEAR, days); date = calendar.getTime(); } @Then("the date is $date") public void theDate(Date date) { assertThat(date, equalTo(this.date)); } @Given("a trader of {name|id} $trader") public void aTrader(Trader trader) { this.trader = trader; } @Given("the trader ranks: $ranksTable") @Alias("the traders: $ranksTable") public void theTraderRanks(ExamplesTable ranksTable) { this.ranksTable = ranksTable; traders.clear(); traders.addAll(toTraders(ranksTable)); } @When("traders are subset to \"$regex\" by name") @Alias("traders are filtered by \"$regex\"") public void subsetTradersByName(String regex) { searchedTraders = new ArrayList<>(); for (Trader trader : traders) { if (trader.getName().matches(regex)) { searchedTraders.add(trader); } } } @Then("the current trader activity is: $activityTable") public void theTradersActivityIs(ExamplesTable activityTable) { for (int i = 0; i < activityTable.getRowCount(); i++) { Parameters row = activityTable.withDefaults(this.ranksTable.getRowAsParameters(i)).getRowAsParameters(i); System.out.println(row.valueAs("name", Trader.class) + " (" + row.valueAs("rank", String.class, "N/A") + ") has done " + row.valueAs("trades", Integer.class) + " trades"); } } @Then("the 
traders returned are: $tradersTable") public void theTradersReturnedAre(ExamplesTable tradersTable) { OutcomesTable outcomes = new OutcomesTable(); outcomes.addOutcome("traders", searchedTraders.toString(), equalTo(toTraders(tradersTable).toString())); outcomes.addOutcome("a success", "Any Value", equalTo("Any Value")); outcomes.verify(); } private List<Trader> toTraders(ExamplesTable table) { List<Trader> traders = new ArrayList<>(); List<Map<String, String>> rows = table.getRows(); for (Map<String, String> row : rows) { String name = row.get("name"); String rank = row.get("rank"); traders.add(getService().newTrader(name, rank)); } Collections.sort(traders); return traders; } @Given("the stock exchange $stockExchange") @Alias("the stock exchange <stockExchange>") public void theStockExchange(@Named("stockExchange") String stockExchange) { this.stockExchange = stockExchange; } @Given("the asset class $assetClass") @Alias("the asset class <assetClass>") public void theAssetClass(@Named("assetClass") String assetClass) { this.assetClass = assetClass; } @Given("a stock of symbol $symbol and a threshold of $threshold") @Alias("a stock of <symbol> and a <threshold>") // alias used with examples table public void aStock(@Named("symbol") String symbol, @Named("threshold") double threshold) { stock.set(getService().newStock(symbol, threshold)); } @When("the stock is traded at price $price") @Aliases(values = { "the stock is sold at price $price", "the stock is exchanged at price $price", "the stock is traded with <price>" }) // multiple aliases, one used with examples table public void theStockIsTraded(@Named("price") double price) { stock.get().tradeAt(price); } @Given("the alert status is $status") // shows that matching pattern need only be unique for step type public void theAlertStatusIsReset(@Named("status") String status) { if (AlertStatus.OFF.name().startsWith(status) && stock.get() != null) { stock.get().resetAlert(); } } @Then("the alert status is $status") 
@Alias("the trader is alerted with <status>") // alias used with examples table public void theAlertStatusIs(@Named("status") String status) { assertThat(stock.get().getStatus().name(), equalTo(status)); } @Then(value = "the alert status is currently $status", priority = 1) // prioritise over potential match with previous method public void theAlertStatusIsCurrently(@Named("status") String status) { assertThat(stock.get().getStatus().name(), equalTo(status)); } @When("the trader sells all stocks") public void theTraderSellsAllStocks() { trader.sellAllStocks(); } @Then("the trader is left with no stocks") public void theTraderIsLeftWithNoStocks() { assertThat(trader.getStocks().size(), equalTo(0)); } @Given("a trade type $tradeType") public void givenATradeType(TradeType type) { this.tradeType = type; } @Then("the current trade type is $type") public void thenTheCurrentTradeTypeIs(String type) { assertThat(this.tradeType.name(), equalTo(type)); } @Then("the list of trade types is $types") public void thenTheListTradeTypesIs(List<TradeType> types) { List<String> values = new ArrayList<>(); for (TradeType type : TradeType.values()) { values.add(type.name()); } assertThat(types.toString(), equalTo(values.toString())); } enum TradeType { BUY, SELL; } // Method used as dynamical parameter converter @AsParameterConverter public Trader retrieveTrader(String name) { for (Trader trader : traders) { if (trader.getName().equals(name)) { return trader; } } return mockTradePersister().retrieveTrader(name); } static TraderPersister mockTradePersister() { return new TraderPersister(new Trader("Mauro", asList(new Stock("STK1", 10.d)))); } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.search;

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchResponse.Clusters;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.xpack.core.search.action.AsyncSearchResponse;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import static org.elasticsearch.xpack.core.async.AsyncTaskIndexService.restoreResponseHeadersContext;

/**
 * A mutable search response that allows to update and create partial response synchronously.
 * Synchronized methods ensure that updates of the content are blocked if another thread is
 * creating an async response concurrently. This limits the number of final reduction that can
 * run concurrently to 1 and ensures that we pause the search progress when an {@link AsyncSearchResponse} is built.
 */
class MutableSearchResponse {
    private final int totalShards;
    private final int skippedShards;
    private final Clusters clusters;
    // Per-shard failures; null when totalShards == -1 (the request failed before the shard list was known).
    private final AtomicArray<ShardSearchFailure> shardFailures;
    private final ThreadContext threadContext;
    // True while the response only reflects a partial reduction (or a failure with partial results).
    private boolean isPartial;
    private int successfulShards;
    private TotalHits totalHits;
    /**
     * How we get the reduced aggs when {@link #finalResponse} isn't populated.
     * We default to returning no aggs, this {@code -> null}. We'll replace
     * this as we receive updates on the search progress listener.
     */
    private Supplier<InternalAggregations> reducedAggsSource = () -> null;
    private int reducePhase;
    /**
     * The response produced by the search API. Once we receive it we stop
     * building our own {@linkplain SearchResponse}s when get async search
     * is called, and instead return this.
     */
    private SearchResponse finalResponse;
    private ElasticsearchException failure;
    // Response headers captured from the thread context when the search completed or failed.
    private Map<String, List<String>> responseHeaders;

    // Once frozen (final response or fatal failure received) any further update is a bug.
    private boolean frozen;

    /**
     * Creates a new mutable search response.
     *
     * @param totalShards The number of shards that participate in the request, or -1 to indicate a failure.
     * @param skippedShards The number of skipped shards, or -1 to indicate a failure.
     * @param clusters The remote clusters statistics.
     * @param threadContext The thread context to retrieve the final response headers.
     */
    MutableSearchResponse(int totalShards,
                          int skippedShards,
                          Clusters clusters,
                          ThreadContext threadContext) {
        this.totalShards = totalShards;
        this.skippedShards = skippedShards;
        this.clusters = clusters;
        this.shardFailures = totalShards == -1 ? null : new AtomicArray<>(totalShards-skippedShards);
        this.isPartial = true;
        this.threadContext = threadContext;
        // Lower bound until the first partial reduction reports real totals.
        this.totalHits = new TotalHits(0L, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
    }

    /**
     * Updates the response with the result of a partial reduction.
     * @param reducedAggs is a strategy for producing the reduced aggs
     */
    synchronized void updatePartialResponse(int successfulShards, TotalHits totalHits,
            Supplier<InternalAggregations> reducedAggs, int reducePhase) {
        failIfFrozen();
        if (reducePhase < this.reducePhase) {
            // should never happen since partial response are updated under a lock
            // in the search phase controller
            throw new IllegalStateException("received partial response out of order: "
                + reducePhase + " < " + this.reducePhase);
        }
        //when we get partial results skipped shards are not included in the provided number of successful shards
        this.successfulShards = successfulShards + skippedShards;
        this.totalHits = totalHits;
        this.reducedAggsSource = reducedAggs;
        this.reducePhase = reducePhase;
    }

    /**
     * Updates the response with the final {@link SearchResponse} once the
     * search is complete.
     */
    synchronized void updateFinalResponse(SearchResponse response) {
        failIfFrozen();
        assert response.getTotalShards() == totalShards :
            "received number of total shards differs from the one " +
            "notified through onListShards";
        assert response.getSkippedShards() == skippedShards :
            "received number of skipped shards differs from the one " +
            "notified through onListShards";
        assert response.getFailedShards() == buildShardFailures().length :
            "number of tracked failures differs from failed shards";
        // copy the response headers from the current context
        this.responseHeaders = threadContext.getResponseHeaders();
        this.finalResponse = response;
        this.isPartial = false;
        this.frozen = true;
    }

    /**
     * Updates the response with a fatal failure. This method preserves the partial response
     * received from previous updates
     */
    synchronized void updateWithFailure(ElasticsearchException exc) {
        failIfFrozen();
        // copy the response headers from the current context
        this.responseHeaders = threadContext.getResponseHeaders();
        //note that when search fails, we may have gotten partial results before the failure. In that case async
        // search will return an error plus the last partial results that were collected.
        this.isPartial = true;
        this.failure = exc;
        this.frozen = true;
    }

    /**
     * Adds a shard failure concurrently (non-blocking).
     */
    void addShardFailure(int shardIndex, ShardSearchFailure failure) {
        // Only the frozen check takes the monitor; the write itself is lock-free because
        // AtomicArray#set is thread-safe, keeping this hot path non-blocking.
        synchronized (this) {
            failIfFrozen();
        }
        shardFailures.set(shardIndex, failure);
    }

    // Builds a SearchResponse from the current partial state (empty hits, optional reduced aggs).
    private SearchResponse buildResponse(long taskStartTimeNanos, InternalAggregations reducedAggs) {
        InternalSearchResponse internal = new InternalSearchResponse(
            new SearchHits(SearchHits.EMPTY, totalHits, Float.NaN), reducedAggs, null, null, false, false, reducePhase);
        long tookInMillis = TimeValue.timeValueNanos(System.nanoTime() - taskStartTimeNanos).getMillis();
        return new SearchResponse(internal, null, totalShards, successfulShards, skippedShards,
            tookInMillis, buildShardFailures(), clusters);
    }

    /**
     * Creates an {@link AsyncSearchResponse} based on the current state of the mutable response.
     * The final reduce of the aggregations is executed if needed (partial response).
     * This method is synchronized to ensure that we don't perform final reduces concurrently.
     * This method also restores the response headers in the current thread context when requested, if the final response is available.
     */
    synchronized AsyncSearchResponse toAsyncSearchResponse(AsyncSearchTask task,
                                                           long expirationTime,
                                                           boolean restoreResponseHeaders) {
        if (restoreResponseHeaders && responseHeaders != null) {
            restoreResponseHeadersContext(threadContext, responseHeaders);
        }
        SearchResponse searchResponse;
        if (finalResponse != null) {
            // We have a final response, use it.
            searchResponse = finalResponse;
        } else if (clusters == null) {
            // An error occurred before we got the shard list
            searchResponse = null;
        } else {
            /*
             * Build the response, reducing aggs if we haven't already and
             * storing the result of the reduction so we won't have to reduce
             * the same aggregation results a second time if nothing has changed.
             * This does cost memory because we have a reference to the finally
             * reduced aggs sitting around which can't be GCed until we get an update.
             */
            InternalAggregations reducedAggs = reducedAggsSource.get();
            reducedAggsSource = () -> reducedAggs;
            searchResponse = buildResponse(task.getStartTimeNanos(), reducedAggs);
        }
        // frozen == false means the search is still running.
        return new AsyncSearchResponse(task.getExecutionId().getEncoded(), searchResponse,
            failure, isPartial, frozen == false, task.getStartTime(), expirationTime);
    }

    /**
     * Creates an {@link AsyncSearchResponse} for a failed final reduce: the reduce exception is
     * returned (with any previously recorded failure suppressed) along with a response built
     * without aggregations.
     */
    synchronized AsyncSearchResponse toAsyncSearchResponse(AsyncSearchTask task,
                                                           long expirationTime,
                                                           ElasticsearchException reduceException) {
        if (this.failure != null) {
            reduceException.addSuppressed(this.failure);
        }
        return new AsyncSearchResponse(task.getExecutionId().getEncoded(),
            buildResponse(task.getStartTimeNanos(), null),
            reduceException, isPartial, frozen == false, task.getStartTime(), expirationTime);
    }

    // Guard against updates after updateFinalResponse/updateWithFailure have sealed the state.
    private void failIfFrozen() {
        if (frozen) {
            throw new IllegalStateException("invalid update received after the completion of the request");
        }
    }

    // Collects the non-null entries of the sparse per-shard failure array.
    private ShardSearchFailure[] buildShardFailures() {
        if (shardFailures == null) {
            return ShardSearchFailure.EMPTY_ARRAY;
        }
        List<ShardSearchFailure> failures = new ArrayList<>();
        for (int i = 0; i < shardFailures.length(); i++) {
            ShardSearchFailure failure = shardFailures.get(i);
            if (failure != null) {
                failures.add(failure);
            }
        }
        return failures.toArray(ShardSearchFailure[]::new);
    }
}
package grapheditor_beta;

import java.awt.BorderLayout;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.print.Printable;
import java.awt.print.PrinterJob;
import java.io.File;
import java.util.Collection;
import java.util.Iterator;
import javax.swing.ImageIcon;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;

/**
 * Editor window wrapping a {@link GraphImpl} (either a {@link GraphEditor} or a
 * {@link RandomGraphEditor}). Builds the File menu (New/Find/Save/Print/Exit) and
 * restores the parent frame when this window closes.
 *
 * @author John Gasparis
 */
public class GraphGUI extends JFrame {

    private GraphImpl graph = null;
    /** Parent frame to re-show when this editor window closes. */
    private JFrame frame;
    private JMenuBar bar;
    private JMenu fileMenu;

    /**
     * Creates the GUI according to which concrete editor is passed.
     *
     * @param frame the parent frame, made visible again on window close / exit
     * @param graph the editor model/view; its type only selects the window title
     */
    public GraphGUI(final JFrame frame, GraphImpl graph) {
        super();
        this.frame = frame;
        // The concrete subtype only affects the title; the field holds the common
        // GraphImpl supertype, so the previous down-casts were redundant (and would
        // have thrown ClassCastException for any other GraphImpl subtype).
        if (graph instanceof GraphEditor) {
            this.setTitle("Graph Editor");
        } else {
            this.setTitle("Random Graph Editor");
        }
        this.graph = graph;
        createMenu();
        add(graph.getGraphZoomScrollPane());
        add(graph.getPanel(), BorderLayout.SOUTH);
        this.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent winEvt) {
                frame.setVisible(true);
            }
        });
        setIconImage(Toolkit.getDefaultToolkit().getImage(this.getClass().getResource("images/icon.gif")));
    }

    /**
     * Creates the menu bar and the File menu with all its items and listeners.
     */
    private void createMenu() {
        bar = new JMenuBar();
        setJMenuBar(bar);
        fileMenu = new JMenu("File");
        final JMenuItem findItem = new JMenuItem("Find");

        JMenuItem newItem = new JMenuItem("New");
        newItem.setMnemonic('N');
        newItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Snapshot edges/vertices into arrays before removing, so we never
                // mutate the collections while iterating over them.
                Collection<Number> cEdges = graph.getGraph().getEdges();
                Collection<Number> cVertices = graph.getGraph().getVertices();
                for (Number edge : cEdges.toArray(new Number[0])) {
                    graph.getGraph().removeEdge(edge);
                }
                for (Number vertex : cVertices.toArray(new Number[0])) {
                    graph.getGraph().removeVertex(vertex);
                }
                graph.reset();
                graph.getGraphZoomScrollPane().repaint();
                graph.trasnformGraphDefault();
                if (graph instanceof RandomGraphEditor) {
                    // A random graph is regenerated through a fresh dialog; replace this window.
                    GraphGUI.this.setVisible(false);
                    RandomGraphEditor.create(frame, true, null, null, null);
                    GraphGUI.this.dispose();
                }
            }
        });
        fileMenu.add(newItem);

        findItem.setMnemonic('F');
        findItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                SwingUtilities.invokeLater(new Runnable() {
                    DFSImpl dfs;

                    @Override
                    public void run() {
                        // NOTE(review): the DFSImpl instance is never used afterwards —
                        // presumably its constructor starts the search UI; confirm.
                        if (graph instanceof GraphEditor) {
                            dfs = new DFSImpl((GraphEditor) graph, null);
                        } else {
                            dfs = new DFSImpl(null, (RandomGraphEditor) graph);
                        }
                    }
                });
            }
        });
        fileMenu.add(findItem);
        fileMenu.addSeparator();

        JMenuItem saveItem = new JMenuItem("Save As...");
        saveItem.addActionListener(new ActionListener() {
            @Override // was missing; all sibling listeners declare it
            public void actionPerformed(ActionEvent e) {
                JFileChooser fileChooser = new JFileChooser();
                int option = fileChooser.showSaveDialog(graph.getPanel());
                if (option == JFileChooser.APPROVE_OPTION) {
                    File file = fileChooser.getSelectedFile();
                    graph.writeImage(file);
                }
            }
        });
        fileMenu.add(saveItem);
        fileMenu.addSeparator();

        JMenuItem printItem = new JMenuItem("Print...");
        printItem.setMnemonic('P');
        printItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                PrinterJob printerJob = PrinterJob.getPrinterJob();
                // Assumes the concrete editor implements Printable — confirm.
                printerJob.setPrintable((Printable) graph);
                if (printerJob.printDialog()) {
                    try {
                        printerJob.print();
                    } catch (Exception ex) {
                        // Load the icon from the classpath like the window icon above;
                        // the old new ImageIcon("images/icon.gif") resolved against the
                        // working directory and silently produced an empty icon.
                        JOptionPane.showMessageDialog(null, "Error while printing", "Error",
                                JOptionPane.ERROR_MESSAGE,
                                new ImageIcon(GraphGUI.class.getResource("images/icon.gif")));
                    }
                }
            }
        });
        fileMenu.add(printItem);
        fileMenu.addSeparator();

        JMenuItem exitItem = new JMenuItem("Exit");
        exitItem.setMnemonic('E');
        exitItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                GraphGUI.this.dispose();
                frame.setVisible(true);
            }
        });
        fileMenu.add(exitItem);

        bar.add(fileMenu);
    }

    /** Returns the File menu so callers can extend it. */
    public JMenu getFileMenu() {
        return fileMenu;
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.memory;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.stats.GcMonitor;
import io.airlift.units.DataSize;
import io.trino.Session;
import io.trino.execution.TaskId;
import io.trino.execution.TaskStateMachine;
import io.trino.memory.context.MemoryReservationHandler;
import io.trino.memory.context.MemoryTrackingContext;
import io.trino.operator.TaskContext;
import io.trino.spi.QueryId;
import io.trino.spiller.SpillSpaceTracker;

import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;

import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.OptionalInt;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static com.google.common.base.Verify.verifyNotNull;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static io.airlift.units.DataSize.succinctBytes;
import static io.trino.ExceededMemoryLimitException.exceededLocalTotalMemoryLimit;
import static io.trino.ExceededMemoryLimitException.exceededLocalUserMemoryLimit;
import static io.trino.ExceededSpillLimitException.exceededPerQueryLocalLimit;
import static io.trino.memory.context.AggregatedMemoryContext.newRootAggregatedMemoryContext;
import static io.trino.operator.Operator.NOT_BLOCKED;
import static java.lang.String.format;
import static java.util.Map.Entry.comparingByValue;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;

/**
 * Per-query bookkeeping on a single node: tracks user/revocable/system memory
 * reservations against a {@link MemoryPool}, tracks spill-space usage against a
 * {@link SpillSpaceTracker}, and owns the {@link TaskContext}s of the query's tasks.
 * <p>
 * Thread-safety: most mutable state is guarded by {@code this}. Several methods
 * carry detailed comments about lock-ordering between this monitor and the
 * memory-context monitors — read them before changing any synchronization here.
 */
@ThreadSafe
public class QueryContext
{
    // Minimum user-memory headroom (in bytes) passed to the root user memory context,
    // so a query can always make small allocations even when the pool is tight.
    private static final long GUARANTEED_MEMORY = DataSize.of(1, MEGABYTE).toBytes();

    private final QueryId queryId;
    private final GcMonitor gcMonitor;
    private final Executor notificationExecutor;
    private final ScheduledExecutorService yieldExecutor;
    private final long maxSpill;
    private final SpillSpaceTracker spillSpaceTracker;
    // Task contexts by task id; concurrent because tasks are added/read without holding "this".
    private final Map<TaskId, TaskContext> taskContexts = new ConcurrentHashMap<>();

    @GuardedBy("this")
    private boolean resourceOverCommit;
    // volatile: read without synchronization in isMemoryLimitsInitialized()
    private volatile boolean memoryLimitsInitialized;

    // TODO: This field should be final. However, due to the way QueryContext is constructed the memory limit is not known in advance
    @GuardedBy("this")
    private long maxUserMemory;
    @GuardedBy("this")
    private long maxTotalMemory;

    private final MemoryTrackingContext queryMemoryContext;

    @GuardedBy("this")
    private MemoryPool memoryPool;

    @GuardedBy("this")
    private long spillUsed;

    public QueryContext(
            QueryId queryId,
            DataSize maxUserMemory,
            DataSize maxTotalMemory,
            MemoryPool memoryPool,
            GcMonitor gcMonitor,
            Executor notificationExecutor,
            ScheduledExecutorService yieldExecutor,
            DataSize maxSpill,
            SpillSpaceTracker spillSpaceTracker)
    {
        this.queryId = requireNonNull(queryId, "queryId is null");
        this.maxUserMemory = requireNonNull(maxUserMemory, "maxUserMemory is null").toBytes();
        this.maxTotalMemory = requireNonNull(maxTotalMemory, "maxTotalMemory is null").toBytes();
        this.memoryPool = requireNonNull(memoryPool, "memoryPool is null");
        this.gcMonitor = requireNonNull(gcMonitor, "gcMonitor is null");
        this.notificationExecutor = requireNonNull(notificationExecutor, "notificationExecutor is null");
        this.yieldExecutor = requireNonNull(yieldExecutor, "yieldExecutor is null");
        this.maxSpill = requireNonNull(maxSpill, "maxSpill is null").toBytes();
        this.spillSpaceTracker = requireNonNull(spillSpaceTracker, "spillSpaceTracker is null");
        // Three root contexts: user (with the guaranteed-memory floor and try-reserve support),
        // revocable, and system (the latter two do not support tryReserve).
        this.queryMemoryContext = new MemoryTrackingContext(
                newRootAggregatedMemoryContext(new QueryMemoryReservationHandler(this::updateUserMemory, this::tryUpdateUserMemory), GUARANTEED_MEMORY),
                newRootAggregatedMemoryContext(new QueryMemoryReservationHandler(this::updateRevocableMemory, this::tryReserveMemoryNotSupported), 0L),
                newRootAggregatedMemoryContext(new QueryMemoryReservationHandler(this::updateSystemMemory, this::tryReserveMemoryNotSupported), 0L));
    }

    /** Returns true once {@link #initializeMemoryLimits} has been called. */
    public boolean isMemoryLimitsInitialized()
    {
        return memoryLimitsInitialized;
    }

    // TODO: This method should be removed, and the correct limit set in the constructor. However, due to the way QueryContext is constructed the memory limit is not known in advance
    public synchronized void initializeMemoryLimits(boolean resourceOverCommit, long maxUserMemory, long maxTotalMemory)
    {
        checkArgument(maxUserMemory >= 0, "maxUserMemory must be >= 0, found: %s", maxUserMemory);
        checkArgument(maxTotalMemory >= 0, "maxTotalMemory must be >= 0, found: %s", maxTotalMemory);
        this.resourceOverCommit = resourceOverCommit;
        if (resourceOverCommit) {
            // Allow the query to use the entire pool. This way the worker will kill the query, if it uses the entire local memory pool.
            // The coordinator will kill the query if the cluster runs out of memory.
            this.maxUserMemory = memoryPool.getMaxBytes();
            this.maxTotalMemory = memoryPool.getMaxBytes();
        }
        else {
            this.maxUserMemory = maxUserMemory;
            this.maxTotalMemory = maxTotalMemory;
        }
        memoryLimitsInitialized = true;
    }

    @VisibleForTesting
    MemoryTrackingContext getQueryMemoryContext()
    {
        return queryMemoryContext;
    }

    @VisibleForTesting
    public synchronized long getMaxUserMemory()
    {
        return maxUserMemory;
    }

    @VisibleForTesting
    public synchronized long getMaxTotalMemory()
    {
        return maxTotalMemory;
    }

    public QueryId getQueryId()
    {
        return queryId;
    }

    /**
     * Deadlock is possible for concurrent user and system allocations when updateSystemMemory()/updateUserMemory
     * calls queryMemoryContext.getUserMemory()/queryMemoryContext.getSystemMemory(), respectively.
     *
     * @see this#updateSystemMemory(String, long) for details.
     */
    private synchronized ListenableFuture<?> updateUserMemory(String allocationTag, long delta)
    {
        if (delta >= 0) {
            // Enforce the per-query user limit before handing the reservation to the pool;
            // the returned future is not-done while the pool is full (caller blocks on it).
            enforceUserMemoryLimit(queryMemoryContext.getUserMemory(), delta, maxUserMemory);
            return memoryPool.reserve(queryId, allocationTag, delta);
        }
        // Negative delta: release memory back to the pool; never blocks.
        memoryPool.free(queryId, allocationTag, -delta);
        return NOT_BLOCKED;
    }

    //TODO Add tagging support for revocable memory reservations if needed
    private synchronized ListenableFuture<?> updateRevocableMemory(String allocationTag, long delta)
    {
        if (delta >= 0) {
            return memoryPool.reserveRevocable(queryId, delta);
        }
        memoryPool.freeRevocable(queryId, -delta);
        return NOT_BLOCKED;
    }

    private synchronized ListenableFuture<?> updateSystemMemory(String allocationTag, long delta)
    {
        // We call memoryPool.getQueryMemoryReservation(queryId) instead of calling queryMemoryContext.getUserMemory() to
        // calculate the total memory size.
        //
        // Calling the latter can result in a deadlock:
        // * A thread doing a user allocation will acquire locks in this order:
        //   1. monitor of queryMemoryContext.userAggregateMemoryContext
        //   2. monitor of this (QueryContext)
        // * The current thread doing a system allocation will acquire locks in this order:
        //   1. monitor of this (QueryContext)
        //   2. monitor of queryMemoryContext.userAggregateMemoryContext
        // Deadlock is possible for concurrent user and system allocations when updateSystemMemory()/updateUserMemory
        // calls queryMemoryContext.getUserMemory()/queryMemoryContext.getSystemMemory(), respectively. For concurrent
        // allocations of the same type (e.g., tryUpdateUserMemory/updateUserMemory) it is not possible as they share
        // the same RootAggregatedMemoryContext instance, and one of the threads will be blocked on the monitor of that
        // RootAggregatedMemoryContext instance even before calling the QueryContext methods (the monitors of
        // RootAggregatedMemoryContext instance and this will be acquired in the same order).
        long totalMemory = memoryPool.getQueryMemoryReservation(queryId);
        if (delta >= 0) {
            enforceTotalMemoryLimit(totalMemory, delta, maxTotalMemory);
            return memoryPool.reserve(queryId, allocationTag, delta);
        }
        memoryPool.free(queryId, allocationTag, -delta);
        return NOT_BLOCKED;
    }

    //TODO move spill tracking to the new memory tracking framework
    public synchronized ListenableFuture<?> reserveSpill(long bytes)
    {
        checkArgument(bytes >= 0, "bytes is negative");
        // Enforce the per-query spill limit before asking the tracker for space.
        if (spillUsed + bytes > maxSpill) {
            throw exceededPerQueryLocalLimit(succinctBytes(maxSpill));
        }
        ListenableFuture<?> future = spillSpaceTracker.reserve(bytes);
        spillUsed += bytes;
        return future;
    }

    /**
     * Non-blocking variant of {@link #updateUserMemory}: returns false instead of
     * blocking when the reservation cannot be satisfied within the user-memory limit.
     */
    private synchronized boolean tryUpdateUserMemory(String allocationTag, long delta)
    {
        if (delta <= 0) {
            ListenableFuture<?> future = updateUserMemory(allocationTag, delta);
            // When delta == 0 and the pool is full the future can still not be done,
            // but, for negative deltas it must always be done.
            if (delta < 0) {
                verify(future.isDone(), "future should be done");
            }
            return true;
        }
        if (queryMemoryContext.getUserMemory() + delta > maxUserMemory) {
            return false;
        }
        return memoryPool.tryReserve(queryId, allocationTag, delta);
    }

    public synchronized void freeSpill(long bytes)
    {
        checkArgument(spillUsed - bytes >= 0, "tried to free more memory than is reserved");
        spillUsed -= bytes;
        spillSpaceTracker.free(bytes);
    }

    public synchronized void setMemoryPool(MemoryPool newMemoryPool)
    {
        // This method first acquires the monitor of this instance.
        // After that in this method if we acquire the monitors of the
        // user/revocable memory contexts in the queryMemoryContext instance
        // (say, by calling queryMemoryContext.getUserMemory()) it's possible
        // to have a deadlock. Because, the driver threads running the operators
        // will allocate memory concurrently through the child memory context -> ... ->
        // root memory context -> this.updateUserMemory() calls, and will acquire
        // the monitors of the user/revocable memory contexts in the queryMemoryContext instance
        // first, and then the monitor of this, which may cause deadlocks.
        // That's why instead of calling methods on queryMemoryContext to get the
        // user/revocable memory reservations, we call the MemoryPool to get the same
        // information.
        requireNonNull(newMemoryPool, "newMemoryPool is null");
        if (memoryPool == newMemoryPool) {
            // Don't unblock our tasks and thrash the pools, if this is a no-op
            return;
        }
        ListenableFuture<?> future = memoryPool.moveQuery(queryId, newMemoryPool);
        memoryPool = newMemoryPool;
        if (resourceOverCommit) {
            // Reset the memory limits based on the new pool assignment
            maxUserMemory = memoryPool.getMaxBytes();
            maxTotalMemory = memoryPool.getMaxBytes();
        }
        future.addListener(() -> {
            // Unblock all the tasks, if they were waiting for memory, since we're in a new pool.
            taskContexts.values().forEach(TaskContext::moreMemoryAvailable);
        }, directExecutor());
    }

    public synchronized MemoryPool getMemoryPool()
    {
        return memoryPool;
    }

    /**
     * Creates a TaskContext for the given task, registers it with this query,
     * and hands it a fresh child memory-tracking context.
     */
    public TaskContext addTaskContext(
            TaskStateMachine taskStateMachine,
            Session session,
            Runnable notifyStatusChanged,
            boolean perOperatorCpuTimerEnabled,
            boolean cpuTimerEnabled,
            OptionalInt totalPartitions)
    {
        TaskContext taskContext = TaskContext.createTaskContext(
                this,
                taskStateMachine,
                gcMonitor,
                notificationExecutor,
                yieldExecutor,
                session,
                queryMemoryContext.newMemoryTrackingContext(),
                notifyStatusChanged,
                perOperatorCpuTimerEnabled,
                cpuTimerEnabled,
                totalPartitions);
        taskContexts.put(taskStateMachine.getTaskId(), taskContext);
        return taskContext;
    }

    public <C, R> R accept(QueryContextVisitor<C, R> visitor, C context)
    {
        return visitor.visitQueryContext(this, context);
    }

    public <C, R> List<R> acceptChildren(QueryContextVisitor<C, R> visitor, C context)
    {
        return taskContexts.values()
                .stream()
                .map(taskContext -> taskContext.accept(visitor, context))
                .collect(toList());
    }

    public TaskContext getTaskContextByTaskId(TaskId taskId)
    {
        TaskContext taskContext = taskContexts.get(taskId);
        return verifyNotNull(taskContext, "task does not exist");
    }

    /**
     * Adapts a pair of reserve/try-reserve functions to the MemoryReservationHandler
     * interface expected by the root aggregated memory contexts.
     */
    private static class QueryMemoryReservationHandler
            implements MemoryReservationHandler
    {
        private final BiFunction<String, Long, ListenableFuture<?>> reserveMemoryFunction;
        private final BiPredicate<String, Long> tryReserveMemoryFunction;

        public QueryMemoryReservationHandler(
                BiFunction<String, Long, ListenableFuture<?>> reserveMemoryFunction,
                BiPredicate<String, Long> tryReserveMemoryFunction)
        {
            this.reserveMemoryFunction = requireNonNull(reserveMemoryFunction, "reserveMemoryFunction is null");
            this.tryReserveMemoryFunction = requireNonNull(tryReserveMemoryFunction, "tryReserveMemoryFunction is null");
        }

        @Override
        public ListenableFuture<?> reserveMemory(String allocationTag, long delta)
        {
            return reserveMemoryFunction.apply(allocationTag, delta);
        }

        @Override
        public boolean tryReserveMemory(String allocationTag, long delta)
        {
            return tryReserveMemoryFunction.test(allocationTag, delta);
        }
    }

    // Placeholder try-reserve for the revocable and system contexts, which only support blocking reserve.
    private boolean tryReserveMemoryNotSupported(String allocationTag, long bytes)
    {
        throw new UnsupportedOperationException("tryReserveMemory is not supported");
    }

    @GuardedBy("this")
    private void enforceUserMemoryLimit(long allocated, long delta, long maxMemory)
    {
        if (allocated + delta > maxMemory) {
            throw exceededLocalUserMemoryLimit(succinctBytes(maxMemory), getAdditionalFailureInfo(allocated, delta));
        }
    }

    @GuardedBy("this")
    private void enforceTotalMemoryLimit(long allocated, long delta, long maxMemory)
    {
        if (allocated + delta > maxMemory) {
            throw exceededLocalTotalMemoryLimit(succinctBytes(maxMemory), getAdditionalFailureInfo(allocated, delta));
        }
    }

    // Builds the human-readable suffix for memory-limit-exceeded errors,
    // including the top 3 tagged consumers when tagged allocations exist.
    @GuardedBy("this")
    private String getAdditionalFailureInfo(long allocated, long delta)
    {
        Map<String, Long> queryAllocations = memoryPool.getTaggedMemoryAllocations().get(queryId);

        String additionalInfo = format("Allocated: %s, Delta: %s", succinctBytes(allocated), succinctBytes(delta));

        // It's possible that a query tries allocating more than the available memory
        // failing immediately before any allocation of that query is tagged
        if (queryAllocations == null) {
            return additionalInfo;
        }

        String topConsumers = queryAllocations.entrySet().stream()
                .sorted(comparingByValue(Comparator.reverseOrder()))
                .limit(3)
                .filter(e -> e.getValue() >= 0)
                .collect(toImmutableMap(Entry::getKey, e -> succinctBytes(e.getValue())))
                .toString();

        return format("%s, Top Consumers: %s", additionalInfo, topConsumers);
    }
}
/**
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ehcache.sizeof;

import org.ehcache.sizeof.filters.SizeOfFilter;
import org.ehcache.sizeof.util.WeakIdentityConcurrentMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.ref.SoftReference;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.IdentityHashMap;
import java.util.Stack;

/**
 * This will walk an object graph and let you execute some "function" along the way
 *
 * @author Alex Snaps
 */
final class ObjectGraphWalker {

    private static final Logger LOG = LoggerFactory.getLogger(ObjectGraphWalker.class);
    // Terracotta-injected fields carry this prefix and are excluded from sizing.
    private static final String TC_INTERNAL_FIELD_PREFIX = "$__tc_";
    // System property that switches on per-object debug tracing of the traversal.
    private static final String VERBOSE_DEBUG_LOGGING = "net.sf.ehcache.sizeof.verboseDebugLogging";

    private static final String CONTINUE_MESSAGE =
        "The configured limit of {0} object references was reached while attempting to calculate the size of the object graph."
        + " Severe performance degradation could occur if the sizing operation continues. This can be avoided by setting the CacheManger"
        + " or Cache <sizeOfPolicy> element's maxDepthExceededBehavior to \"abort\" or adding stop points with @IgnoreSizeOf annotations."
        + " If performance degradation is NOT an issue at the configured limit, raise the limit value using the CacheManager or Cache"
        + " <sizeOfPolicy> element's maxDepth attribute. For more information, see the Ehcache configuration documentation.";

    private static final String ABORT_MESSAGE =
        "The configured limit of {0} object references was reached while attempting to calculate the size of the object graph."
        + " This can be avoided by adding stop points with @IgnoreSizeOf annotations. Since the CacheManger or Cache <sizeOfPolicy>"
        + " element's maxDepthExceededBehavior is set to \"abort\", the sizing operation has stopped and the reported cache size is not"
        + " accurate. If performance degradation is NOT an issue at the configured limit, raise the limit value using the CacheManager"
        + " or Cache <sizeOfPolicy> element's maxDepth attribute. For more information, see the Ehcache configuration documentation.";

    private static final boolean USE_VERBOSE_DEBUG_LOGGING;

    // Per-class caches; weak identity keys so unloaded classes can be collected,
    // and SoftReference values so field lists can be dropped under memory pressure.
    private final WeakIdentityConcurrentMap<Class<?>, SoftReference<Collection<Field>>> fieldCache =
        new WeakIdentityConcurrentMap<Class<?>, SoftReference<Collection<Field>>>();
    private final WeakIdentityConcurrentMap<Class<?>, Boolean> classCache =
        new WeakIdentityConcurrentMap<Class<?>, Boolean>();

    private final SizeOfFilter sizeOfFilter;
    private final Visitor visitor;

    static {
        USE_VERBOSE_DEBUG_LOGGING = getVerboseSizeOfDebugLogging();
    }

    /**
     * Constructor
     *
     * @param visitor the visitor to use
     * @param filter the filtering
     * @see Visitor
     * @see SizeOfFilter
     */
    ObjectGraphWalker(Visitor visitor, SizeOfFilter filter) {
        this.visitor = visitor;
        this.sizeOfFilter = filter;
    }

    // Reads the verbose-debug system property once (see static initializer).
    private static boolean getVerboseSizeOfDebugLogging() {
        String verboseString = System.getProperty(VERBOSE_DEBUG_LOGGING, "false").toLowerCase();
        return verboseString.equals("true");
    }

    /**
     * The visitor to execute the function on each node of the graph
     * This is only to be used for the sizing of an object graph in memory!
     */
    static interface Visitor {
        /**
         * The visit method executed on each node
         *
         * @param object the reference at that node
         * @return a long for you to do things with...
         */
        public long visit(Object object);
    }

    /**
     * Walk the graph and call into the "visitor"
     *
     * @param maxDepth maximum depth to traverse the object graph
     * @param abortWhenMaxDepthExceeded true if the object traversal should be aborted when the max depth is exceeded
     * @param root the roots of the objects (a shared graph will only be visited once)
     * @return the sum of all Visitor#visit returned values
     */
    long walk(int maxDepth, boolean abortWhenMaxDepthExceeded, Object... root) {
        final StringBuilder traversalDebugMessage;
        if (USE_VERBOSE_DEBUG_LOGGING && LOG.isDebugEnabled()) {
            traversalDebugMessage = new StringBuilder();
        } else {
            traversalDebugMessage = null;
        }
        long result = 0;
        boolean warned = false;

        // Iterative depth-first traversal; identity map (not equals-based) so two
        // distinct-but-equal objects are both sized, and shared references only once.
        Stack<Object> toVisit = new Stack<Object>();
        IdentityHashMap<Object, Object> visited = new IdentityHashMap<Object, Object>();

        if (root != null) {
            if (traversalDebugMessage != null) {
                traversalDebugMessage.append("visiting ");
            }
            for (Object object : root) {
                nullSafeAdd(toVisit, object);
                if (traversalDebugMessage != null && object != null) {
                    traversalDebugMessage.append(object.getClass().getName())
                        .append("@").append(System.identityHashCode(object)).append(", ");
                }
            }
            if (traversalDebugMessage != null) {
                // Drop the trailing separator of the "a, b, " list built above.
                traversalDebugMessage.deleteCharAt(traversalDebugMessage.length() - 2).append("\n");
            }
        }

        while (!toVisit.isEmpty()) {
            // Depth is measured by the number of distinct objects seen so far;
            // may warn (continue) or throw (abort) once maxDepth is reached.
            warned = checkMaxDepth(maxDepth, abortWhenMaxDepthExceeded, warned, visited);

            Object ref = toVisit.pop();

            if (visited.containsKey(ref)) {
                continue;
            }

            Class<?> refClass = ref.getClass();
            if (!isSharedFlyweight(ref) && shouldWalkClass(refClass)) {
                if (refClass.isArray() && !refClass.getComponentType().isPrimitive()) {
                    // Object arrays: enqueue every element (primitive arrays have no references to follow).
                    for (int i = 0; i < Array.getLength(ref); i++) {
                        nullSafeAdd(toVisit, Array.get(ref, i));
                    }
                } else {
                    // Regular objects: enqueue every non-filtered reference field's value.
                    for (Field field : getFilteredFields(refClass)) {
                        try {
                            nullSafeAdd(toVisit, field.get(ref));
                        } catch (IllegalAccessException ex) {
                            throw new RuntimeException(ex);
                        }
                    }
                }

                long visitSize = calculateSize(ref);
                if (traversalDebugMessage != null) {
                    traversalDebugMessage.append(" ").append(visitSize).append("b\t\t")
                        .append(ref.getClass().getName()).append("@").append(System.identityHashCode(ref)).append("\n");
                }
                result += visitSize;
            } else if (traversalDebugMessage != null) {
                traversalDebugMessage.append(" ignored\t")
                    .append(ref.getClass().getName()).append("@").append(System.identityHashCode(ref)).append("\n");
            }
            visited.put(ref, null);
        }

        if (traversalDebugMessage != null) {
            traversalDebugMessage.append("Total size: ").append(result).append(" bytes\n");
            LOG.debug(traversalDebugMessage.toString());
        }
        return result;
    }

    // Applies the visitor to a single node; null contributes zero bytes.
    private long calculateSize(Object ref) {
        long visitSize;
        if (ref == null) {
            return 0;
        } else {
            visitSize = visitor.visit(ref);
        }
        return visitSize;
    }

    // Enforces the maxDepth policy: throw when aborting, warn once when continuing.
    // Returns the (possibly updated) "already warned" flag.
    private boolean checkMaxDepth(final int maxDepth, final boolean abortWhenMaxDepthExceeded, boolean warned,
                                  final IdentityHashMap<Object, Object> visited) {
        if (visited.size() >= maxDepth) {
            if (abortWhenMaxDepthExceeded) {
                throw new IllegalArgumentException(MessageFormat.format(ABORT_MESSAGE, maxDepth));
            } else if (!warned) {
                LOG.warn(MessageFormat.format(CONTINUE_MESSAGE, maxDepth));
                warned = true;
            }
        }
        return warned;
    }

    /**
     * Returns the filtered fields for a particular type
     *
     * @param refClass the type
     * @return A collection of fields to be visited
     */
    private Collection<Field> getFilteredFields(Class<?> refClass) {
        SoftReference<Collection<Field>> ref = fieldCache.get(refClass);
        Collection<Field> fieldList = ref != null ? ref.get() : null;
        if (fieldList != null) {
            // Cache hit (and the SoftReference has not been cleared).
            return fieldList;
        } else {
            Collection<Field> result;
            result = sizeOfFilter.filterFields(refClass, getAllFields(refClass));
            if (USE_VERBOSE_DEBUG_LOGGING && LOG.isDebugEnabled()) {
                for (Field field : result) {
                    if (Modifier.isTransient(field.getModifiers())) {
                        LOG.debug("SizeOf engine walking transient field '{}' of class {}", field.getName(), refClass.getName());
                    }
                }
            }
            fieldCache.put(refClass, new SoftReference<Collection<Field>>(result));
            return result;
        }
    }

    // Asks the filter (once per class, then cached) whether instances of refClass should be walked.
    private boolean shouldWalkClass(Class<?> refClass) {
        Boolean cached = classCache.get(refClass);
        if (cached == null) {
            cached = sizeOfFilter.filterClass(refClass);
            classCache.put(refClass, cached);
        }
        return cached;
    }

    private static void nullSafeAdd(final Stack<Object> toVisit, final Object o) {
        if (o != null) {
            toVisit.push(o);
        }
    }

    /**
     * Returns all non-primitive fields for the entire class hierarchy of a type
     *
     * @param refClass the type
     * @return all fields for that type
     */
    private static Collection<Field> getAllFields(Class<?> refClass) {
        Collection<Field> fields = new ArrayList<Field>();
        for (Class<?> klazz = refClass; klazz != null; klazz = klazz.getSuperclass()) {
            for (Field field : klazz.getDeclaredFields()) {
                // Skip statics (not part of instance size), primitives (no references to follow)
                // and Terracotta-injected internals.
                if (!Modifier.isStatic(field.getModifiers()) &&
                    !field.getType().isPrimitive() &&
                    !field.getName().startsWith(TC_INTERNAL_FIELD_PREFIX)) {
                    try {
                        field.setAccessible(true);
                    } catch (SecurityException e) {
                        // Best effort: keep sizing what we can reach, but tell the user the number is low.
                        LOG.error("Security settings prevent Ehcache from accessing the subgraph beneath '{}'"
                                  + " - cache sizes may be underestimated as a result", field, e);
                        continue;
                    }
                    fields.add(field);
                }
            }
        }
        return fields;
    }

    // JVM-shared flyweights (e.g. interned constants) are not attributed to this graph's size.
    private static boolean isSharedFlyweight(Object obj) {
        FlyweightType type = FlyweightType.getFlyweightType(obj.getClass());
        return type != null && type.isShared(obj);
    }
}
package de.pfabulist.lindwurm.niotest.tests; import de.pfabulist.kleinod.nio.Filess; import de.pfabulist.lindwurm.niotest.tests.topics.*; import de.pfabulist.lindwurm.niotest.Utils; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystemException; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileTime; import static de.pfabulist.kleinod.nio.PathIKWID.childGetParent; import static de.pfabulist.kleinod.text.Strings.getBytes; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * ** BEGIN LICENSE BLOCK ***** * BSD License (2 clause) * Copyright (c) 2006 - 2016, Stephan Pfab * All rights reserved. * <p> * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * <p> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL Stephan Pfab BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * **** END LICENSE BLOCK **** */ @SuppressWarnings( { "PMD.ExcessivePublicCount", "PMD.TooManyMethods" } ) public abstract class Tests02Dir extends Tests01NoContent { public Tests02Dir( FSDescription capa ) { super( capa ); } @Test public void testDefaultIsDir() throws Exception { assertThat( pathDefault() ).isDirectory(); } @Test public void testContentOfNonEmptyDir() throws IOException { assertThat( Files.list( getNonEmptyDir() ).findAny() ).isPresent(); // try( DirectoryStream<Path> stream = Files.newDirectoryStream( getNonEmptyDir() ) ) { // assertThat( Utils.getSize( stream )).isNotEqualTo( 0 ); // } } @Test public void testIteratorCanOnlyBeCalledOnceOnDirStream() throws IOException { try( DirectoryStream<Path> stream = Files.newDirectoryStream( getNonEmptyDir() ) ) { stream.iterator(); assertThatThrownBy( stream::iterator ).isInstanceOf( IllegalStateException.class ); } } @Test public void testDirStreamIteratorHasNoRemove() throws IOException { try( DirectoryStream<Path> stream = Files.newDirectoryStream( getNonEmptyDir() ) ) { assertThatThrownBy( () -> stream.iterator().remove() ).isInstanceOf( UnsupportedOperationException.class ); } } @Test public void testContentOfNonEmptyDirFiltered() throws IOException { Path dir = getNonEmptyDir(); int unfilteredSize; try( DirectoryStream<Path> stream = Files.newDirectoryStream( dir ) ) { unfilteredSize = Utils.getSize( stream ); } // filter out first kid DirectoryStream.Filter<Path> filter = new 
DirectoryStream.Filter<Path>() { boolean first = true; @Override public boolean accept( Path entry ) throws IOException { if( first ) { first = false; return false; } return true; } }; try( DirectoryStream<Path> stream = Files.newDirectoryStream( dir, filter ) ) { assertThat( Utils.getSize( stream ) ).isEqualTo( unfilteredSize - 1 ); } } @Test @Category( Writable.class ) public void testNewDirIsInParentsDirStream() throws IOException { Path dir = dirTA().resolve( nameB() ); Files.createDirectory( dir ); assertThat( Files.list( childGetParent( dir ) ).filter( c -> c.equals( dir ) ).findAny() ).isPresent(); // try( DirectoryStream<Path> kids = Files.newDirectoryStream( childGetParent( dir ) ) ) { // assertThat( dir, isIn( kids ) ); // } } @Test @Category( Writable.class ) public void testNewDirectoryExists() throws IOException { Files.createDirectory( absTA() ); assertThat( absTA() ).exists(); } @Test @Category( { Writable.class, WorkingDirectoryInPlaygroundTree.class } ) public void testNewRelDirectoryExists() throws IOException { Files.createDirectory( relTA() ); assertThat( relTA() ).exists(); } @Test @Category( Writable.class ) public void testCreateDirectoryTwiceThrows() throws IOException { Path newDir = absTA(); Files.createDirectory( newDir ); assertThatThrownBy( () -> Files.createDirectory( newDir ) ).isInstanceOf( FileAlreadyExistsException.class ); } @Test( expected = NoSuchFileException.class ) @Category( Writable.class ) public void testCreateDirectoryWithoutExistingParentFails() throws IOException { Files.createDirectory( absTAB() ); } @Test( expected = FileSystemException.class ) @Category( Writable.class ) public void testCreateDirectoryWithInFileFails() throws IOException { Files.createDirectory( fileTA().resolve( nameC() ) ); } @Test public void testRootisADir() throws IOException { assertThat( defaultRoot() ).isDirectory(); } @Test public void testDefaultExists() throws Exception { assertThat( pathDefault() ).exists(); } // todo defaultfs windows 
has e: not exists, and can create // @Test // public void testCreateRootFails() throws IOException { // for( Path root : FS.getRootDirectories()) { // assertThat( () -> {Files.createDirectory(root);}, throwsException( FileAlreadyExistsException.class )); // } // } @Test public void testNonExistingAbsolutePathIsNotADirectory() throws IOException { assertThat( Files.isDirectory( getNonExistingPath() ) ).isFalse(); } @Test public void testNonExistingAbsolutePathIsNotADirectoryEvenIfParent() throws IOException { assertThat( Files.isDirectory( childGetParent( getNonExistingPath().resolve( "child" ) ) ) ).isFalse(); } @Test public void testNonExistingRelativePathIsNotADirectory() throws IOException { assertThat( Files.isDirectory( getNonExistingPath() ) ).isFalse(); } @Test( expected = FileAlreadyExistsException.class ) @Category( Writable.class ) public void testCreateDirWithSamePathAsExistingFileFails() throws Exception { Files.createDirectory( fileTA() ); } @Test @Category( { SlowTest.class, Writable.class, Attributes.class, LastModifiedTime.class } ) public void testCreateDirSetsModifiedTimeOfParent() throws IOException, InterruptedException { Path dir = dirTA(); FileTime created = Files.getLastModifiedTime( dir ); waitForAttribute(); Files.createDirectory( dir.resolve( nameB() ) ); assertThat( Files.getLastModifiedTime( dir ) ).isGreaterThan( created ); } @Test @Category( { SlowTest.class, Writable.class, Attributes.class, LastAccessTime.class } ) public void testCreateDirSetsLastAccessTimeOfParent() throws IOException, InterruptedException { Path dir = dirTA(); FileTime before = Files.readAttributes( dir, BasicFileAttributes.class ).lastAccessTime(); waitForAttribute(); Files.createDirectory( dir.resolve( nameB() ) ); assertThat( Files.readAttributes( dir, BasicFileAttributes.class ).lastAccessTime() ).isGreaterThan( before ); } @Test @Category( { SlowTest.class, Writable.class, CreationTime.class } ) public void testCreateDirSetsCreationTime() throws IOException, 
InterruptedException { Path dir = absTA(); FileTime before = Files.getLastModifiedTime( childGetParent( dir ) ); waitForAttribute(); Files.createDirectory( dir ); BasicFileAttributes atti = Files.readAttributes( dir, BasicFileAttributes.class ); assertThat( atti.creationTime() ).isGreaterThan( before ); } @Test public void testKidsOfAbsoluteDirAreAbsolute() throws Exception { try( DirectoryStream<Path> kids = Files.newDirectoryStream( getNonEmptyDir() ) ) { for( Path kid : kids ) { assertThat( kid ).isAbsolute(); } } } @Test @Category( WorkingDirectoryInPlaygroundTree.class ) public void testKidsOfRelativeDirAreRelative() throws Exception { try( DirectoryStream<Path> kids = Files.newDirectoryStream( childGetParent( relativize( getNonEmptyDir() ) ) ) ) { for( Path kid : kids ) { assertThat( kid ).isRelative(); } } } // todo filter // @Test // public void testFilterOfRel() throws Exception { // TODO // Path abs = nonEmptyDir(); // Path rel = pathDefault().toAbsolutePath().relativize( abs ); // // try( DirectoryStream<Path> kids = Files.newDirectoryStream( rel ) ) { // for( Path kid : kids ) { // assertThat( kid, relative() ); // } // } // } @Test public void testKidsOfRelDirAreLikeTheResultOfResolve() throws Exception { Path dir = getNonEmptyDir(); try( DirectoryStream<Path> kids = Files.newDirectoryStream( dir ) ) { for( Path kid : kids ) { assertThat( kid ).isEqualTo( dir.resolve( kid.getFileName() ) ); } } } @Test @Category( { SlowTest.class, Writable.class, Attributes.class, LastAccessTime.class } ) @SuppressWarnings( "PMD.UnusedLocalVariable" ) public void testReadDirStreamSetsLastAccessTime() throws Exception { Path dir = childGetParent( fileTAB() ); FileTime before = Files.readAttributes( dir, BasicFileAttributes.class ).lastAccessTime(); waitForAttribute(); try( DirectoryStream<Path> kids = Files.newDirectoryStream( dir ) ) { for( Path kid : kids ) { } } assertThat( Files.readAttributes( dir, BasicFileAttributes.class ).lastAccessTime() ).isGreaterThan( 
before ); } @Test @Category( { SlowTest.class, Writable.class, Attributes.class, LastAccessTime.class } ) @SuppressWarnings( "PMD.UnusedLocalVariable" ) public void testReadEmptyDirStreamSetsLastAccessTime() throws Exception { Path dir = dirTA(); FileTime before = Files.readAttributes( dir, BasicFileAttributes.class ).lastAccessTime(); waitForAttribute(); try( DirectoryStream<Path> kids = Files.newDirectoryStream( dir ) ) { for( Path kid : kids ) { } } assertThat( Files.readAttributes( dir, BasicFileAttributes.class ).lastAccessTime() ).isGreaterThan( before ); } @Test @Category( { SlowTest.class, Writable.class, Attributes.class } ) // changed attis are only relevant in writable cases public void testReadDirStreamDoesNotSetParentsLastAccessTime() throws Exception { Path dir = dirTA(); FileTime before = Files.readAttributes( childGetParent( dir ), BasicFileAttributes.class ).lastAccessTime(); waitForAttribute(); Files.list( dir ).forEach( p -> { } ); assertThat( Files.readAttributes( childGetParent( dir ), BasicFileAttributes.class ).lastAccessTime() ).isEqualTo( before ); } // todo not fully defines // @Test // public void testGetIteratorOfClosedDirStream() throws Exception{ // Path file = fileTAB(); // fileTAC(); // 2nd kid // fileTAD(); // 3rd kid // // // try( DirectoryStream<Path> kids = Files.newDirectoryStream( file.getParent() ) ) { // kids.close(); // int count = 0; // for ( Path kid : kids ) { // count++; // } // // assertThat( count, lessThan(2) ); // } // } @Test @SuppressWarnings( "PMD.UnusedLocalVariable" ) public void testCloseDirStreamInTheMiddleOfIteration() throws Exception { Path dir = getNonEmptyDir(); int size; try( DirectoryStream<Path> kids = Files.newDirectoryStream( dir ) ) { size = Utils.getSize( kids ); } try( DirectoryStream<Path> kids = Files.newDirectoryStream( dir ) ) { int count = 0; for( Path kid : kids ) { count++; if( count == 1 ) { kids.close(); } } assertThat( count ).isLessThan( size ); } } // todo should that work on unix // 
or only the open part ? @Test public void testReadBytesFromDirectoryThrows() throws IOException { assertThatThrownBy( () -> Files.readAllBytes( dirTA() ) ).isInstanceOf( Exception.class ); } @Test public void testNewDirectoryStreamFromNonExistingDirThrows() throws IOException { assertThatThrownBy( () -> { try( DirectoryStream<Path> kids = Files.newDirectoryStream( getNonExistingPath() ) ) { } } ).isInstanceOf( NoSuchFileException.class ); } /* * ------------------------------------------------------------------------------ */ @SuppressFBWarnings() protected static byte[] CONTENT; @SuppressFBWarnings() protected static byte[] CONTENT_OTHER; @SuppressFBWarnings() protected static byte[] CONTENT_BIG; @SuppressFBWarnings() protected static byte[] CONTENT50; @BeforeClass @SuppressFBWarnings public static void beforeDir() { CONTENT = getBytes( "hi there" ); CONTENT_OTHER = getBytes( "what's up, huh, huh" ); String str = new String( Character.toChars( 0x10400 ) ); for( int i = 0; i < 12; i++ ) { str = str + str; } CONTENT_BIG = getBytes( str + "abcde" ); // not on 2^x bounderies // for( int i = 0; i < 20000; i++ ) { // CONTENT_BIG[ i ] = (byte) ( i ); // } CONTENT50 = new byte[ 50 ]; for( int i = 0; i < 50; i++ ) { CONTENT50[ i ] = (byte) ( i ); } } public Path absT() { Path ret = description.get( Path.class, "playground" ).resolve( testMethodName.getMethodName() ); Filess.createDirectories( ret ); return ret; } public Path absTA() { return absT().resolve( nameA() ); } public Path absTB() { return absT().resolve( nameB() ); } public Path absTC() { return absT().resolve( nameC() ); } public Path relTA() { Path abs = absTA(); return pathDefault().toAbsolutePath().relativize( abs ); } public Path absTAB() { return absTA().resolve( nameB() ); } public Path absTAC() { return absTA().resolve( nameC() ); } public Path dirTA() { Path ret = absTA(); Filess.createDirectories( ret ); return ret; } public Path dirTAB() { Path ret = absTAB(); Filess.createDirectories( ret ); return 
ret; } public Path dirTBB() { Path ret = absTB().resolve( nameB() ); Filess.createDirectories( ret ); return ret; } public Path dirTB() { Path ret = absTB(); Filess.createDirectories( ret ); return ret; } public Path fileTA() { Path ret = absTA(); if( !Files.exists( ret ) ) { Filess.write( ret, CONTENT ); } return ret; } public Path fileTB() { Path ret = absTB(); if( !Files.exists( ret ) ) { Filess.write( ret, CONTENT ); } return ret; } public Path fileTAB() { Path ret = dirTA().resolve( nameB() ); if( !Files.exists( ret ) ) { Filess.write( ret, CONTENT ); } return ret; } public Path relativize( Path path ) { // return path.absoluteGetRoot().resolve( nameE()).relativize( path ); return pathDefault().toAbsolutePath().relativize( path ); } public Path fileTAC() { Path ret = dirTA().resolve( nameC() ); if( !Files.exists( ret ) ) { Filess.write( ret, CONTENT ); } return ret; } public Path fileTAD() { Path ret = dirTA().resolve( nameD() ); if( !Files.exists( ret ) ) { Filess.write( ret, CONTENT ); } return ret; } public void waitForAttribute() { try { Object del = description.props.get( "attributeDelay" ); if( del != null ) { Thread.sleep( (Integer) del ); } else { Thread.sleep( 50 ); } } catch( InterruptedException e ) { } } public Path getNonEmptyDir() { if( !description.provides( Writable.class ) ) { return description.get( Path.class, "nonemptyDir" ); } Path dir = dirTBB(); Filess.write( dir.resolve( "one" ), CONTENT ); Filess.write( dir.resolve( "two" ), CONTENT ); return dir; } public Path getEmptyDir() { if( !description.provides( Writable.class ) ) { return description.get( Path.class, "emptyDir" ); } return dirTA(); } public Path getNonExistingPath() { if( !description.provides( Writable.class ) ) { return description.get( Path.class, "nonexisting" ); } return absTA().resolve( "notthere" ); } }
/**
Copyright 2004 Juan Heyns. All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are
permitted provided that the following conditions are met:

   1. Redistributions of source code must retain the above copyright notice, this list of
      conditions and the following disclaimer.

   2. Redistributions in binary form must reproduce the above copyright notice, this list
      of conditions and the following disclaimer in the documentation and/or other materials
      provided with the distribution.

THIS SOFTWARE IS PROVIDED BY JUAN HEYNS ``AS IS'' AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JUAN HEYNS OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

The views and conclusions contained in the software and documentation are those of the
authors and should not be interpreted as representing official policies, either expressed
or implied, of Juan Heyns.
*/
package net.sourceforge.jdatepicker;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.Calendar;
import java.util.HashSet;

import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

/**
 * Base implementation of {@code DateModel} that keeps the selected date in a
 * {@link Calendar} and translates to/from the concrete date type {@code T} via the
 * abstract {@link #toCalendar(Object)} / {@link #fromCalendar(Calendar)} hooks.
 * Fires both {@link ChangeEvent}s and named {@link PropertyChangeEvent}s
 * ("day", "month", "year", "value", "selected") on every mutation.
 *
 * Created 18 April 2010
 * Updated 26 April 2010
 *
 * @author Juan Heyns
 *
 * @param <T> concrete date representation (e.g. {@code java.util.Date}, {@code Calendar})
 */
public abstract class AbstractDateModel<T> implements DateModel<T> {

    // True once a date has been explicitly selected; getValue() returns null otherwise.
    private boolean selected;
    // Backing store for the current date; always kept at midnight after setValue().
    private Calendar calendarValue;
    private HashSet<ChangeListener> changeListeners;
    private HashSet<PropertyChangeListener> propertyChangeListeners;

    protected AbstractDateModel() {
        changeListeners = new HashSet<ChangeListener>();
        propertyChangeListeners = new HashSet<PropertyChangeListener>();
        selected = false;
        calendarValue = Calendar.getInstance();
    }

    public synchronized void addChangeListener(ChangeListener changeListener) {
        changeListeners.add(changeListener);
    }

    public synchronized void removeChangeListener(ChangeListener changeListener) {
        changeListeners.remove(changeListener);
    }

    /** Notifies all registered ChangeListeners that the model state changed. */
    protected synchronized void fireChangeEvent() {
        for (ChangeListener changeListener : changeListeners) {
            changeListener.stateChanged(new ChangeEvent(this));
        }
    }

    public synchronized void addPropertyChangeListener(PropertyChangeListener listener) {
        propertyChangeListeners.add(listener);
    }

    public synchronized void removePropertyChangeListener(PropertyChangeListener listener) {
        propertyChangeListeners.remove(listener);
    }

    /**
     * Fires a PropertyChangeEvent to all registered listeners.
     * No event is fired when old and new values are non-null and equal.
     */
    protected synchronized void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
        if (oldValue != null && newValue != null && oldValue.equals(newValue)) {
            return;
        }
        for (PropertyChangeListener listener : propertyChangeListeners) {
            listener.propertyChange(new PropertyChangeEvent(this, propertyName, oldValue, newValue));
        }
    }

    public int getDay() {
        return calendarValue.get(Calendar.DATE);
    }

    public int getMonth() {
        return calendarValue.get(Calendar.MONTH);
    }

    public int getYear() {
        return calendarValue.get(Calendar.YEAR);
    }

    /**
     * Returns the current value in the model's concrete type, or null when no
     * date is selected.
     */
    public T getValue() {
        if (!selected) {
            return null;
        }
        T value = fromCalendar(calendarValue);
        return value;
    }

    public void setDay(int day) {
        int oldDayValue = this.calendarValue.get(Calendar.DATE);
        T oldValue = getValue();
        calendarValue.set(Calendar.DATE, day);
        fireChangeEvent();
        firePropertyChange("day", oldDayValue, this.calendarValue.get(Calendar.DATE));
        firePropertyChange("value", oldValue, getValue());
    }

    public void addDay(int add) {
        int oldDayValue = this.calendarValue.get(Calendar.DATE);
        T oldValue = getValue();
        calendarValue.add(Calendar.DATE, add);
        fireChangeEvent();
        firePropertyChange("day", oldDayValue, this.calendarValue.get(Calendar.DATE));
        firePropertyChange("value", oldValue, getValue());
    }

    public void setMonth(int month) {
        int oldMonthValue = this.calendarValue.get(Calendar.MONTH);
        T oldValue = getValue();
        calendarValue.set(Calendar.MONTH, month);
        fireChangeEvent();
        firePropertyChange("month", oldMonthValue, this.calendarValue.get(Calendar.MONTH));
        firePropertyChange("value", oldValue, getValue());
    }

    public void addMonth(int add) {
        int oldMonthValue = this.calendarValue.get(Calendar.MONTH);
        T oldValue = getValue();
        calendarValue.add(Calendar.MONTH, add);
        fireChangeEvent();
        firePropertyChange("month", oldMonthValue, this.calendarValue.get(Calendar.MONTH));
        firePropertyChange("value", oldValue, getValue());
    }

    public void setYear(int year) {
        int oldYearValue = this.calendarValue.get(Calendar.YEAR);
        T oldValue = getValue();
        calendarValue.set(Calendar.YEAR, year);
        fireChangeEvent();
        firePropertyChange("year", oldYearValue, this.calendarValue.get(Calendar.YEAR));
        firePropertyChange("value", oldValue, getValue());
    }

    public void addYear(int add) {
        int oldYearValue = this.calendarValue.get(Calendar.YEAR);
        T oldValue = getValue();
        calendarValue.add(Calendar.YEAR, add);
        fireChangeEvent();
        firePropertyChange("year", oldYearValue, this.calendarValue.get(Calendar.YEAR));
        firePropertyChange("value", oldValue, getValue());
    }

    /**
     * Sets the model value, normalizing the time portion to midnight.
     * A null value deselects the model (the stored calendar keeps its last date).
     */
    public void setValue(T value) {
        int oldYearValue = this.calendarValue.get(Calendar.YEAR);
        int oldMonthValue = this.calendarValue.get(Calendar.MONTH);
        int oldDayValue = this.calendarValue.get(Calendar.DATE);
        T oldValue = getValue();
        boolean oldSelectedValue = isSelected();
        if (value != null) {
            this.calendarValue = toCalendar(value);
            setToMidnight();
            selected = true;
        } else {
            selected = false;
        }
        fireChangeEvent();
        firePropertyChange("year", oldYearValue, this.calendarValue.get(Calendar.YEAR));
        firePropertyChange("month", oldMonthValue, this.calendarValue.get(Calendar.MONTH));
        firePropertyChange("day", oldDayValue, this.calendarValue.get(Calendar.DATE));
        firePropertyChange("value", oldValue, getValue());
        firePropertyChange("selected", oldSelectedValue, this.selected);
    }

    public void setDate(int year, int month, int day) {
        int oldYearValue = this.calendarValue.get(Calendar.YEAR);
        int oldMonthValue = this.calendarValue.get(Calendar.MONTH);
        int oldDayValue = this.calendarValue.get(Calendar.DATE);
        T oldValue = getValue();
        calendarValue.set(year, month, day);
        fireChangeEvent();
        firePropertyChange("year", oldYearValue, this.calendarValue.get(Calendar.YEAR));
        firePropertyChange("month", oldMonthValue, this.calendarValue.get(Calendar.MONTH));
        firePropertyChange("day", oldDayValue, this.calendarValue.get(Calendar.DATE));
        firePropertyChange("value", oldValue, getValue());
    }

    public boolean isSelected() {
        return selected;
    }

    public void setSelected(boolean selected) {
        T oldValue = getValue();
        boolean oldSelectedValue = isSelected();
        this.selected = selected;
        fireChangeEvent();
        firePropertyChange("value", oldValue, getValue());
        firePropertyChange("selected", oldSelectedValue, this.selected);
    }

    /** Clears the time portion of the stored calendar so it represents midnight. */
    private void setToMidnight() {
        // FIX: use HOUR_OF_DAY (24-hour clock), not HOUR (12-hour clock).
        // Setting HOUR to 0 leaves the AM_PM field untouched, so a calendar
        // holding a PM time would end up at 12:00 noon instead of midnight.
        calendarValue.set(Calendar.HOUR_OF_DAY, 0);
        calendarValue.set(Calendar.MINUTE, 0);
        calendarValue.set(Calendar.SECOND, 0);
        calendarValue.set(Calendar.MILLISECOND, 0);
    }

    /** Converts the concrete date type into a Calendar. */
    protected abstract Calendar toCalendar(T from);

    /** Converts a Calendar into the concrete date type. */
    protected abstract T fromCalendar(Calendar from);

}
/*
 * Copyright 2012-2018 Chronicle Map Contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.openhft.chronicle.map;

import net.openhft.chronicle.core.OS;
import net.openhft.chronicle.core.util.Time;
import net.openhft.chronicle.hash.Data;
import net.openhft.chronicle.hash.serialization.ListMarshaller;
import net.openhft.chronicle.hash.serialization.impl.CharSequenceBytesReader;
import net.openhft.chronicle.hash.serialization.impl.CharSequenceBytesWriter;
import net.openhft.chronicle.set.*;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;

import static java.util.concurrent.TimeUnit.MINUTES;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

/**
 * Regression test for Chronicle-Map issue #63: float[] values written through a
 * persisted map must be readable unchanged after the map is closed and recovered
 * via {@code recoverPersistedTo}.  Also exercises combined map+set updates under
 * write locks and List-valued entries via {@code ListMarshaller}.
 */
public class Issue63Test {
    // Right now no corresponding "knownUsers" object
    private ChronicleMap<CharSequence, List<CharSequence>> knownItems;
    private ChronicleMap<CharSequence, float[]> xVectors;
    private ChronicleSet<CharSequence> xRecentIDs;
    private ChronicleMap<CharSequence, float[]> yVectors;
    private ChronicleSet<CharSequence> yRecentIDs;

    public static void main(String[] args) throws Exception {
        new Issue63Test().testChronicleMap();
    }

    /**
     * Atomically (under write locks on both containers) puts {@code key -> value}
     * into {@code map} and adds {@code key} to {@code recentSet}.
     * Throws RuntimeException if either lock cannot be acquired within 1 minute.
     */
    private static void putIntoMapAndRecentSet(
            ChronicleMap<CharSequence, float[]> map, ChronicleSet<CharSequence> recentSet,
            String key, float[] value) {
        try (ExternalMapQueryContext<CharSequence, float[], ?> c = map.queryContext(key);
             ExternalSetQueryContext<CharSequence, ?> setC = recentSet.queryContext(key)) {
            if (c.writeLock().tryLock(1, MINUTES) && setC.writeLock().tryLock(1, MINUTES)) {
                putNoReturn(c, value);
                addNoReturn(setC);
            } else {
                throw new RuntimeException("Dead lock");
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Inserts the queried key into the set if it is absent; no-op when present. */
    private static <K> void addNoReturn(SetQueryContext<K, ?> c) {
        SetAbsentEntry<K> setAbsentEntry = c.absentEntry();
        if (setAbsentEntry != null) {
            c.insert(setAbsentEntry);
        }
    }

    /** Replaces the existing entry's value, or inserts a new entry if absent. */
    private static <K, V> void putNoReturn(MapQueryContext<K, V, ?> c, V value) {
        MapEntry<K, V> entry = c.entry();
        Data<V> newValue = c.wrapValueAsData(value);
        if (entry != null) {
            c.replaceValue(entry, newValue);
        } else {
            MapAbsentEntry<K, V> absentEntry = c.absentEntry();
            assert absentEntry != null;
            c.insert(absentEntry, newValue);
        }
    }

    /**
     * Writes two fixed float[100] vectors to a persisted map, closes it, recovers
     * the file, and asserts both vectors read back bit-identically (delta 0.0f).
     */
    @Test
    public void issue63test() throws IOException {
        Path path = Paths.get(OS.getTarget() + "/test-vectors1-" + Time.uniqueId() + ".dat");
        if (Files.exists(path))
            Files.delete(path);
        File mapFile = path.toFile();
        mapFile.deleteOnExit();
        ChronicleMap<CharSequence, float[]> xVectors = ChronicleMap
                .of(CharSequence.class, float[].class)
                // use the actual UUID size
                .constantKeySizeBySample("2B6EC73CD63ACA5D93F4D5A710AD9CFE")
                .constantValueSizeBySample(new float[100])
                .putReturnsNull(true)
                .entries(10)
                .createPersistedTo(mapFile);
        // Fixed 100-element fixture vectors; exact bit patterns are what the test verifies.
        float[] exp1 = new float[]{
                (float) -0.4200737, (float) -0.5428019, (float) 0.25542524, (float) -0.10631648,
                (float) 0.12206168, (float) 0.0411969, (float) 0.9899967, (float) 0.15887073,
                (float) -0.09775953, (float) 0.21812996, (float) -0.2724478, (float) 1.1872392,
                (float) -0.57449555, (float) 0.5036392, (float) 0.1658725, (float) 0.26468855,
                (float) -0.3454932, (float) 0.61344844, (float) -0.058357887, (float) 0.41589612,
                (float) -0.30034602, (float) -0.065557495, (float) -0.5450994, (float) 0.24787773,
                (float) -0.49933347, (float) -0.34362262, (float) -0.116148725, (float) 0.1267731,
                (float) -0.021314947, (float) 0.4289211, (float) 0.018796312, (float) 1.1027592,
                (float) 0.26406515, (float) -0.364442, (float) 0.032301463, (float) 0.7497238,
                (float) 0.022618806, (float) 0.44369924, (float) -0.3347779, (float) -0.21492186,
                (float) -0.16348012, (float) -0.07863602, (float) 0.22218524, (float) 0.13798094,
                (float) 0.9739758, (float) 0.18799895, (float) 0.16804655, (float) -0.94723654,
                (float) -0.09069447, (float) 1.0777866, (float) 0.45763463, (float) -0.99949086,
                (float) 0.1130747, (float) 1.1800445, (float) -0.7469727, (float) -1.480476,
                (float) 0.21458353, (float) 0.5420289, (float) 0.44423282, (float) -0.73524255,
                (float) -0.86806494, (float) 0.77911025, (float) 0.43587336, (float) 0.45608798,
                (float) -0.52584565, (float) 0.5979028, (float) 0.18747452, (float) -0.9211639,
                (float) 0.2969087, (float) -0.17334144, (float) -0.30227816, (float) 0.6624411,
                (float) -1.445531, (float) 0.068452656, (float) -0.54010916, (float) 0.7997881,
                (float) -1.1808084, (float) 1.0036258, (float) 0.23763403, (float) -0.95869386,
                (float) 0.2150584, (float) 0.16237195, (float) 0.35550624, (float) -0.59370506,
                (float) 0.977463, (float) -0.14227587, (float) -1.1346477, (float) -0.29077065,
                (float) -0.7924145, (float) -0.05505234, (float) -0.4519053, (float) 0.8662279,
                (float) 0.056166444, (float) -0.6824282, (float) -0.28487095, (float) -0.28058794,
                (float) -0.868858, (float) 0.4946002, (float) 0.61442167, (float) 0.70633507
        };
        float[] exp2 = new float[]{
                (float) -0.0043417793, (float) -0.004025369, (float) 1.8009785E-4, (float) 5.522854E-4,
                (float) -2.9725596E-4, (float) 0.0038219264, (float) 0.0057955547, (float) -0.0036915164,
                (float) 1.2905941E-5, (float) -0.0012608414, (float) 0.0075167217, (float) 1.2714228E-4,
                (float) 0.004510221, (float) -0.0030373763, (float) -0.0033150043, (float) -0.0027220408,
                (float) 0.0049406015, (float) 0.007475855, (float) -0.0039889063, (float) 5.387217E-4,
                (float) 3.014746E-4, (float) -0.0025138916, (float) -0.0014927724, (float) 0.0033432362,
                (float) 0.0027196375, (float) -0.001453709, (float) -0.004362245, (float) 0.0062709767,
                (float) 5.681349E-4, (float) 2.963205E-4, (float) 0.002127562, (float) -0.0025758513,
                (float) -0.0015946038, (float) 0.0020683268, (float) 0.004608029, (float) -0.006912731,
                (float) -0.003569094, (float) 0.0029314745, (float) -0.0044829296, (float) -0.004087928,
                (float) -3.7728698E-4, (float) -0.0040272907, (float) -0.006466153, (float) 2.1587547E-4,
                (float) -4.334211E-5, (float) 0.0013268286, (float) -1.1723964E-4, (float) 0.0017377065,
                (float) -0.009606785, (float) -0.0059685633, (float) 0.0061167465, (float) 0.00976628,
                (float) 0.0045020734, (float) 0.0072684726, (float) -0.002317661, (float) 0.0030898168,
                (float) 0.0013212592, (float) 0.0017718632, (float) 0.002785933, (float) 4.135881E-4,
                (float) -0.007407679, (float) -0.008016254, (float) -0.0015525677, (float) -5.22596E-4,
                (float) 0.003450544, (float) -1.4363142E-4, (float) -0.0055779675, (float) -0.002204401,
                (float) 3.5834382E-4, (float) -0.0043447977, (float) 0.0052861, (float) 0.0024472543,
                (float) 0.0019035664, (float) -0.0010579216, (float) 0.008568893, (float) -0.0025444124,
                (float) 0.0041700895, (float) 0.002440465, (float) -9.898118E-4, (float) -0.004972163,
                (float) 0.00445475, (float) 0.0028563882, (float) -6.568626E-4, (float) 0.0019806502,
                (float) 0.0021152704, (float) -8.9459366E-4, (float) -5.853446E-4, (float) 0.006775423,
                (float) -6.2033796E-5, (float) -0.0016326059, (float) 0.0028676696, (float) -0.0020935084,
                (float) 0.0012473571, (float) -0.00658647, (float) -2.9175522E-4, (float) -0.004172817,
                (float) -9.5688103E-4, (float) 0.0029572574, (float) 0.0013865299, (float) -0.001356384
        };
        String key1 = "A2E2CD3EEFF31AE7A2EC455D2D23F8B2";
        String key2 = "28C711B859926E05576CAF5084B4D66C";
        xVectors.put(key1, exp1);
        xVectors.put(key2, exp2);
        xVectors.close();
        // Re-open the same file with an equivalent builder and verify recovery
        // preserved both vectors exactly.
        ChronicleMap<CharSequence, float[]> xVectors2 = ChronicleMap
                .of(CharSequence.class, float[].class)
                // use the actual UUID size
                .constantKeySizeBySample("2B6EC73CD63ACA5D93F4D5A710AD9CFE")
                .constantValueSizeBySample(new float[100])
                .entries(10)
                .putReturnsNull(true)
                .recoverPersistedTo(mapFile, true);
        assertArrayEquals(exp1, xVectors2.get(key1), 0.0f);
        assertArrayEquals(exp2, xVectors2.get(key2), 0.0f);
    }

    /**
     * Stress driver (invoked from main, not as a JUnit test): populates the five
     * persisted containers with 1M random user/item vectors and known-item lists.
     */
    void testChronicleMap() throws IOException {
        int num = 1_000_000;
        testChronicleMap(OS.getTarget(), num, num);
        ThreadLocalRandom random = ThreadLocalRandom.current();
        for (int i = 0; i < num; i++) {
            String id = UUID.randomUUID().toString().substring(0, 32);
            setUserVector(id, new float[random.nextInt(50, 150)]);
            String id2 = UUID.randomUUID().toString().substring(0, 9);
            setItemVector(id2, new float[random.nextInt(50, 150)]);
            ArrayList<CharSequence> items = new ArrayList<>();
            for (int j = 0; j < random.nextInt(50, 150); j++) {
                items.add("average sized known item");
            }
            addKnownItems(knownItems, id, items);
        }
    }

    /**
     * Creates (or recovers, when the backing files already exist) the five
     * persisted containers under {@code persistToDir}, sized for the given
     * numbers of X and Y ids.
     */
    void testChronicleMap(String persistToDir, int numXIDs, int numYIDs) throws IOException {
        if (!Files.exists(Paths.get(persistToDir)))
            Files.createDirectory(Paths.get(persistToDir));
        Path knownItemsPath = Paths.get(persistToDir + "/I-known-items.dat");
        //System.err.println("Loading knownItems");
        ArrayList<CharSequence> averageKnownItems = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            averageKnownItems.add("average sized known item");
        }
        ChronicleMapBuilder<CharSequence, List<CharSequence>> knownItemsBuilder = ChronicleMap
                .of(CharSequence.class, (Class<List<CharSequence>>) ((Class) List.class))
                .averageKey("2B6EC73CD63ACA5D93F4D5A710AD9CFE")
                .averageValue(averageKnownItems)
                .valueMarshaller(ListMarshaller.of(
                        CharSequenceBytesReader.INSTANCE, CharSequenceBytesWriter.INSTANCE))
                .entries(numXIDs)
                .maxBloatFactor(5.0)
                .putReturnsNull(true);
        if (Files.exists(knownItemsPath)) {
            knownItems = knownItemsBuilder.recoverPersistedTo(knownItemsPath.toFile(), true);
        } else {
            knownItems = knownItemsBuilder.createPersistedTo(knownItemsPath.toFile());
        }
        //System.err.println("Loading xVectors");
        Path xVectorsPath = Paths.get(persistToDir + "/" + "X-vectors.dat");
        ChronicleMapBuilder<CharSequence, float[]> xVectorsBuilder = ChronicleMap
                .of(CharSequence.class, float[].class)
                .averageKey("2B6EC73CD63ACA5D93F4D5A710AD9CFE") //use the actual UUID size
                .averageValue(new float[100])
                .putReturnsNull(true)
                .entries(numXIDs);
        if (Files.exists(xVectorsPath)) {
            xVectors = xVectorsBuilder.recoverPersistedTo(xVectorsPath.toFile(), true);
        } else {
            xVectors = xVectorsBuilder.createPersistedTo(xVectorsPath.toFile());
        }
        //System.err.println("Loading xRecentIds");
        Path xRecentIDsPath = Paths.get(persistToDir + "/" + "X-recent-ids.dat");
        ChronicleSetBuilder<CharSequence> xRecentBuilder = ChronicleSet
                .of(CharSequence.class)
                .entries(numXIDs)
                .averageKey("2B6EC73CD63ACA5D93F4D5A710AD9CFE"); //use the actual UUID size
        if (Files.exists(xRecentIDsPath)) {
            xRecentIDs = xRecentBuilder.recoverPersistedTo(xRecentIDsPath.toFile(), true);
        } else {
            xRecentIDs = xRecentBuilder.createPersistedTo(xRecentIDsPath.toFile());
        }
        //System.err.println("Loading yVectors");
        Path yVectorsPath = Paths.get(persistToDir + "/" + "Y-vectors.dat");
        ChronicleMapBuilder<CharSequence, float[]> yVectorsBuilder = ChronicleMap
                .of(CharSequence.class, float[].class)
                .averageKey("198321433")
                .averageValue(new float[100])
                .putReturnsNull(true)
                .entries(numYIDs);
        if (Files.exists(yVectorsPath)) {
            yVectors = yVectorsBuilder.recoverPersistedTo(yVectorsPath.toFile(), true);
        } else {
            yVectors = yVectorsBuilder.createPersistedTo(yVectorsPath.toFile());
        }
        //System.err.println("Loading yRecentIDs");
        Path yRecentIDsPath = Paths.get(persistToDir + "/" + "Y-recent-ids.dat");
        ChronicleSetBuilder<CharSequence> yRecentBuilder = ChronicleSet
                .of(CharSequence.class)
                .averageKey("198321433")
                .entries(numYIDs);
        if (Files.exists(yRecentIDsPath)) {
            yRecentIDs = yRecentBuilder.recoverPersistedTo(yRecentIDsPath.toFile(), true);
        } else {
            yRecentIDs = yRecentBuilder.createPersistedTo(yRecentIDsPath.toFile());
        }
    }

    public void setUserVector(String id, float[] arr) {
        putIntoMapAndRecentSet(xVectors, xRecentIDs, id, arr);
    }

    public void setItemVector(String id, float[] arr) {
        putIntoMapAndRecentSet(yVectors, yRecentIDs, id, arr);
    }

    /** Writes {@code id -> items} under a write lock; fails after 1 minute of contention. */
    public void addKnownItems(ChronicleMap<CharSequence, List<CharSequence>> knownItems, String id,
                              List<CharSequence> items) {
        try (ExternalMapQueryContext<CharSequence, List<CharSequence>, ?> c = knownItems.queryContext(id)) {
            if (c.writeLock().tryLock(1, MINUTES)) {
                putNoReturn(c, items);
            } else {
                throw new RuntimeException("Dead lock");
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Verifies List-valued entries survive close + recoverPersistedTo: 5 entries
     * written, map recovered, size must still be 5.
     */
    @Test
    public void testKnownItems() throws IOException {
        ArrayList<CharSequence> averageKnownItems = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            averageKnownItems.add("average sized known item");
        }
        Path knownItemsPath = Paths.get(OS.getTarget() + "/test-vectors2.dat");
        Files.deleteIfExists(knownItemsPath);
        ChronicleMap<CharSequence, List<CharSequence>> knownItems;
        ChronicleMapBuilder<CharSequence, List<CharSequence>> knownItemsBuilder = ChronicleMap
                .of(CharSequence.class, (Class<List<CharSequence>>) ((Class) List.class))
                .averageKey("2B6EC73CD63ACA5D93F4D5A710AD9CFE")
                .averageValue(averageKnownItems)
                .valueMarshaller(new ListMarshaller<>(
                        CharSequenceBytesReader.INSTANCE, CharSequenceBytesWriter.INSTANCE))
                .entries(20)
                .maxBloatFactor(5.0)
                .putReturnsNull(true);
        File mapFile = knownItemsPath.toFile();
        mapFile.deleteOnExit();
        knownItems = knownItemsBuilder.createPersistedTo(mapFile);
        ArrayList<CharSequence> ids = new ArrayList<>();
        for (int i = 0; i < 5; i++) {
            String id = UUID.randomUUID().toString();
            ids.add(id);
            addKnownItems(knownItems, id, averageKnownItems);
        }
        knownItems.close();
        final ChronicleMap<CharSequence, List<CharSequence>> knownItems2 =
                knownItemsBuilder.recoverPersistedTo(mapFile, true);
        assertEquals(5, knownItems2.size());
        /* ids.forEach((id) -> {
            System.out.println(knownItems2.get(id.subSequence(0, id.length())));
        });*/
        // knownItems2.forEach((id, list) -> {
        //     System.out.println(id + " : " + String.join(",", list));
        // });
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.query;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.annotations.QuerySqlField;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.processors.cache.index.AbstractIndexingCommonTest;
import org.apache.ignite.testframework.GridTestUtils;
import org.junit.Test;

/**
 * Tests for GROUP_CONCAT aggregate function in not collocated mode.
 */
@SuppressWarnings("unchecked")
public class IgniteSqlGroupConcatNotCollocatedTest extends AbstractIndexingCommonTest {
    /** Index of the client node started on top of the 3 server nodes. */
    private static final int CLIENT = 7;

    /** Name of the test cache. */
    private static final String CACHE_NAME = "cache";

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setCacheConfiguration(
            new CacheConfiguration(CACHE_NAME)
                .setAffinity(new RendezvousAffinityFunction().setPartitions(8))
                .setQueryEntities(Collections.singletonList(new QueryEntity(Key.class, Value.class))));

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGridsMultiThreaded(3, false);

        startClientGrid(CLIENT);

        IgniteCache c = grid(CLIENT).cache(CACHE_NAME);

        // Group 'grp' gets exactly 'grp' entries; values are consecutive letters
        // starting at 'A', so group boundaries follow triangular numbers.
        int k = 0;

        for (int grp = 1; grp < 7; ++grp) {
            for (int i = 0; i < grp; ++i) {
                c.put(new Key(k, grp), new Value(k, Character.toString((char)('A' + k))));

                k++;
            }
        }
    }

    /**
     * Checks that GROUP_CONCAT over each group contains every letter that was
     * put into that group (order is unspecified in not collocated mode).
     */
    @Test
    public void testGroupConcatSimple() {
        IgniteCache c = ignite(CLIENT).cache(CACHE_NAME);

        List<List<Object>> res = c.query(
            new SqlFieldsQuery("select grp, GROUP_CONCAT(str0) from Value group by grp")).getAll();

        for (List<Object> row : res) {
            int grp = (int)row.get(0);

            String str = (String)row.get(1);

            for (int i = 0; i < grp; ++i) {
                // (grp - 1) * grp / 2 is the triangular-number offset of the
                // first letter in group 'grp'.
                String s = "" + (char)('A' + i + (grp - 1) * grp / 2);

                assertTrue("Invalid group_concat result: string doesn't contain value: " +
                    "[str=" + str + ", val=" + s, str.contains(s));
            }
        }
    }

    /**
     * Checks GROUP_CONCAT with an explicit SEPARATOR clause against the exact
     * expected concatenations per group.
     */
    @Test
    public void testGroupConcatSeparator() {
        IgniteCache c = ignite(CLIENT).cache(CACHE_NAME);

        List<List<Object>> res = c.query(
            new SqlFieldsQuery("select grp, GROUP_CONCAT(str0 SEPARATOR '.') from Value group by grp")).getAll();

        List<List<Object>> expRes = Arrays.asList(
            Arrays.asList(1, "A"),
            Arrays.asList(2, "C.B"),
            Arrays.asList(3, "E.D.F"),
            Arrays.asList(4, "J.G.I.H"),
            Arrays.asList(5, "O.L.N.K.M"),
            Arrays.asList(6, "Q.S.U.P.R.T"));

        // Fixed: JUnit's assertEquals takes (expected, actual) — the original
        // call had the arguments swapped, producing misleading failure messages.
        assertEquals(expRes.size(), res.size());

        for (int i = 0; i < res.size(); i++)
            assertEqualsCollections(expRes.get(i), res.get(i));
    }

    /**
     * Checks GROUP_CONCAT combined with COUNT(DISTINCT ...) in the same query.
     */
    @Test
    public void testGroupConcatCountDistinct() {
        IgniteCache c = ignite(CLIENT).cache(CACHE_NAME);

        List<List<Object>> res = c.query(
            new SqlFieldsQuery("select count(distinct str0), group_concat(str0) from Value group by grp")).getAll();

        for (List<Object> row : res) {
            long cnt = (long)row.get(0);

            String str = (String)row.get(1);

            for (int i = 0; i < cnt; ++i) {
                String s = "" + (char)('A' + i + (cnt - 1) * cnt / 2);

                assertTrue("Invalid group_concat result: string doesn't contain value: " +
                    "[str=" + str + ", val=" + s, str.contains(s));
            }
        }
    }

    /**
     * Verifies that GROUP_CONCAT with ORDER BY fails with the documented
     * IgniteSQLException on not collocated data.
     */
    @Test
    public void testGroupConcatDistributedException() {
        final IgniteCache c = ignite(CLIENT).cache(CACHE_NAME);

        GridTestUtils.assertThrowsAnyCause(log, new Callable<Object>() {
            @Override public Object call() {
                c.query(new SqlFieldsQuery("select grp, GROUP_CONCAT(str0 ORDER BY str0) " +
                    "from Value group by grp")).getAll();

                return null;
            }
        }, IgniteSQLException.class, "Clauses DISTINCT and ORDER BY are unsupported for GROUP_CONCAT " +
            "for not collocated data");
    }

    /**
     * Cache key: unique id plus the group the entry belongs to.
     */
    public static class Key {
        /** */
        @QuerySqlField
        private int id;

        /** */
        @QuerySqlField
        private int grp;

        /**
         * @param id Id.
         * @param grp Group.
         */
        public Key(int id, int grp) {
            this.id = id;
            this.grp = grp;
        }
    }

    /**
     * Cache value: three string fields derived from the id/base string.
     */
    public static class Value {
        /** */
        @QuerySqlField
        private String str0;

        /** */
        @QuerySqlField
        private String str1;

        /** */
        @QuerySqlField
        private String strId;

        /**
         * @param id Id.
         * @param str String value.
         */
        public Value(int id, String str) {
            str0 = str;
            str1 = str + "_1";
            strId = "id#" + id;
        }
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.util.io; import com.intellij.ReviseWhenPortedToJDK; import com.intellij.openapi.diagnostic.LoggerRt; import com.intellij.openapi.util.SystemInfoRt; import com.intellij.openapi.util.text.StringUtilRt; import com.intellij.util.ArrayUtilRt; import com.intellij.util.Consumer; import org.jetbrains.annotations.*; import java.io.*; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.URI; import java.net.URISyntaxException; import java.nio.channels.FileChannel; import java.nio.charset.Charset; import java.util.*; import java.util.concurrent.ConcurrentLinkedQueue; /** * A stripped-down version of {@link com.intellij.openapi.util.io.FileUtil}. * Intended to use by external (out-of-IDE-process) runners and helpers, so it should not contain any library dependencies. 
*/
public class FileUtilRt {
  private static final int KILOBYTE = 1024;

  private static final int DEFAULT_INTELLISENSE_LIMIT = 2500 * KILOBYTE;

  public static final int MEGABYTE = KILOBYTE * KILOBYTE;

  // Both limits are user-overridable via system properties; see parseKilobyteProperty().
  public static final int LARGE_FOR_CONTENT_LOADING = Math.max(20 * MEGABYTE, Math.max(getUserFileSizeLimit(), getUserContentLoadLimit()));
  public static final int LARGE_FILE_PREVIEW_SIZE = Math.min(getLargeFilePreviewSize(), LARGE_FOR_CONTENT_LOADING);

  private static final int MAX_FILE_IO_ATTEMPTS = 10;
  private static final boolean USE_FILE_CHANNELS = "true".equalsIgnoreCase(System.getProperty("idea.fs.useChannels"));

  // Lazily computed canonical path of the system temp directory; see getTempDirectory().
  private static String ourCanonicalTempPathCache;

  /** Returns {@code true} if the file name ends with ".jar" or ".zip" (case-insensitive) and the file is not a directory. */
  public static boolean isJarOrZip(@NotNull File file) {
    return isJarOrZip(file, true);
  }

  /** Same as {@link #isJarOrZip(File)}; the directory check may be skipped when the caller already knows it is a regular file. */
  public static boolean isJarOrZip(@NotNull File file, boolean isCheckIsDirectory) {
    if (isCheckIsDirectory && file.isDirectory()) {
      return false;
    }
    // do not use getName to avoid extra String creation (File.getName() calls substring)
    String path = file.getPath();
    return StringUtilRt.endsWithIgnoreCase(path, ".jar") || StringUtilRt.endsWithIgnoreCase(path, ".zip");
  }

  /** Splits {@code path} on {@code separatorChar}; always returns at least one element, empty components are preserved. */
  @NotNull
  public static List<String> splitPath(@NotNull String path, char separatorChar) {
    List<String> list = new ArrayList<String>();
    int index = 0;
    int nextSeparator;
    while ((nextSeparator = path.indexOf(separatorChar, index)) != -1) {
      list.add(path.substring(index, nextSeparator));
      index = nextSeparator + 1;
    }
    list.add(path.substring(index));
    return list;
  }

  /** Returns {@code true} if {@code root} and all of its ancestors are accepted by {@code fileFilter} (or the filter is null). */
  public static boolean isFilePathAcceptable(@NotNull File root, @Nullable FileFilter fileFilter) {
    if (fileFilter == null) {
      return true;
    }
    File file = root;
    do {
      if (!fileFilter.accept(file)) {
        return false;
      }
      file = file.getParentFile();
    }
    while (file != null);
    return true;
  }

  /** Hook for subclasses that can resolve symlinks; consumed by {@link #toCanonicalPath(String, char, boolean, SymlinkResolver)}. */
  protected interface SymlinkResolver {
    @NotNull
    String resolveSymlinksAndCanonicalize(@NotNull String path, char separatorChar, boolean removeLastSlash);

    boolean isSymlink(@NotNull CharSequence path);
  }

  /* NIO-reflection initialization placed in a separate class for lazy loading */
  @ReviseWhenPortedToJDK("7")
  private static final class NIOReflect {
    // true when the java.nio.file API was successfully located via reflection
    static final boolean IS_AVAILABLE;

    /** Reflective equivalent of {@code file.toPath()}. */
    static Object toPath(File file) throws InvocationTargetException, IllegalAccessException {
      return ourFileToPathMethod.invoke(file);
    }

    /** Walks the tree rooted at {@code path} with the deleting visitor; {@code callback} is invoked per deleted entry. */
    static void deleteRecursively(Object path, @Nullable Consumer<Object> callback) throws InvocationTargetException, IllegalAccessException {
      try {
        ourCallback.set(callback);
        ourFilesWalkMethod.invoke(null, path, ourDeletionVisitor);
      }
      catch (InvocationTargetException e) {
        // a root that is already gone is not an error for recursive deletion
        if (!ourNoSuchFileExceptionClass.isInstance(e.getCause())) {
          throw e;
        }
      }
      finally {
        ourCallback.remove();
      }
    }

    private static Method ourFilesDeleteIfExistsMethod;
    private static Method ourFilesWalkMethod;
    private static Method ourFileToPathMethod;
    private static Method ourPathToFileMethod;
    private static Method ourAttributesIsOtherMethod;
    private static Object ourDeletionVisitor;
    private static Class<?> ourNoSuchFileExceptionClass;
    private static Class<?> ourAccessDeniedExceptionClass;
    private static final ThreadLocal<Consumer<Object>> ourCallback = new ThreadLocal<Consumer<Object>>();

    static {
      boolean initSuccess = false;
      try {
        Class<?> pathClass = Class.forName("java.nio.file.Path");
        Class<?> visitorClass = Class.forName("java.nio.file.FileVisitor");
        Class<?> filesClass = Class.forName("java.nio.file.Files");
        ourNoSuchFileExceptionClass = Class.forName("java.nio.file.NoSuchFileException");
        ourAccessDeniedExceptionClass = Class.forName("java.nio.file.AccessDeniedException");
        ourFileToPathMethod = Class.forName("java.io.File").getMethod("toPath");
        ourPathToFileMethod = pathClass.getMethod("toFile");
        ourFilesWalkMethod = filesClass.getMethod("walkFileTree", pathClass, visitorClass);
        ourAttributesIsOtherMethod = Class.forName("java.nio.file.attribute.BasicFileAttributes").getDeclaredMethod("isOther");
        ourFilesDeleteIfExistsMethod = filesClass.getMethod("deleteIfExists", pathClass);
        final Object Result_Continue = Class.forName("java.nio.file.FileVisitResult").getDeclaredField("CONTINUE").get(null);
        final Object Result_Skip = Class.forName("java.nio.file.FileVisitResult").getDeclaredField("SKIP_SUBTREE").get(null);
        ourDeletionVisitor = Proxy.newProxyInstance(FileUtilRt.class.getClassLoader(), new Class[]{visitorClass}, new InvocationHandler() {
          @Override
          public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            if (args.length == 2) {
              String methodName = method.getName();
              Object second = args[1];
              if (second instanceof Throwable) {
                if (SystemInfoRt.isWindows && "visitFileFailed".equals(methodName) && ourNoSuchFileExceptionClass.isInstance(second)) {
                  performDelete(args[0]);  // could be an aimless junction
                }
                else {
                  throw (Throwable)second;
                }
              }
              else if ("visitFile".equals(methodName) || "postVisitDirectory".equals(methodName)) {
                Consumer<Object> consumer = ourCallback.get();
                if (consumer != null) consumer.consume(args[0]);
                performDelete(args[0]);
              }
              else if (SystemInfoRt.isWindows && "preVisitDirectory".equals(methodName)) {
                boolean notDirectory = false;
                try {
                  notDirectory = Boolean.TRUE.equals(ourAttributesIsOtherMethod.invoke(second));
                }
                catch (Throwable ignored) { }
                if (notDirectory) {
                  // probably an NTFS reparse point
                  performDelete(args[0]);
                  return Result_Skip;
                }
              }
            }
            return Result_Continue;
          }

          /** Deletes one path, retrying up to MAX_FILE_IO_ATTEMPTS times with a 10 ms pause (Windows may hold transient locks). */
          private void performDelete(Object fileObject) throws IOException {
            for (int attempt = MAX_FILE_IO_ATTEMPTS; attempt > 0; attempt--) {
              try {
                //Files.deleteIfExists(file);
                ourFilesDeleteIfExistsMethod.invoke(null, fileObject);
                break;
              }
              catch (InvocationTargetException e) {
                Throwable cause = e.getCause();
                if (!(cause instanceof IOException)) {
                  throw new IllegalStateException(e);
                }
                if (!SystemInfoRt.isWindows || attempt == 1) {
                  throw (IOException)cause;
                }
                if (ourAccessDeniedExceptionClass.isInstance(cause)) {
                  // a file could be read-only, then fallback to legacy java.io API helps
                  try {
                    File file = (File)ourPathToFileMethod.invoke(fileObject);
                    if (file.delete() || !file.exists()) {
                      break;
                    }
                  }
                  catch (Throwable ignored) { }
                }
              }
              catch (IllegalAccessException e) {
                throw new IllegalStateException(e);
              }
              try {
                Thread.sleep(10);
              }
              catch (InterruptedException ignored) { }
            }
          }
        });
        initSuccess = true;
      }
      catch (Throwable ignored) {
        logger().info("Was not able to detect NIO API");
      }
      IS_AVAILABLE = initSuccess;
    }
  }

  /**
   * Converts given path to canonical representation by eliminating '.'s, traversing '..'s, and omitting duplicate separators.
   * Please note that this method is symlink-unfriendly (i.e. result of "/path/to/link/../next" most probably will differ from
   * what {@link File#getCanonicalPath()} will return), so if the path may contain symlinks,
   * consider using {@link com.intellij.openapi.util.io.FileUtil#toCanonicalPath(String, boolean)} instead.
   */
  @Contract("null, _, _ -> null; !null,_,_->!null")
  public static String toCanonicalPath(@Nullable String path, char separatorChar, boolean removeLastSlash) {
    return toCanonicalPath(path, separatorChar, removeLastSlash, null);
  }

  /** Same as the public overload, plus an optional {@code resolver}: when '..' traversal would cross a symlink, the resolver takes over. */
  @Contract("null, _, _, _ -> null; !null,_,_,_->!null")
  protected static String toCanonicalPath(@Nullable String path, char separatorChar, boolean removeLastSlash, @Nullable SymlinkResolver resolver) {
    if (path == null || path.isEmpty()) {
      return path;
    }
    // strip a leading "./"
    if (path.charAt(0) == '.') {
      if (path.length() == 1) {
        return "";
      }
      char c = path.charAt(1);
      if (c == '/' || c == separatorChar) {
        path = path.substring(2);
      }
    }

    if (separatorChar != '/') {
      path = path.replace(separatorChar, '/');
    }
    // trying to speedup the common case when there are no "//" or "/."
    int index = -1;
    do {
      index = path.indexOf('/', index+1);
      char next = index == path.length() - 1 ? 0 : path.charAt(index + 1);
      if (next == '.' || next == '/') {
        break;
      }
    }
    while (index != -1);
    if (index == -1) {
      if (removeLastSlash) {
        int start = processRoot(path, NullAppendable.INSTANCE);
        int slashIndex = path.lastIndexOf('/');
        return slashIndex != -1 && slashIndex > start && slashIndex == path.length() - 1 ? path.substring(0, path.length() - 1) : path;
      }
      return path;
    }

    StringBuilder result = new StringBuilder(path.length());
    int start = processRoot(path, result);
    int dots = 0;               // length of the pending run of '.' characters after a separator
    boolean separator = true;   // true when the previous character was a '/'

    for (int i = start; i < path.length(); ++i) {
      char c = path.charAt(i);
      if (c == '/') {
        if (!separator) {
          if (!processDots(result, dots, start, resolver)) {
            return resolver.resolveSymlinksAndCanonicalize(path, separatorChar, removeLastSlash);
          }
          dots = 0;
        }
        separator = true;
      }
      else if (c == '.') {
        if (separator || dots > 0) {
          ++dots;
        }
        else {
          result.append('.');
        }
        separator = false;
      }
      else {
        // a regular character: any pending dots were part of a file name, flush them
        while (dots > 0) {
          result.append('.');
          dots--;
        }
        result.append(c);
        separator = false;
      }
    }

    if (dots > 0) {
      if (!processDots(result, dots, start, resolver)) {
        return resolver.resolveSymlinksAndCanonicalize(path, separatorChar, removeLastSlash);
      }
    }

    int lastChar = result.length() - 1;
    if (removeLastSlash && lastChar >= 0 && result.charAt(lastChar) == '/' && lastChar > start) {
      result.deleteCharAt(lastChar);
    }

    return result.toString();
  }

  /** Appends the root component of {@code path} (UNC "//host/share/", "/" or "X:/") to {@code result} and returns its length. */
  @SuppressWarnings("DuplicatedCode")
  private static int processRoot(@NotNull String path, @NotNull Appendable result) {
    try {
      if (SystemInfoRt.isWindows && path.length() > 1 && path.charAt(0) == '/' && path.charAt(1) == '/') {
        // UNC path: copy "//host/share/" verbatim
        result.append("//");

        int hostStart = 2;
        while (hostStart < path.length() && path.charAt(hostStart) == '/') hostStart++;
        if (hostStart == path.length()) return hostStart;
        int hostEnd = path.indexOf('/', hostStart);
        if (hostEnd < 0) hostEnd = path.length();
        result.append(path, hostStart, hostEnd);
        result.append('/');

        int shareStart = hostEnd;
        while (shareStart < path.length() && path.charAt(shareStart) == '/') shareStart++;
        if (shareStart == path.length()) return shareStart;
        int shareEnd = path.indexOf('/', shareStart);
        if (shareEnd < 0) shareEnd = path.length();
        result.append(path, shareStart, shareEnd);
        result.append('/');

        return shareEnd;
      }

      if (!path.isEmpty() && path.charAt(0) == '/') {
        result.append('/');
        return 1;
      }

      if (path.length() > 2 && path.charAt(1) == ':' && path.charAt(2) == '/') {
        // Windows drive root, e.g. "C:/"
        result.append(path, 0, 3);
        return 3;
      }

      return 0;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Collapses a pending run of {@code dots} at the end of {@code result}; returns {@code false} when a symlink blocks '..' traversal. */
  @Contract("_, _, _, null -> true")
  private static boolean processDots(@NotNull StringBuilder result, int dots, int start, @Nullable SymlinkResolver symlinkResolver) {
    if (dots == 2) {
      int pos = -1;
      if (!StringUtilRt.endsWith(result, "/../") && !"../".contentEquals(result)) {
        pos = StringUtilRt.lastIndexOf(result, '/', start, result.length() - 1);
        if (pos >= 0) {
          ++pos;  // separator found, trim to next char
        }
        else if (start > 0) {
          pos = start;  // path is absolute, trim to root ('/..' -> '/')
        }
        else if (result.length() > 0) {
          pos = 0;  // path is relative, trim to default ('a/..' -> '')
        }
      }
      if (pos >= 0) {
        if (symlinkResolver != null && symlinkResolver.isSymlink(result)) {
          return false;
        }
        result.delete(pos, result.length());
      }
      else {
        result.append("../");  // impossible to traverse, keep as-is
      }
    }
    else if (dots != 1) {
      // "..." and longer sequences are ordinary names; single "." is simply dropped
      for (int i = 0; i < dots; i++) {
        result.append('.');
      }
      result.append('/');
    }
    return true;
  }

  /** Returns the extension (text after the last '.') or an empty string when there is none. */
  @NotNull
  public static String getExtension(@NotNull String fileName) {
    int index = fileName.lastIndexOf('.');
    if (index < 0) return "";
    return fileName.substring(index + 1);
  }

  @NotNull
  public static CharSequence getExtension(@NotNull CharSequence fileName) {
    return getExtension(fileName, "");
  }

  /** Returns the extension of {@code fileName}, or {@code defaultValue} when the name contains no '.'. */
  @Contract("_,!null -> !null")
  public static CharSequence getExtension(@NotNull CharSequence fileName, @Nullable String defaultValue) {
    int index = StringUtilRt.lastIndexOf(fileName, '.', 0, fileName.length());
    if (index < 0) {
      return defaultValue;
    }
    return fileName.subSequence(index + 1, fileName.length());
  }

  /** Checks that {@code filePath} ends with the given extension; case sensitivity follows the file system. */
  public static boolean extensionEquals(@NotNull @NonNls String filePath, @NotNull @NonNls String extension) {
    int extLen = extension.length();
    if (extLen == 0) {
      // empty extension matches names that have no '.' in their last component
      int lastSlash = Math.max(filePath.lastIndexOf('/'), filePath.lastIndexOf('\\'));
      return filePath.indexOf('.', lastSlash+1) == -1;
    }
    int extStart = filePath.length() - extLen;
    return extStart >= 1 && filePath.charAt(extStart-1) == '.'
           && filePath.regionMatches(!SystemInfoRt.isFileSystemCaseSensitive, extStart, extension, 0, extLen);
  }

  public static boolean fileNameEquals(@NotNull File file, @NonNls @NotNull String name) {
    return fileNameEquals(file.getName(), name);
  }

  /** Compares two file names honoring the file system's case sensitivity. */
  public static boolean fileNameEquals(@NotNull @NonNls CharSequence fileName, @NotNull @NonNls CharSequence expectedName) {
    return StringUtilRt.equal(expectedName, fileName, SystemInfoRt.isFileSystemCaseSensitive);
  }

  @NotNull
  public static String toSystemDependentName(@NotNull String path) {
    return toSystemDependentName(path, File.separatorChar);
  }

  /** Replaces both '/' and '\' with {@code separatorChar}. */
  @NotNull
  public static String toSystemDependentName(@NotNull String path, char separatorChar) {
    return path.replace('/', separatorChar).replace('\\', separatorChar);
  }

  @NotNull
  public static String toSystemIndependentName(@NotNull String path) {
    return path.replace('\\', '/');
  }

  /**
   * <p>Gets the relative path from the {@code base} to the {@code file} regardless existence or the type of the {@code base}.</p>
   *
   * <p>NOTE: if a file (not a directory) is passed as the {@code base}, the result cannot be used as a relative path
   * from the {@code base} parent directory to the {@code file}.</p>
   *
   * @param base the base
   * @param file the file
   * @return the relative path from the {@code base} to the {@code file}, or {@code null}
   */
  @Nullable
  public static String getRelativePath(File base, File file) {
    if (base == null || file == null) return null;
    if (base.equals(file)) return ".";
    String filePath = file.getAbsolutePath();
    String basePath = base.getAbsolutePath();
    return getRelativePath(basePath, filePath, File.separatorChar);
  }

  @Nullable
  public static String getRelativePath(@NotNull String basePath, @NotNull String filePath, char separator) {
    return getRelativePath(basePath, filePath, separator, SystemInfoRt.isFileSystemCaseSensitive);
  }

  /** String-level variant; returns "." for equal paths, {@code null} when there is no common prefix at all. */
  @Nullable
  public static String getRelativePath(@NotNull String basePath, @NotNull String filePath, char separator, boolean caseSensitive) {
    basePath = ensureEnds(basePath, separator);

    if (caseSensitive ? basePath.equals(ensureEnds(filePath, separator)) : basePath.equalsIgnoreCase(ensureEnds(filePath, separator))) {
      return ".";
    }

    int len = 0;
    int lastSeparatorIndex = 0;
    // need this for cases like this: base="/temp/abc/base" and file="/temp/ab"
    CharComparingStrategy strategy = caseSensitive ? CharComparingStrategy.IDENTITY : CharComparingStrategy.CASE_INSENSITIVE;
    while (len < filePath.length() && len < basePath.length() && strategy.charsEqual(filePath.charAt(len), basePath.charAt(len))) {
      if (basePath.charAt(len) == separator) {
        lastSeparatorIndex = len;
      }
      len++;
    }
    if (len == 0) return null;

    // one ".." per remaining directory of the base, then the tail of the file path
    StringBuilder relativePath = new StringBuilder();
    for (int i = len; i < basePath.length(); i++) {
      if (basePath.charAt(i) == separator) {
        relativePath.append("..");
        relativePath.append(separator);
      }
    }
    relativePath.append(filePath.substring(lastSeparatorIndex + 1));
    return relativePath.toString();
  }

  @NotNull
  private static String ensureEnds(@NotNull String s, char endsWith) {
    return StringUtilRt.endsWithChar(s, endsWith) ? s : s + endsWith;
  }

  /** Returns the name without the last extension; "a.b.c" becomes "a.b". */
  @NotNull
  public static CharSequence getNameWithoutExtension(@NotNull CharSequence name) {
    int i = StringUtilRt.lastIndexOf(name, '.', 0, name.length());
    return i < 0 ? name : name.subSequence(0, i);
  }

  @NotNull
  public static String getNameWithoutExtension(@NotNull String name) {
    return getNameWithoutExtension((CharSequence)name).toString();
  }

  @NotNull
  public static File createTempDirectory(@NotNull String prefix, @Nullable String suffix) throws IOException {
    return createTempDirectory(prefix, suffix, true);
  }

  @NotNull
  public static File createTempDirectory(@NotNull String prefix, @Nullable String suffix, boolean deleteOnExit) throws IOException {
    File dir = new File(getTempDirectory());
    return createTempDirectory(dir, prefix, suffix, deleteOnExit);
  }

  @NotNull
  public static File createTempDirectory(@NotNull File dir, @NotNull String prefix, @Nullable String suffix) throws IOException {
    return createTempDirectory(dir, prefix, suffix, true);
  }

  /** Creates a fresh temp directory under {@code dir}; when {@code deleteOnExit}, it is removed recursively by a shutdown hook. */
  @NotNull
  public static File createTempDirectory(@NotNull File dir, @NotNull String prefix, @Nullable String suffix, boolean deleteOnExit) throws IOException {
    File file = doCreateTempFile(dir, prefix, suffix, true);
    if (deleteOnExit) {
      // default deleteOnExit does not remove dirs if they are not empty
      FilesToDeleteHolder.ourFilesToDelete.add(file.getPath());
    }
    if (!file.isDirectory()) {
      throw new IOException("Cannot create a directory: " + file);
    }
    return file;
  }

  // Holds paths scheduled for deletion on JVM exit; the shutdown hook is registered lazily on first use.
  private static class FilesToDeleteHolder {
    private static final Queue<String> ourFilesToDelete = createFilesToDelete();

    @NotNull
    private static Queue<String> createFilesToDelete() {
      final ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<String>();
      Runtime.getRuntime().addShutdownHook(new Thread("FileUtil deleteOnExit") {
        @Override
        public void run() {
          String name;
          while ((name = queue.poll()) != null) {
            delete(new File(name));
          }
        }
      });
      return queue;
    }
  }

  @NotNull
  public static File createTempFile(@NotNull String prefix, @Nullable String suffix) throws IOException {
    return createTempFile(prefix, suffix, false); //false until TeamCity fixes its plugin
  }

  @NotNull
  public static File createTempFile(@NonNls @NotNull String prefix, @NonNls @Nullable String suffix, boolean deleteOnExit) throws IOException {
    File dir = new File(getTempDirectory());
    return createTempFile(dir, prefix, suffix, true, deleteOnExit);
  }

  @NotNull
  public static File createTempFile(@NotNull File dir, @NotNull String prefix, @Nullable String suffix) throws IOException {
    return createTempFile(dir, prefix, suffix, true, true);
  }

  @NotNull
  public static File createTempFile(@NotNull File dir, @NotNull String prefix, @Nullable String suffix, boolean create) throws IOException {
    return createTempFile(dir, prefix, suffix, create, true);
  }

  /** Creates a uniquely-named temp file; when {@code create} is false the name is reserved but the file itself is removed. */
  @NotNull
  public static File createTempFile(@NotNull File dir, @NotNull String prefix, @Nullable String suffix, boolean create, boolean deleteOnExit) throws IOException {
    File file = doCreateTempFile(dir, prefix, suffix, false);
    if (deleteOnExit) {
      //noinspection SSBasedInspection
      file.deleteOnExit();
    }
    if (!create) {
      if (!file.delete() && file.exists()) {
        throw new IOException("Cannot delete a file: " + file);
      }
    }
    return file;
  }

  private static final Random RANDOM = new Random();

  /** Creates a file or directory with a unique name, retrying with numeric (then random) suffixes on collisions. */
  @NotNull
  private static File doCreateTempFile(@NotNull File dir, @NotNull String prefix, @Nullable String suffix, boolean isDirectory) throws IOException {
    //noinspection ResultOfMethodCallIgnored
    dir.mkdirs();

    if (prefix.length() < 3) {
      prefix = (prefix + "___").substring(0, 3);
    }
    if (suffix == null) {
      suffix = "";
    }
    // normalize and use only the file name from the prefix
    prefix = new File(prefix).getName();

    int attempts = 0;
    int i = 0;
    int maxFileNumber = 10;
    IOException exception = null;
    while (true) {
      File f = null;
      try {
        f = calcName(dir, prefix, suffix, i);
        boolean success = isDirectory ? f.mkdir() : f.createNewFile();
        if (success) {
          return normalizeFile(f);
        }
      }
      catch (IOException e) {
        // Win32 createFileExclusively access denied
        exception = e;
      }
      attempts++;
      int MAX_ATTEMPTS = 100;
      if (attempts > maxFileNumber / 2 || attempts > MAX_ATTEMPTS) {
        String[] children = dir.list();
        int size = children == null ? 0 : children.length;
        maxFileNumber = Math.max(10, size * 10);  // if too many files are in tmp dir, we need a bigger random range than meager 10
        if (attempts > MAX_ATTEMPTS) {
          throw exception != null ? exception : new IOException("Unable to create a temporary file " + f + "\nDirectory '" + dir + "' list ("+size+" children): " + Arrays.toString(children));
        }
      }

      i++;  // for some reason the file1 can't be created (previous file1 was deleted but got locked by anti-virus?). Try file2.
      if (i > 2) {
        i = 2 + RANDOM.nextInt(maxFileNumber);  // generate random suffix if too many failures
      }
    }
  }

  /** Builds "prefix{i}suffix" inside {@code dir}, collapsing a "." boundary, and validates the resulting name. */
  @NotNull
  private static File calcName(@NotNull File dir, @NotNull String prefix, @NotNull String suffix, int i) throws IOException {
    prefix = i == 0 ? prefix : prefix + i;
    if (prefix.endsWith(".") && suffix.startsWith(".")) {
      prefix = prefix.substring(0, prefix.length() - 1);
    }
    String name = prefix + suffix;
    File f = new File(dir, name);
    if (!name.equals(f.getName())) {
      throw new IOException("A generated name is malformed: '" + name + "' (" + f + ")");
    }
    return f;
  }

  @NotNull
  private static File normalizeFile(@NotNull File temp) throws IOException {
    File canonical = temp.getCanonicalFile();
    // NOTE(review): on Windows a canonical path containing spaces is avoided — presumably to keep paths tool-friendly; confirm
    return SystemInfoRt.isWindows && canonical.getAbsolutePath().contains(" ") ? temp.getAbsoluteFile() : canonical;
  }

  /** Returns the cached canonical path of the system temp directory. */
  @NotNull
  public static String getTempDirectory() {
    if (ourCanonicalTempPathCache == null) {
      ourCanonicalTempPathCache = calcCanonicalTempPath();
    }
    return ourCanonicalTempPathCache;
  }

  @NotNull
  private static String calcCanonicalTempPath() {
    File file = new File(System.getProperty("java.io.tmpdir"));
    try {
      String canonical = file.getCanonicalPath();
      if (!SystemInfoRt.isWindows || !canonical.contains(" ")) {
        return canonical;
      }
    }
    catch (IOException ignore) { }
    return file.getAbsolutePath();
  }

  @TestOnly
  static void resetCanonicalTempPathCache(@NotNull String tempPath) {
    ourCanonicalTempPathCache = tempPath;
  }

  /** Returns a not-yet-existing path under the temp directory; tries up to 5 random UUID names. */
  @NotNull
  public static File generateRandomTemporaryPath() throws IOException {
    File file = new File(getTempDirectory(), UUID.randomUUID().toString());
    int i = 0;
    while (file.exists() && i < 5) {
      file = new File(getTempDirectory(), UUID.randomUUID().toString());
      ++i;
    }
    if (file.exists()) {
      throw new IOException("Couldn't generate unique random path.");
    }
    return normalizeFile(file);
  }

  @NotNull
  public static String loadFile(@NotNull File file) throws IOException {
    return loadFile(file, null, false);
  }

  @NotNull
  public static String loadFile(@NotNull File file, boolean convertLineSeparators) throws IOException {
    return loadFile(file, null, convertLineSeparators);
  }

  @NotNull
  public static String loadFile(@NotNull File file, @Nullable String encoding) throws IOException {
    return loadFile(file, encoding, false);
  }

  /** Loads the whole file into a string; {@code null} encoding means the platform default charset. */
  @NotNull
  public static String loadFile(@NotNull File file, @Nullable String encoding, boolean convertLineSeparators) throws IOException {
    String s = new String(loadFileText(file, encoding));
    return convertLineSeparators ? StringUtilRt.convertLineSeparators(s) : s;
  }

  @NotNull
  public static char[] loadFileText(@NotNull File file) throws IOException {
    return loadFileText(file, (String)null);
  }

  @NotNull
  public static char[] loadFileText(@NotNull File file, @Nullable String encoding) throws IOException {
    InputStream stream = new FileInputStream(file);
    Reader reader = encoding == null ? new InputStreamReader(stream, Charset.defaultCharset()) : new InputStreamReader(stream, encoding);
    try {
      return loadText(reader, (int)file.length());
    }
    finally {
      reader.close();
    }
  }

  @NotNull
  public static char[] loadFileText(@NotNull File file, @NotNull Charset encoding) throws IOException {
    Reader reader = new InputStreamReader(new FileInputStream(file), encoding);
    try {
      return loadText(reader, (int)file.length());
    }
    finally {
      reader.close();
    }
  }

  /** Reads up to {@code length} chars from {@code reader}; the result is trimmed to the number actually read. */
  @NotNull
  public static char[] loadText(@NotNull Reader reader, int length) throws IOException {
    char[] chars = new char[length];
    int count = 0;
    while (count < chars.length) {
      int n = reader.read(chars, count, chars.length - count);
      if (n <= 0) break;
      count += n;
    }
    if (count == chars.length) {
      return chars;
    }
    else {
      return Arrays.copyOf(chars, count);
    }
  }

  @NotNull
  public static List<String> loadLines(@NotNull File file) throws IOException {
    return loadLines(file.getPath());
  }

  @NotNull
  public static List<String> loadLines(@NotNull File file, @Nullable String encoding) throws IOException {
    return loadLines(file.getPath(), encoding);
  }

  @NotNull
  public static List<String> loadLines(@NotNull String path) throws IOException {
    return loadLines(path, null);
  }

  /** Loads the file as a list of lines; {@code null} encoding means the platform default charset. */
  @NotNull
  public static List<String> loadLines(@NotNull String path, @Nullable String encoding) throws IOException {
    InputStream stream = new FileInputStream(path);
    BufferedReader reader = new BufferedReader(encoding == null ? new InputStreamReader(stream, Charset.defaultCharset()) : new InputStreamReader(stream, encoding));
    try {
      return loadLines(reader);
    }
    finally {
      reader.close();
    }
  }

  @NotNull
  public static List<String> loadLines(@NotNull BufferedReader reader) throws IOException {
    List<String> lines = new ArrayList<String>();
    String line;
    while ((line = reader.readLine()) != null) {
      lines.add(line);
    }
    return lines;
  }

  @NotNull
  public static byte[] loadBytes(@NotNull InputStream stream) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    copy(stream, buffer);
    return buffer.toByteArray();
  }

  public static boolean isTooLarge(long len) {
    return len > LARGE_FOR_CONTENT_LOADING;
  }

  /** Reads up to {@code length} bytes; note the returned array is always {@code length} long even if fewer bytes were read. */
  @NotNull
  public static byte[] loadBytes(@NotNull InputStream stream, int length) throws IOException {
    if (length == 0) {
      return ArrayUtilRt.EMPTY_BYTE_ARRAY;
    }
    byte[] bytes = new byte[length];
    int count = 0;
    while (count < length) {
      int n = stream.read(bytes, count, length - count);
      if (n <= 0) break;
      count += n;
    }
    return bytes;
  }

  /**
   * Get parent for the file. The method correctly
   * processes "." and ".." in file names. The name
   * remains relative if was relative before.
   *
   * @param file a file to analyze
   * @return files's parent, or {@code null} if the file has no parent.
   */
  @Nullable
  public static File getParentFile(@NotNull File file) {
    int skipCount = 0;  // number of real path components cancelled by pending ".." components
    File parentFile = file;
    while (true) {
      parentFile = parentFile.getParentFile();
      if (parentFile == null) {
        return null;
      }
      if (".".equals(parentFile.getName())) {
        continue;
      }
      if ("..".equals(parentFile.getName())) {
        skipCount++;
        continue;
      }
      if (skipCount > 0) {
        skipCount--;
        continue;
      }
      return parentFile;
    }
  }

  /**
   * <b>IMPORTANT</b>: the method is not symlinks- or junction-aware when invoked on Java 6 or earlier.
   *
   * @param file file or directory to delete
   * @return {@code true} if the file did not exist or was successfully deleted
   */
  public static boolean delete(@NotNull File file) {
    if (NIOReflect.IS_AVAILABLE) {
      try {
        deleteRecursivelyNIO(NIOReflect.toPath(file), null);
        return true;
      }
      catch (IOException e) {
        return false;
      }
      catch (Exception e) {
        logger().info(e);
        return false;
      }
    }
    else {
      return deleteRecursively(file);
    }
  }

  static void deleteRecursivelyNIO(@NotNull Object path, @Nullable Consumer<Object> callback) throws IOException {
    try {
      NIOReflect.deleteRecursively(path, callback);
    }
    catch (InvocationTargetException e) {
      // unwrap the reflective wrapper so callers see the original exception type
      Throwable cause = e.getCause();
      if (cause instanceof IOException) throw (IOException)cause;
      if (cause instanceof RuntimeException) throw (RuntimeException)cause;
      throw new IllegalStateException(e);
    }
    catch (IllegalAccessException e) {
      throw new IllegalStateException(e);
    }
  }

  /** Legacy java.io recursive deletion used when the NIO API is unavailable. */
  private static boolean deleteRecursively(@NotNull File file) {
    File[] files = file.listFiles();
    if (files != null) {
      for (File child : files) {
        if (!deleteRecursively(child)) return false;
      }
    }

    return deleteFile(file);
  }

  /** A retriable I/O action: return a non-null result to stop, {@code null} to retry. */
  public interface RepeatableIOOperation<T, E extends Throwable> {
    @Nullable T execute(boolean lastAttempt) throws E;
  }

  /** Runs {@code ioTask} up to {@link #MAX_FILE_IO_ATTEMPTS} times with a 10 ms pause between attempts. */
  @Nullable
  public static <T, E extends Throwable> T doIOOperation(@NotNull RepeatableIOOperation<T, E> ioTask) throws E {
    for (int i = MAX_FILE_IO_ATTEMPTS; i > 0; i--) {
      T result = ioTask.execute(i == 1);
      if (result != null) return result;

      try {
        Thread.sleep(10);
      }
      catch (InterruptedException ignored) { }
    }
    return null;
  }

  /** Deletes a single file with retries; success also means the file no longer exists. */
  protected static boolean deleteFile(@NotNull final File file) {
    Boolean result = doIOOperation(new RepeatableIOOperation<Boolean, RuntimeException>() {
      @Override
      public Boolean execute(boolean lastAttempt) {
        if (file.delete() || !file.exists()) return Boolean.TRUE;
        else if (lastAttempt) return Boolean.FALSE;
        else return null;
      }
    });
    return Boolean.TRUE.equals(result);
  }

  /** Checks writability for an existing file, or that a new file can be created (the probe file is removed afterwards). */
  public static boolean ensureCanCreateFile(@NotNull File file) {
    if (file.exists()) return file.canWrite();
    if (!createIfNotExists(file)) return false;
    return delete(file);
  }

  public static boolean createIfNotExists(@NotNull File file) {
    if (file.exists()) return true;
    try {
      if (!createParentDirs(file)) return false;

      OutputStream s = new FileOutputStream(file);
      s.close();
      return true;
    }
    catch (IOException e) {
      logger().info(e);
      return false;
    }
  }

  public static boolean createParentDirs(@NotNull File file) {
    File parentPath = file.getParentFile();
    return parentPath == null || createDirectory(parentPath);
  }

  public static boolean createDirectory(@NotNull File path) {
    return path.isDirectory() || path.mkdirs();
  }

  /** Copies {@code fromFile} to {@code toFile}, then tries to preserve the last-modified timestamp. */
  public static void copy(@NotNull File fromFile, @NotNull File toFile) throws IOException {
    if (!ensureCanCreateFile(toFile)) {
      return;
    }

    FileOutputStream fos = new FileOutputStream(toFile);
    try {
      FileInputStream fis = new FileInputStream(fromFile);
      try {
        copy(fis, fos);
      }
      finally {
        fis.close();
      }
    }
    finally {
      fos.close();
    }

    long timeStamp = fromFile.lastModified();
    if (timeStamp < 0) {
      logger().warn("Invalid timestamp " + timeStamp + " of '" + fromFile + "'");
    }
    else if (!toFile.setLastModified(timeStamp)) {
      logger().warn("Unable to set timestamp " + timeStamp + " to '" + toFile + "'");
    }
  }

  /** Stream copy; uses FileChannel.transferTo when both ends are files and "idea.fs.useChannels" is set. */
  public static void copy(@NotNull InputStream inputStream, @NotNull OutputStream outputStream) throws IOException {
    if (USE_FILE_CHANNELS && inputStream instanceof FileInputStream && outputStream instanceof FileOutputStream) {
      FileChannel fromChannel = ((FileInputStream)inputStream).getChannel();
      try {
        FileChannel toChannel = ((FileOutputStream)outputStream).getChannel();
        try {
          fromChannel.transferTo(0, Long.MAX_VALUE, toChannel);
        }
        finally {
          toChannel.close();
        }
      }
      finally {
        fromChannel.close();
      }
    }
    else {
      byte[] buffer = new byte[8192];
      while (true) {
        int read = inputStream.read(buffer);
        if (read < 0) break;
        outputStream.write(buffer, 0, read);
      }
    }
  }

  public static int getUserFileSizeLimit() {
    return parseKilobyteProperty("idea.max.intellisense.filesize", DEFAULT_INTELLISENSE_LIMIT);
  }

  public static int getUserContentLoadLimit() {
    return parseKilobyteProperty("idea.max.content.load.filesize", 20 * MEGABYTE);
  }

  private static int getLargeFilePreviewSize() {
    return parseKilobyteProperty("idea.max.content.load.large.preview.size", DEFAULT_INTELLISENSE_LIMIT);
  }

  /** Reads a size-in-kilobytes system property and returns bytes; negative means "unlimited", parse failures yield {@code defaultValue}. */
  private static int parseKilobyteProperty(String key, int defaultValue) {
    try {
      long i = Integer.parseInt(System.getProperty(key, String.valueOf(defaultValue / KILOBYTE)));
      if (i < 0) return Integer.MAX_VALUE;
      return (int) Math.min(i * KILOBYTE, Integer.MAX_VALUE);
    }
    catch (NumberFormatException e) {
      return defaultValue;
    }
  }

  // Char-equality strategy used by getRelativePath for case-sensitive vs. case-insensitive file systems.
  private interface CharComparingStrategy {
    CharComparingStrategy IDENTITY = new CharComparingStrategy() {
      @Override
      public boolean charsEqual(char ch1, char ch2) {
        return ch1 == ch2;
      }
    };
    CharComparingStrategy CASE_INSENSITIVE = new CharComparingStrategy() {
      @Override
      public boolean charsEqual(char ch1, char ch2) {
        return StringUtilRt.charsEqualIgnoreCase(ch1, ch2);
      }
    };

    boolean charsEqual(char ch1, char ch2);
  }

  private static LoggerRt logger() {
    return LoggerRt.getInstance("#com.intellij.openapi.util.io.FileUtilRt");
  }

  /**
   * Energy-efficient variant of {@link File#toURI()}. Unlike the latter, doesn't check whether a given file is a directory,
   * so URIs never have a trailing slash (but are nevertheless compatible with {@link File#File(URI)}).
   */
  public static @NotNull URI fileToUri(@NotNull File file) {
    String path = file.getAbsolutePath();
    if (File.separatorChar != '/') path = path.replace(File.separatorChar, '/');
    if (!path.startsWith("/")) path = '/' + path;
    if (path.startsWith("//")) path = "//" + path;  // NOTE(review): UNC paths get doubled leading slashes — presumably for File(URI) round-tripping; confirm
    try {
      return new URI("file", null, path, null);
    }
    catch (URISyntaxException e) {
      throw new IllegalArgumentException(path, e);  // unlikely, as `File#toURI()` doesn't declare any exceptions
    }
  }

  /** Hash code of the canonicalized path, case-insensitive when the file system is. */
  public static int pathHashCode(@Nullable String path) {
    if (path == null || path.isEmpty()) {
      return 0;
    }
    path = toCanonicalPath(path, File.separatorChar, true);
    return SystemInfoRt.isFileSystemCaseSensitive ? path.hashCode() : StringUtilRt.stringHashCodeInsensitive(path);
  }

  public static boolean filesEqual(@Nullable File file1, @Nullable File file2) {
    // on macOS java.io.File.equals() is incorrectly case-sensitive
    return pathsEqual(file1 == null ? null : file1.getPath(), file2 == null ? null : file2.getPath());
  }

  /** Compares two paths after canonicalization, honoring the file system's case sensitivity. */
  public static boolean pathsEqual(@Nullable String path1, @Nullable String path2) {
    if (path1 == path2) {
      return true;
    }
    if (path1 == null || path2 == null) {
      return false;
    }

    path1 = toCanonicalPath(path1, File.separatorChar, true);
    path2 = toCanonicalPath(path2, File.separatorChar, true);
    if (SystemInfoRt.isFileSystemCaseSensitive) {
      return path1.equals(path2);
    }
    else {
      return path1.equalsIgnoreCase(path2);
    }
  }
}
/* * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.storage; import static com.google.cloud.storage.Acl.Project.ProjectRole.VIEWERS; import static com.google.cloud.storage.Acl.Role.READER; import static com.google.cloud.storage.Acl.Role.WRITER; import static org.easymock.EasyMock.capture; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createStrictMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import com.google.cloud.Page; import com.google.cloud.PageImpl; import com.google.cloud.storage.Acl.Project; import com.google.cloud.storage.Acl.User; import com.google.cloud.storage.BatchResponse.Result; import com.google.cloud.storage.BucketInfo.AgeDeleteRule; import com.google.cloud.storage.BucketInfo.DeleteRule; import com.google.common.collect.ImmutableList; import org.easymock.Capture; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; 
/**
 * Unit tests for {@link Bucket}. Uses EasyMock: {@code storage} is a strict mock
 * verified in {@link #tearDown()}, so every expectation must be met in order;
 * {@code serviceMockReturnsOptions} is a lenient mock that only answers
 * {@code options()} for the "expected" fixture objects.
 */
public class BucketTest {

  // Fixture values used to populate BucketInfo instances under test.
  private static final List<Acl> ACL = ImmutableList.of(
      Acl.of(User.ofAllAuthenticatedUsers(), READER), Acl.of(new Project(VIEWERS, "p1"), WRITER));
  private static final String ETAG = "0xFF00";
  private static final String GENERATED_ID = "B/N:1";
  private static final Long META_GENERATION = 10L;
  private static final User OWNER = new User("user@gmail.com");
  private static final String SELF_LINK = "http://storage/b/n";
  private static final Long CREATE_TIME = System.currentTimeMillis();
  private static final List<Cors> CORS = Collections.singletonList(Cors.builder().build());
  private static final List<Acl> DEFAULT_ACL =
      Collections.singletonList(Acl.of(User.ofAllAuthenticatedUsers(), WRITER));
  private static final List<? extends DeleteRule> DELETE_RULES =
      Collections.singletonList(new AgeDeleteRule(5));
  private static final String INDEX_PAGE = "index.html";
  private static final String NOT_FOUND_PAGE = "error.html";
  private static final String LOCATION = "ASIA";
  private static final String STORAGE_CLASS = "STANDARD";
  private static final Boolean VERSIONING_ENABLED = true;
  // BucketInfo with every field populated; exercised by the builder round-trip tests.
  private static final BucketInfo FULL_BUCKET_INFO = BucketInfo.builder("b")
      .acl(ACL)
      .etag(ETAG)
      .generatedId(GENERATED_ID)
      .metageneration(META_GENERATION)
      .owner(OWNER)
      .selfLink(SELF_LINK)
      .cors(CORS)
      .createTime(CREATE_TIME)
      .defaultAcl(DEFAULT_ACL)
      .deleteRules(DELETE_RULES)
      .indexPage(INDEX_PAGE)
      .notFoundPage(NOT_FOUND_PAGE)
      .location(LOCATION)
      .storageClass(STORAGE_CLASS)
      .versioningEnabled(VERSIONING_ENABLED)
      .build();
  // Minimal BucketInfo used by most tests.
  private static final BucketInfo BUCKET_INFO = BucketInfo.builder("b").metageneration(42L).build();
  private static final String CONTENT_TYPE = "text/plain";

  // Strict mock: every expected call must occur, in order; verified in tearDown().
  private Storage storage;
  // Lenient mock whose only job is answering options() for "expected" objects.
  private Storage serviceMockReturnsOptions = createMock(Storage.class);
  private StorageOptions mockOptions = createMock(StorageOptions.class);
  private Bucket bucket;
  private Bucket expectedBucket;
  private Iterable<Blob> blobResults;

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Before
  public void setUp() {
    storage = createStrictMock(Storage.class);
  }

  @After
  public void tearDown() throws Exception {
    // Fails the test if any expectation set on the strict mock was not met.
    verify(storage);
  }

  /**
   * Builds the "expected" fixtures backed by the lenient mock.
   * @param optionsCalls how many times the test under construction will call
   *   options() on the lenient mock
   */
  private void initializeExpectedBucket(int optionsCalls) {
    expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls);
    replay(serviceMockReturnsOptions);
    expectedBucket = new Bucket(serviceMockReturnsOptions, new BucketInfo.BuilderImpl(BUCKET_INFO));
    blobResults = ImmutableList.of(
        new Blob(serviceMockReturnsOptions,
            new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n1").build())),
        new Blob(serviceMockReturnsOptions,
            new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n2").build())),
        new Blob(serviceMockReturnsOptions,
            new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n3").build())));
  }

  // Creates the bucket under test, backed by the strict mock; call after replay(storage).
  private void initializeBucket() {
    bucket = new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO));
  }

  @Test
  public void testExists_True() throws Exception {
    initializeExpectedBucket(4);
    // exists() is expected to issue a fields-only (metadata-free) get.
    Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()};
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(expectedBucket);
    replay(storage);
    initializeBucket();
    assertTrue(bucket.exists());
  }

  @Test
  public void testExists_False() throws Exception {
    initializeExpectedBucket(4);
    Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()};
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(null);
    replay(storage);
    initializeBucket();
    assertFalse(bucket.exists());
  }

  @Test
  public void testReload() throws Exception {
    initializeExpectedBucket(5);
    BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build();
    Bucket expectedUpdatedBucket =
        new Bucket(serviceMockReturnsOptions, new BucketInfo.BuilderImpl(updatedInfo));
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.get(updatedInfo.name())).andReturn(expectedUpdatedBucket);
    replay(storage);
    initializeBucket();
    Bucket updatedBucket = bucket.reload();
    assertEquals(expectedUpdatedBucket, updatedBucket);
  }

  @Test
  public void testReloadNull() throws Exception {
    initializeExpectedBucket(4);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.get(BUCKET_INFO.name())).andReturn(null);
    replay(storage);
    initializeBucket();
    // A bucket deleted on the server reloads to null, not an exception.
    assertNull(bucket.reload());
  }

  @Test
  public void testReloadWithOptions() throws Exception {
    initializeExpectedBucket(5);
    BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build();
    Bucket expectedUpdatedBucket =
        new Bucket(serviceMockReturnsOptions, new BucketInfo.BuilderImpl(updatedInfo));
    expect(storage.options()).andReturn(mockOptions);
    // The source option should be converted to a get option carrying the
    // bucket's own metageneration (42).
    expect(storage.get(updatedInfo.name(), Storage.BucketGetOption.metagenerationMatch(42L)))
        .andReturn(expectedUpdatedBucket);
    replay(storage);
    initializeBucket();
    Bucket updatedBucket = bucket.reload(Bucket.BucketSourceOption.metagenerationMatch());
    assertEquals(expectedUpdatedBucket, updatedBucket);
  }

  @Test
  public void testUpdate() throws Exception {
    initializeExpectedBucket(5);
    Bucket expectedUpdatedBucket = expectedBucket.toBuilder().notFoundPage("p").build();
    expect(storage.options()).andReturn(mockOptions).times(2);
    expect(storage.update(expectedUpdatedBucket)).andReturn(expectedUpdatedBucket);
    replay(storage);
    initializeBucket();
    Bucket updatedBucket = new Bucket(storage, new BucketInfo.BuilderImpl(expectedUpdatedBucket));
    Bucket actualUpdatedBucket = updatedBucket.update();
    assertEquals(expectedUpdatedBucket, actualUpdatedBucket);
  }

  @Test
  public void testDelete() throws Exception {
    initializeExpectedBucket(4);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.delete(BUCKET_INFO.name())).andReturn(true);
    replay(storage);
    initializeBucket();
    assertTrue(bucket.delete());
  }

  @Test
  public void testList() throws Exception {
    initializeExpectedBucket(4);
    PageImpl<Blob> expectedBlobPage = new PageImpl<>(null, "c", blobResults);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.list(BUCKET_INFO.name())).andReturn(expectedBlobPage);
    replay(storage);
    initializeBucket();
    Page<Blob> blobPage = bucket.list();
    Iterator<Blob> blobInfoIterator = blobPage.values().iterator();
    Iterator<Blob> blobIterator = blobPage.values().iterator();
    while (blobInfoIterator.hasNext() && blobIterator.hasNext()) {
      assertEquals(blobInfoIterator.next(), blobIterator.next());
    }
    assertFalse(blobInfoIterator.hasNext());
    assertFalse(blobIterator.hasNext());
    assertEquals(expectedBlobPage.nextPageCursor(), blobPage.nextPageCursor());
  }

  @Test
  public void testGet() throws Exception {
    initializeExpectedBucket(5);
    Blob expectedBlob = new Blob(
        serviceMockReturnsOptions, new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n").build()));
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.get(BlobId.of(expectedBucket.name(), "n"), new Storage.BlobGetOption[0]))
        .andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.get("n");
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testGetAll() throws Exception {
    initializeExpectedBucket(4);
    // Capture the batch request to verify it contains exactly the requested blob ids.
    Capture<BatchRequest> capturedBatchRequest = Capture.newInstance();
    List<Result<Blob>> batchResultList = new LinkedList<>();
    for (Blob info : blobResults) {
      batchResultList.add(new Result<>(info));
    }
    BatchResponse response = new BatchResponse(Collections.<Result<Boolean>>emptyList(),
        Collections.<Result<Blob>>emptyList(), batchResultList);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.submit(capture(capturedBatchRequest))).andReturn(response);
    expect(storage.options()).andReturn(mockOptions).times(3);
    replay(storage);
    initializeBucket();
    List<Blob> blobs = bucket.get("n1", "n2", "n3");
    Set<BlobId> blobInfoSet = capturedBatchRequest.getValue().toGet().keySet();
    assertEquals(batchResultList.size(), blobInfoSet.size());
    for (BlobInfo info : blobResults) {
      assertTrue(blobInfoSet.contains(info.blobId()));
    }
    Iterator<Blob> blobIterator = blobs.iterator();
    Iterator<Result<Blob>> batchResultIterator = response.gets().iterator();
    while (batchResultIterator.hasNext() && blobIterator.hasNext()) {
      assertEquals(batchResultIterator.next().get(), blobIterator.next());
    }
    assertFalse(batchResultIterator.hasNext());
    assertFalse(blobIterator.hasNext());
  }

  @Test
  public void testCreate() throws Exception {
    initializeExpectedBucket(5);
    BlobInfo info = BlobInfo.builder("b", "n").contentType(CONTENT_TYPE).build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, content)).andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.create("n", content, CONTENT_TYPE);
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateNoContentType() throws Exception {
    initializeExpectedBucket(5);
    BlobInfo info = BlobInfo.builder("b", "n").build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, content)).andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.create("n", content);
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateWithOptions() throws Exception {
    initializeExpectedBucket(5);
    // The values carried by the Bucket-level options must be transferred into
    // the BlobInfo; the Storage-level options are value-less.
    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 42L))
        .contentType(CONTENT_TYPE)
        .metageneration(24L)
        .build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    Storage.PredefinedAcl acl = Storage.PredefinedAcl.ALL_AUTHENTICATED_USERS;
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, content, Storage.BlobTargetOption.generationMatch(),
        Storage.BlobTargetOption.metagenerationMatch(),
        Storage.BlobTargetOption.predefinedAcl(acl))).andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.create("n", content, CONTENT_TYPE,
        Bucket.BlobTargetOption.generationMatch(42L),
        Bucket.BlobTargetOption.metagenerationMatch(24L),
        Bucket.BlobTargetOption.predefinedAcl(acl));
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateNotExists() throws Exception {
    initializeExpectedBucket(5);
    // doesNotExist() is implemented as generationMatch with generation 0.
    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 0L)).contentType(CONTENT_TYPE).build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, content, Storage.BlobTargetOption.generationMatch()))
        .andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.create("n", content, CONTENT_TYPE, Bucket.BlobTargetOption.doesNotExist());
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateWithWrongGenerationOptions() throws Exception {
    initializeExpectedBucket(4);
    expect(storage.options()).andReturn(mockOptions);
    replay(storage);
    initializeBucket();
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(
        "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided");
    bucket.create("n", content, CONTENT_TYPE, Bucket.BlobTargetOption.generationMatch(42L),
        Bucket.BlobTargetOption.generationNotMatch(24L));
  }

  @Test
  public void testCreateWithWrongMetagenerationOptions() throws Exception {
    initializeExpectedBucket(4);
    expect(storage.options()).andReturn(mockOptions);
    replay(storage);
    initializeBucket();
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(
        "metagenerationMatch and metagenerationNotMatch options can not be both provided");
    bucket.create("n", content, CONTENT_TYPE, Bucket.BlobTargetOption.metagenerationMatch(42L),
        Bucket.BlobTargetOption.metagenerationNotMatch(24L));
  }

  @Test
  public void testCreateFromStream() throws Exception {
    initializeExpectedBucket(5);
    BlobInfo info = BlobInfo.builder("b", "n").contentType(CONTENT_TYPE).build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    InputStream streamContent = new ByteArrayInputStream(content);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, streamContent)).andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.create("n", streamContent, CONTENT_TYPE);
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateFromStreamNoContentType() throws Exception {
    initializeExpectedBucket(5);
    BlobInfo info = BlobInfo.builder("b", "n").build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    InputStream streamContent = new ByteArrayInputStream(content);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, streamContent)).andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.create("n", streamContent);
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateFromStreamWithOptions() throws Exception {
    initializeExpectedBucket(5);
    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 42L))
        .contentType(CONTENT_TYPE)
        .metageneration(24L)
        .crc32c("crc")
        .md5("md5")
        .build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    Storage.PredefinedAcl acl = Storage.PredefinedAcl.ALL_AUTHENTICATED_USERS;
    InputStream streamContent = new ByteArrayInputStream(content);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, streamContent, Storage.BlobWriteOption.generationMatch(),
        Storage.BlobWriteOption.metagenerationMatch(), Storage.BlobWriteOption.predefinedAcl(acl),
        Storage.BlobWriteOption.crc32cMatch(), Storage.BlobWriteOption.md5Match()))
        .andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob = bucket.create("n", streamContent, CONTENT_TYPE,
        Bucket.BlobWriteOption.generationMatch(42L),
        Bucket.BlobWriteOption.metagenerationMatch(24L), Bucket.BlobWriteOption.predefinedAcl(acl),
        Bucket.BlobWriteOption.crc32cMatch("crc"), Bucket.BlobWriteOption.md5Match("md5"));
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateFromStreamNotExists() throws Exception {
    initializeExpectedBucket(5);
    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 0L)).contentType(CONTENT_TYPE).build();
    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    InputStream streamContent = new ByteArrayInputStream(content);
    expect(storage.options()).andReturn(mockOptions);
    expect(storage.create(info, streamContent, Storage.BlobWriteOption.generationMatch()))
        .andReturn(expectedBlob);
    replay(storage);
    initializeBucket();
    Blob blob =
        bucket.create("n", streamContent, CONTENT_TYPE, Bucket.BlobWriteOption.doesNotExist());
    assertEquals(expectedBlob, blob);
  }

  @Test
  public void testCreateFromStreamWithWrongGenerationOptions() throws Exception {
    initializeExpectedBucket(4);
    expect(storage.options()).andReturn(mockOptions);
    replay(storage);
    initializeBucket();
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    InputStream streamContent = new ByteArrayInputStream(content);
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(
        "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided");
    bucket.create("n", streamContent, CONTENT_TYPE, Bucket.BlobWriteOption.generationMatch(42L),
        Bucket.BlobWriteOption.generationNotMatch(24L));
  }

  @Test
  public void testCreateFromStreamWithWrongMetagenerationOptions() throws Exception {
    initializeExpectedBucket(4);
    expect(storage.options()).andReturn(mockOptions);
    replay(storage);
    initializeBucket();
    byte[] content = {0xD, 0xE, 0xA, 0xD};
    InputStream streamContent = new ByteArrayInputStream(content);
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(
        "metagenerationMatch and metagenerationNotMatch options can not be both provided");
    bucket.create("n", streamContent, CONTENT_TYPE,
        Bucket.BlobWriteOption.metagenerationMatch(42L),
        Bucket.BlobWriteOption.metagenerationNotMatch(24L));
  }

  @Test
  public void testToBuilder() {
    expect(storage.options()).andReturn(mockOptions).times(4);
    replay(storage);
    Bucket fullBucket = new Bucket(storage, new BucketInfo.BuilderImpl(FULL_BUCKET_INFO));
    assertEquals(fullBucket, fullBucket.toBuilder().build());
    Bucket simpleBlob = new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO));
    assertEquals(simpleBlob, simpleBlob.toBuilder().build());
  }

  @Test
  public void testBuilder() {
    initializeExpectedBucket(4);
    expect(storage.options()).andReturn(mockOptions).times(4);
    replay(storage);
    Bucket.Builder builder =
        new Bucket.Builder(new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO)));
    Bucket bucket = builder.acl(ACL)
        .etag(ETAG)
        .generatedId(GENERATED_ID)
        .metageneration(META_GENERATION)
        .owner(OWNER)
        .selfLink(SELF_LINK)
        .cors(CORS)
        .createTime(CREATE_TIME)
        .defaultAcl(DEFAULT_ACL)
        .deleteRules(DELETE_RULES)
        .indexPage(INDEX_PAGE)
        .notFoundPage(NOT_FOUND_PAGE)
        .location(LOCATION)
        .storageClass(STORAGE_CLASS)
        .versioningEnabled(VERSIONING_ENABLED)
        .build();
    assertEquals("b", bucket.name());
    assertEquals(ACL, bucket.acl());
    assertEquals(ETAG, bucket.etag());
    assertEquals(GENERATED_ID, bucket.generatedId());
    assertEquals(META_GENERATION, bucket.metageneration());
    assertEquals(OWNER, bucket.owner());
    assertEquals(SELF_LINK, bucket.selfLink());
    assertEquals(CREATE_TIME, bucket.createTime());
    assertEquals(CORS, bucket.cors());
    assertEquals(DEFAULT_ACL, bucket.defaultAcl());
    assertEquals(DELETE_RULES, bucket.deleteRules());
    assertEquals(INDEX_PAGE, bucket.indexPage());
    assertEquals(NOT_FOUND_PAGE, bucket.notFoundPage());
    assertEquals(LOCATION, bucket.location());
    assertEquals(STORAGE_CLASS, bucket.storageClass());
    assertEquals(VERSIONING_ENABLED, bucket.versioningEnabled());
    assertEquals(storage.options(), bucket.storage().options());
  }
}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.SequenceInputStream; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HConstants; import 
org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.protobuf.ProtobufMagic; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; import org.apache.hadoop.hbase.protobuf.generated.HFileProtos; import org.apache.hadoop.hbase.util.BloomFilterWriter; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.io.Writable; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; /** * File format for hbase. * A file of sorted key/value pairs. Both keys and values are byte arrays. * <p> * The memory footprint of a HFile includes the following (below is taken from the * <a * href=https://issues.apache.org/jira/browse/HADOOP-3315>TFile</a> documentation * but applies also to HFile): * <ul> * <li>Some constant overhead of reading or writing a compressed block. * <ul> * <li>Each compressed block requires one compression/decompression codec for * I/O. * <li>Temporary space to buffer the key. * <li>Temporary space to buffer the value. * </ul> * <li>HFile index, which is proportional to the total number of Data Blocks. * The total amount of memory needed to hold the index can be estimated as * (56+AvgKeySize)*NumBlocks. * </ul> * Suggestions on performance optimization. * <ul> * <li>Minimum block size. We recommend a setting of minimum block size between * 8KB to 1MB for general usage. Larger block size is preferred if files are * primarily for sequential access. However, it would lead to inefficient random * access (because there are more data to decompress). 
Smaller blocks are good
 * for random access, but require more memory to hold the block index, and may
 * be slower to create (because we must flush the compressor stream at the
 * conclusion of each data block, which leads to an FS I/O flush). Further, due
 * to the internal caching in Compression codec, the smallest possible block
 * size would be around 20KB-30KB.
 * <li>The current implementation does not offer true multi-threading for
 * reading. The implementation uses FSDataInputStream seek()+read(), which is
 * shown to be much faster than positioned-read call in single thread mode.
 * However, it also means that if multiple threads attempt to access the same
 * HFile (using multiple scanners) simultaneously, the actual I/O is carried out
 * sequentially even if they access different DFS blocks (Reexamine! pread seems
 * to be 10% faster than seek+read in my testing -- stack).
 * <li>Compression codec. Use "none" if the data is not very compressible (by
 * compressible, I mean a compression ratio of at least 2:1). Generally, use "lzo"
 * as the starting point for experimenting. "gz" offers a slightly better
 * compression ratio than "lzo" but requires 4x CPU to compress and 2x CPU to
 * decompress, compared to "lzo".
 * </ul>
 *
 * For more on the background behind HFile, see <a
 * href=https://issues.apache.org/jira/browse/HBASE-61>HBASE-61</a>.
 * <p>
 * File is made of data blocks followed by meta data blocks (if any), a fileinfo
 * block, data block index, meta data block index, and a fixed size trailer
 * which records the offsets at which file changes content type.
 * <pre>&lt;data blocks>&lt;meta blocks>&lt;fileinfo>&lt;data index>&lt;meta index>&lt;trailer></pre>
 * Each block has a bit of magic at its start. Blocks are composed of
 * key/values. In data blocks, they are both byte arrays. Metadata blocks are
 * a String key and a byte array value. An empty file looks like this:
 * <pre>&lt;fileinfo>&lt;trailer></pre>. That is, there are no data or meta
 * blocks present.
* <p>
 * TODO: Do scanners need to be able to take a start and end row?
 * TODO: Should BlockIndex know the name of its file?  Should it have a Path
 * that points at its file say for the case where an index lives apart from
 * an HFile instance?
 */
@InterfaceAudience.Private
public class HFile {
  // LOG is being used in HFileBlock and CheckSumUtil
  static final Log LOG = LogFactory.getLog(HFile.class);

  /**
   * Maximum length of key in HFile.
   */
  public final static int MAXIMUM_KEY_LENGTH = Integer.MAX_VALUE;

  /**
   * Default compression: none.
   */
  public final static Compression.Algorithm DEFAULT_COMPRESSION_ALGORITHM =
      Compression.Algorithm.NONE;

  /** Minimum supported HFile format version */
  public static final int MIN_FORMAT_VERSION = 2;

  /** Maximum supported HFile format version */
  public static final int MAX_FORMAT_VERSION = 3;

  /**
   * Minimum HFile format version with support for persisting cell tags
   */
  public static final int MIN_FORMAT_VERSION_WITH_TAGS = 3;

  /** Default compression name: none. */
  public final static String DEFAULT_COMPRESSION =
      DEFAULT_COMPRESSION_ALGORITHM.getName();

  /** Meta data block name for bloom filter bits. */
  public static final String BLOOM_FILTER_DATA_KEY = "BLOOM_FILTER_DATA";

  /**
   * We assume that HFile path ends with
   * ROOT_DIR/TABLE_NAME/REGION_NAME/CF_NAME/HFILE, so it has at least this
   * many levels of nesting. This is needed for identifying table and CF name
   * from an HFile path.
   */
  public final static int MIN_NUM_HFILE_PATH_LEVELS = 5;

  /**
   * The number of bytes per checksum.
   */
  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;

  // TODO: This define is done in three places.  Fix.
  public static final ChecksumType DEFAULT_CHECKSUM_TYPE = ChecksumType.CRC32;

  // For measuring number of checksum failures
  static final AtomicLong checksumFailures = new AtomicLong();

  // for test purpose
  public static final AtomicLong dataBlockReadCnt = new AtomicLong(0);

  /**
   * Number of checksum verification failures. It also
   * clears the counter.
*/
  public static final long getChecksumFailuresCount() {
    // getAndSet(0) both reports and resets the counter atomically.
    return checksumFailures.getAndSet(0);
  }

  /** API required to write an {@link HFile} */
  public interface Writer extends Closeable {
    /** Max memstore (mvcc) timestamp in FileInfo */
    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");

    /** Add an element to the file info map. */
    void appendFileInfo(byte[] key, byte[] value) throws IOException;

    /** Append a cell to the file; cells are expected in sorted order. */
    void append(Cell cell) throws IOException;

    /** @return the path to this {@link HFile} */
    Path getPath();

    /**
     * Adds an inline block writer such as a multi-level block index writer or
     * a compound Bloom filter writer.
     */
    void addInlineBlockWriter(InlineBlockWriter bloomWriter);

    // The below three methods take Writables.  We'd like to undo Writables but undoing the below
    // would be pretty painful.  Could take a byte [] or a Message but we want to be backward
    // compatible around hfiles so would need to map between Message and Writable or byte [] and
    // current Writable serialization.  This would be a bit of work to little gain.  Thats my
    // thinking at moment.  St.Ack 20121129

    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);

    /**
     * Store general Bloom filter in the file. This does not deal with Bloom filter
     * internals but is necessary, since Bloom filters are stored differently
     * in HFile version 1 and version 2.
     */
    void addGeneralBloomFilter(BloomFilterWriter bfw);

    /**
     * Store delete family Bloom filter in the file, which is only supported in
     * HFile V2.
     */
    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;

    /**
     * Return the file context for the HFile this writer belongs to
     */
    HFileContext getFileContext();
  }

  /**
   * This variety of ways to construct writers is used throughout the code, and
   * we want to be able to swap writer implementations.
*/ public static abstract class WriterFactory { protected final Configuration conf; protected final CacheConfig cacheConf; protected FileSystem fs; protected Path path; protected FSDataOutputStream ostream; protected CellComparator comparator = CellComparator.COMPARATOR; protected InetSocketAddress[] favoredNodes; private HFileContext fileContext; WriterFactory(Configuration conf, CacheConfig cacheConf) { this.conf = conf; this.cacheConf = cacheConf; } public WriterFactory withPath(FileSystem fs, Path path) { Preconditions.checkNotNull(fs); Preconditions.checkNotNull(path); this.fs = fs; this.path = path; return this; } public WriterFactory withOutputStream(FSDataOutputStream ostream) { Preconditions.checkNotNull(ostream); this.ostream = ostream; return this; } public WriterFactory withComparator(CellComparator comparator) { Preconditions.checkNotNull(comparator); this.comparator = comparator; return this; } public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) { // Deliberately not checking for null here. this.favoredNodes = favoredNodes; return this; } public WriterFactory withFileContext(HFileContext fileContext) { this.fileContext = fileContext; return this; } public Writer create() throws IOException { if ((path != null ? 1 : 0) + (ostream != null ? 
1 : 0) != 1) { throw new AssertionError("Please specify exactly one of " + "filesystem/path or path"); } if (path != null) { ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes); } return createWriter(fs, path, ostream, comparator, fileContext); } protected abstract Writer createWriter(FileSystem fs, Path path, FSDataOutputStream ostream, CellComparator comparator, HFileContext fileContext) throws IOException; } /** The configuration key for HFile version to use for new files */ public static final String FORMAT_VERSION_KEY = "hfile.format.version"; public static int getFormatVersion(Configuration conf) { int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION); checkFormatVersion(version); return version; } /** * Returns the factory to be used to create {@link HFile} writers. * Disables block cache access for all writers created through the * returned factory. */ public static final WriterFactory getWriterFactoryNoCache(Configuration conf) { Configuration tempConf = new Configuration(conf); tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f); return HFile.getWriterFactory(conf, new CacheConfig(tempConf)); } /** * Returns the factory to be used to create {@link HFile} writers */ public static final WriterFactory getWriterFactory(Configuration conf, CacheConfig cacheConf) { int version = getFormatVersion(conf); switch (version) { case 2: throw new IllegalArgumentException("This should never happen. " + "Did you change hfile.format.version to read v2? This version of the software writes v3" + " hfiles only (but it can read v2 files without having to update hfile.format.version " + "in hbase-site.xml)"); case 3: return new HFileWriterFactory(conf, cacheConf); default: throw new IllegalArgumentException("Cannot create writer for HFile " + "format version " + version); } } /** * An abstraction used by the block index. * Implementations will check cache for any asked-for block and return cached block if found. 
* Otherwise, after reading from fs, will try and put block into cache before returning.
   */
  public interface CachingBlockReader {
    /**
     * Read in a file block.
     * @param offset offset to read.
     * @param onDiskBlockSize size of the block
     * @param cacheBlock
     * @param pread
     * @param isCompaction is this block being read as part of a compaction
     * @param expectedBlockType the block type we are expecting to read with this read operation,
     *   or null to read whatever block type is available and avoid checking (that might reduce
     *   caching efficiency of encoded data blocks)
     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks
     *   to be in, or null to not perform this check and return the block irrespective of the
     *   encoding. This check only applies to data blocks and can be set to null when the caller is
     *   expecting to read a non-data block and has set expectedBlockType accordingly.
     * @return Block wrapped in a ByteBuffer.
     * @throws IOException
     */
    HFileBlock readBlock(long offset, long onDiskBlockSize,
        boolean cacheBlock, final boolean pread, final boolean isCompaction,
        final boolean updateCacheMetrics, BlockType expectedBlockType,
        DataBlockEncoding expectedDataBlockEncoding)
        throws IOException;
  }

  /** An interface used by clients to open and iterate an {@link HFile}. */
  public interface Reader extends Closeable, CachingBlockReader {
    /**
     * Returns this reader's "name". Usually the last component of the path.
     * Needs to be constant as the file is being moved to support caching on
     * write.
     */
    String getName();

    CellComparator getComparator();

    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);

    ByteBuffer getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;

    Map<byte[], byte[]> loadFileInfo() throws IOException;

    byte[] getLastKey();

    byte[] midkey() throws IOException;

    long length();

    long getEntries();

    byte[] getFirstKey();

    long indexSize();

    byte[] getFirstRowKey();

    byte[] getLastRowKey();

    FixedFileTrailer getTrailer();

    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();

    HFileScanner getScanner(boolean cacheBlocks, boolean pread);

    Compression.Algorithm getCompressionAlgorithm();

    /**
     * Retrieves general Bloom filter metadata as appropriate for each
     * {@link HFile} version.
     * Knows nothing about how that metadata is structured.
     */
    DataInput getGeneralBloomFilterMetadata() throws IOException;

    /**
     * Retrieves delete family Bloom filter metadata as appropriate for each
     * {@link HFile} version.
     * Knows nothing about how that metadata is structured.
     */
    DataInput getDeleteBloomFilterMetadata() throws IOException;

    Path getPath();

    /** Close method with optional evictOnClose */
    void close(boolean evictOnClose) throws IOException;

    DataBlockEncoding getDataBlockEncoding();

    boolean hasMVCCInfo();

    /**
     * Return the file context of the HFile this reader belongs to
     */
    HFileContext getFileContext();

    boolean shouldIncludeMemstoreTS();

    boolean isDecodeMemstoreTS();

    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);

    @VisibleForTesting
    HFileBlock.FSReader getUncachedBlockReader();

    @VisibleForTesting
    boolean prefetchComplete();
  }

  /**
   * Method returns the reader given the specified arguments.
   * TODO This is a bad abstraction.  See HBASE-6635.
   *
   * @param path hfile's path
   * @param fsdis stream of path's file
   * @param size max size of the trailer.
   * @param cacheConf Cache configuration values, cannot be null.
* @param hfs * @return an appropriate instance of HFileReader * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException */ private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException { FixedFileTrailer trailer = null; try { boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum(); assert !isHBaseChecksum; // Initially we must read with FS checksum. trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size); switch (trailer.getMajorVersion()) { case 2: LOG.debug("Opening HFile v2 with v3 reader"); // Fall through. case 3 : return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs, conf); default: throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion()); } } catch (Throwable t) { try { fsdis.close(); } catch (Throwable t2) { LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2); } throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t); } } /** * @param fs A file system * @param path Path to HFile * @param fsdis a stream of path's file * @param size max size of the trailer. * @param cacheConf Cache configuration for hfile's contents * @param conf Configuration * @return A version specific Hfile Reader * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException */ @SuppressWarnings("resource") public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, Configuration conf) throws IOException { HFileSystem hfs = null; // If the fs is not an instance of HFileSystem, then create an // instance of HFileSystem that wraps over the specified fs. // In this case, we will not be able to avoid checksumming inside // the filesystem. 
if (!(fs instanceof HFileSystem)) { hfs = new HFileSystem(fs); } else { hfs = (HFileSystem)fs; } return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf); } /** * * @param fs filesystem * @param path Path to file to read * @param cacheConf This must not be null. @see {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)} * @return an active Reader instance * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile is corrupt/invalid. */ public static Reader createReader( FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException { Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf"); FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path); return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(), cacheConf, stream.getHfs(), conf); } /** * This factory method is used only by unit tests */ static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf) throws IOException { FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis); return pickReaderVersion(path, wrapper, size, cacheConf, null, conf); } /** * Returns true if the specified file has a valid HFile Trailer. * @param fs filesystem * @param path Path to file to verify * @return true if the file has a valid HFile Trailer, otherwise false * @throws IOException if failed to read from the underlying stream */ public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException { return isHFileFormat(fs, fs.getFileStatus(path)); } /** * Returns true if the specified file has a valid HFile Trailer. 
* @param fs filesystem * @param fileStatus the file to verify * @return true if the file has a valid HFile Trailer, otherwise false * @throws IOException if failed to read from the underlying stream */ public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus) throws IOException { final Path path = fileStatus.getPath(); final long size = fileStatus.getLen(); FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path); try { boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum(); assert !isHBaseChecksum; // Initially we must read with FS checksum. FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size); return true; } catch (IllegalArgumentException e) { return false; } catch (IOException e) { throw e; } finally { try { fsdis.close(); } catch (Throwable t) { LOG.warn("Error closing fsdis FSDataInputStreamWrapper: " + path, t); } } } /** * Metadata for this file. Conjured by the writer. Read in by the reader. */ public static class FileInfo implements SortedMap<byte[], byte[]> { static final String RESERVED_PREFIX = "hfile."; static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX); static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY"); static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN"); static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN"); static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS"); static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR"); static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED"); public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN"); private final SortedMap<byte [], byte []> map = new TreeMap<byte [], byte []>(Bytes.BYTES_COMPARATOR); public FileInfo() { super(); } /** * Append the given key/value pair to the file info, optionally checking the * key prefix. 
* * @param k key to add * @param v value to add * @param checkPrefix whether to check that the provided key does not start * with the reserved prefix * @return this file info object * @throws IOException if the key or value is invalid */ public FileInfo append(final byte[] k, final byte[] v, final boolean checkPrefix) throws IOException { if (k == null || v == null) { throw new NullPointerException("Key nor value may be null"); } if (checkPrefix && isReservedFileInfoKey(k)) { throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX + " are reserved"); } put(k, v); return this; } public void clear() { this.map.clear(); } public Comparator<? super byte[]> comparator() { return map.comparator(); } public boolean containsKey(Object key) { return map.containsKey(key); } public boolean containsValue(Object value) { return map.containsValue(value); } public Set<java.util.Map.Entry<byte[], byte[]>> entrySet() { return map.entrySet(); } public boolean equals(Object o) { return map.equals(o); } public byte[] firstKey() { return map.firstKey(); } public byte[] get(Object key) { return map.get(key); } public int hashCode() { return map.hashCode(); } public SortedMap<byte[], byte[]> headMap(byte[] toKey) { return this.map.headMap(toKey); } public boolean isEmpty() { return map.isEmpty(); } public Set<byte[]> keySet() { return map.keySet(); } public byte[] lastKey() { return map.lastKey(); } public byte[] put(byte[] key, byte[] value) { return this.map.put(key, value); } public void putAll(Map<? extends byte[], ? 
extends byte[]> m) { this.map.putAll(m); } public byte[] remove(Object key) { return this.map.remove(key); } public int size() { return map.size(); } public SortedMap<byte[], byte[]> subMap(byte[] fromKey, byte[] toKey) { return this.map.subMap(fromKey, toKey); } public SortedMap<byte[], byte[]> tailMap(byte[] fromKey) { return this.map.tailMap(fromKey); } public Collection<byte[]> values() { return map.values(); } /** * Write out this instance on the passed in <code>out</code> stream. * We write it as a protobuf. * @param out * @throws IOException * @see #read(DataInputStream) */ void write(final DataOutputStream out) throws IOException { HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder(); for (Map.Entry<byte [], byte[]> e: this.map.entrySet()) { HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder(); bbpBuilder.setFirst(ByteStringer.wrap(e.getKey())); bbpBuilder.setSecond(ByteStringer.wrap(e.getValue())); builder.addMapEntry(bbpBuilder.build()); } out.write(ProtobufMagic.PB_MAGIC); builder.build().writeDelimitedTo(out); } /** * Populate this instance with what we find on the passed in <code>in</code> stream. * Can deserialize protobuf of old Writables format. * @param in * @throws IOException * @see #write(DataOutputStream) */ void read(final DataInputStream in) throws IOException { // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code. 
int pblen = ProtobufUtil.lengthOfPBMagic(); byte [] pbuf = new byte[pblen]; if (in.markSupported()) in.mark(pblen); int read = in.read(pbuf); if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen); if (ProtobufUtil.isPBMagicPrefix(pbuf)) { parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in)); } else { if (in.markSupported()) { in.reset(); parseWritable(in); } else { // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS ByteArrayInputStream bais = new ByteArrayInputStream(pbuf); SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams // TODO: Am I leaking anything here wrapping the passed in stream? We are not calling close on the wrapped // streams but they should be let go after we leave this context? I see that we keep a reference to the // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok. parseWritable(new DataInputStream(sis)); } } } /** Now parse the old Writable format. It was a list of Map entries. Each map entry was a key and a value of * a byte []. The old map format had a byte before each entry that held a code which was short for the key or * value type. We know it was a byte [] so in below we just read and dump it. * @throws IOException */ void parseWritable(final DataInputStream in) throws IOException { // First clear the map. Otherwise we will just accumulate entries every time this method is called. this.map.clear(); // Read the number of entries in the map int entries = in.readInt(); // Then read each key/value pair for (int i = 0; i < entries; i++) { byte [] key = Bytes.readByteArray(in); // We used to read a byte that encoded the class type. 
Read and ignore it because it is always byte [] in hfile in.readByte(); byte [] value = Bytes.readByteArray(in); this.map.put(key, value); } } /** * Fill our map with content of the pb we read off disk * @param fip protobuf message to read */ void parsePB(final HFileProtos.FileInfoProto fip) { this.map.clear(); for (BytesBytesPair pair: fip.getMapEntryList()) { this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray()); } } } /** Return true if the given file info key is reserved for internal use. */ public static boolean isReservedFileInfoKey(byte[] key) { return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES); } /** * Get names of supported compression algorithms. The names are acceptable by * HFile.Writer. * * @return Array of strings, each represents a supported compression * algorithm. Currently, the following compression algorithms are * supported. * <ul> * <li>"none" - No compression. * <li>"gz" - GZIP compression. * </ul> */ public static String[] getSupportedCompressionAlgorithms() { return Compression.getSupportedAlgorithms(); } // Utility methods. /* * @param l Long to convert to an int. * @return <code>l</code> cast as an int. */ static int longToInt(final long l) { // Expecting the size() of a block not exceeding 4GB. Assuming the // size() will wrap to negative integer if it exceeds 2GB (From tfile). return (int)(l & 0x00000000ffffffffL); } /** * Returns all HFiles belonging to the given region directory. Could return an * empty list. * * @param fs The file system reference. * @param regionDir The region directory to scan. * @return The list of files found. * @throws IOException When scanning the files fails. 
*/ static List<Path> getStoreFiles(FileSystem fs, Path regionDir) throws IOException { List<Path> regionHFiles = new ArrayList<Path>(); PathFilter dirFilter = new FSUtils.DirFilter(fs); FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter); for(FileStatus dir : familyDirs) { FileStatus[] files = fs.listStatus(dir.getPath()); for (FileStatus file : files) { if (!file.isDirectory() && (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) && (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) { regionHFiles.add(file.getPath()); } } } return regionHFiles; } /** * Checks the given {@link HFile} format version, and throws an exception if * invalid. Note that if the version number comes from an input file and has * not been verified, the caller needs to re-throw an {@link IOException} to * indicate that this is not a software error, but corrupted input. * * @param version an HFile version * @throws IllegalArgumentException if the version is invalid */ public static void checkFormatVersion(int version) throws IllegalArgumentException { if (version < MIN_FORMAT_VERSION || version > MAX_FORMAT_VERSION) { throw new IllegalArgumentException("Invalid HFile version: " + version + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and " + MAX_FORMAT_VERSION + ")"); } } public static void checkHFileVersion(final Configuration c) { int version = c.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION); if (version < MAX_FORMAT_VERSION || version > MAX_FORMAT_VERSION) { throw new IllegalArgumentException("The setting for " + FORMAT_VERSION_KEY + " (in your hbase-*.xml files) is " + version + " which does not match " + MAX_FORMAT_VERSION + "; are you running with a configuration from an older or newer hbase install (an " + "incompatible hbase-default.xml or hbase-site.xml on your CLASSPATH)?"); } } public static void main(String[] args) throws Exception { // delegate to preserve old behavior HFilePrettyPrinter.main(args); } }
/* * @(#)Arrays.java 1.59 04/04/01 * * Copyright 2004 Sun Microsystems, Inc. All rights reserved. * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. */ package wise.java15.util; /** * This class contains various methods for manipulating arrays (such as * sorting and searching). This class also contains a static factory * that allows arrays to be viewed as lists. * * <p>The methods in this class all throw a <tt>NullPointerException</tt> if * the specified array reference is null, except where noted. * * <p>The documentation for the methods contained in this class includes * briefs description of the <i>implementations</i>. Such descriptions should * be regarded as <i>implementation notes</i>, rather than parts of the * <i>specification</i>. Implementors should feel free to substitute other * algorithms, so long as the specification itself is adhered to. (For * example, the algorithm used by <tt>sort(Object[])</tt> does not have to be * a mergesort, but it does have to be <i>stable</i>.) * * <p>This class is a member of the * <a href="{@docRoot}/../guide/collections/index.html"> * Java Collections Framework</a>. * * @author Josh Bloch * @author Neal Gafter * @version 1.59, 04/01/04 * @see Comparable * @see Comparator * @since 1.2 */ public class Arrays { // Suppresses default constructor, ensuring non-instantiability. private Arrays() { } // Sorting /** * Sorts the specified array of ints into ascending numerical order. * The sorting algorithm is a tuned quicksort, adapted from Jon * L. Bentley and M. Douglas McIlroy's "Engineering a Sort Function", * Software-Practice and Experience, Vol. 23(11) P. 1249-1265 (November * 1993). This algorithm offers n*log(n) performance on many data sets * that cause other quicksorts to degrade to quadratic performance. * * @param a the array to be sorted. */ public static void sort(int[] a) { sort1(a, 0, a.length); } /** * Sorts the specified sub-array of integers into ascending order. 
*/ private static void sort1(int x[], int off, int len) { // Insertion sort on smallest arrays if (len < 7) { for (int i=off; i<len+off; i++) for (int j=i; j>off && x[j-1]>x[j]; j--) swap(x, j, j-1); return; } // Choose a partition element, v int m = off + (len >> 1); // Small arrays, middle element if (len > 7) { int l = off; int n = off + len - 1; /* if (len > 40) { // Big arrays, pseudomedian of 9 int s = len/8; l = med3(x, l, l+s, l+2*s); m = med3(x, m-s, m, m+s); n = med3(x, n-2*s, n-s, n); } */ m = med3(x, l, m, n); // Mid-size, med of 3 } int v = x[m]; // Establish Invariant: v* (<v)* (>v)* v* int a = off, b = a, c = off + len - 1, d = c; while(true) { while (b <= c && x[b] <= v) { if (x[b] == v) swap(x, a++, b); b++; } while (c >= b && x[c] >= v) { if (x[c] == v) swap(x, c, d--); c--; } if (b > c) break; swap(x, b++, c--); } // Swap partition elements back to middle int s, n = off + len; s = Math.min(a-off, b-a ); vecswap(x, off, b-s, s); s = Math.min(d-c, n-d-1); vecswap(x, b, n-s, s); // Recursively sort non-partition-elements if ((s = b-a) > 1) sort1(x, off, s); if ((s = d-c) > 1) sort1(x, n-s, s); } /** * Swaps x[a] with x[b]. */ private static void swap(int x[], int a, int b) { int t = x[a]; x[a] = x[b]; x[b] = t; } /** * Swaps x[a .. (a+n-1)] with x[b .. (b+n-1)]. */ private static void vecswap(int x[], int a, int b, int n) { for (int i=0; i<n; i++, a++, b++) swap(x, a, b); } /** * Returns the index of the median of the three indexed integers. */ private static int med3(int x[], int a, int b, int c) { return (x[a] < x[b] ? (x[b] < x[c] ? b : x[a] < x[c] ? c : a) : (x[b] > x[c] ? b : x[a] > x[c] ? c : a)); } /** * Sorts the specified array of objects into ascending order, according to * the <i>natural ordering</i> of its elements. All elements in the array * must implement the <tt>Comparable</tt> interface. 
Furthermore, all * elements in the array must be <i>mutually comparable</i> (that is, * <tt>e1.compareTo(e2)</tt> must not throw a <tt>ClassCastException</tt> * for any elements <tt>e1</tt> and <tt>e2</tt> in the array).<p> * * This sort is guaranteed to be <i>stable</i>: equal elements will * not be reordered as a result of the sort.<p> * * The sorting algorithm is a modified mergesort (in which the merge is * omitted if the highest element in the low sublist is less than the * lowest element in the high sublist). This algorithm offers guaranteed * n*log(n) performance. * * @param a the array to be sorted. * @throws ClassCastException if the array contains elements that are not * <i>mutually comparable</i> (for example, strings and integers). * @see Comparable */ public static void sort(Object[] a) { Object[] aux = (Object[])a.clone(); mergeSort(aux, a, 0, a.length, 0); } /** * Tuning parameter: list size at or below which insertion sort will be * used in preference to mergesort or quicksort. */ private static final int INSERTIONSORT_THRESHOLD = 7; /** * Src is the source array that starts at index 0 * Dest is the (possibly larger) array destination with a possible offset * low is the index in dest to start sorting * high is the end index in dest to end sorting * off is the offset to generate corresponding low, high in src */ private static void mergeSort(Object[] src, Object[] dest, int low, int high, int off) { int length = high - low; // Insertion sort on smallest arrays if (length < INSERTIONSORT_THRESHOLD) { for (int i=low; i<high; i++) for (int j=i; j>low && ((Comparable) dest[j-1]).compareTo(dest[j])>0; j--) swap(dest, j, j-1); return; } // Recursively sort halves of dest into src int destLow = low; int destHigh = high; low += off; high += off; int mid = (low + high) >> 1; mergeSort(dest, src, low, mid, -off); mergeSort(dest, src, mid, high, -off); // If list is already sorted, just copy from src to dest. 
This is an // optimization that results in faster sorts for nearly ordered lists. if (((Comparable)src[mid-1]).compareTo(src[mid]) <= 0) { System.arraycopy(src, low, dest, destLow, length); return; } // Merge sorted halves (now in src) into dest for(int i = destLow, p = low, q = mid; i < destHigh; i++) { if (q >= high || p < mid && ((Comparable)src[p]).compareTo(src[q])<=0) dest[i] = src[p++]; else dest[i] = src[q++]; } } /** * Swaps x[a] with x[b]. */ private static void swap(Object[] x, int a, int b) { Object t = x[a]; x[a] = x[b]; x[b] = t; } }
/*
 * Copyright 2015-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.onosproject.segmentrouting;

import org.onlab.packet.ARP;
import org.onlab.packet.Ethernet;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.IpPrefix;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.net.neighbour.NeighbourMessageContext;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DeviceId;
import org.onosproject.net.host.HostService;
import org.onosproject.segmentrouting.config.DeviceConfigNotFoundException;
import org.onosproject.segmentrouting.config.SegmentRoutingAppConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Set;
import java.util.stream.Collectors;

import static org.onosproject.net.neighbour.NeighbourMessageType.REQUEST;

/**
 * Handler of ARP packets that responses or forwards ARP packets that
 * are sent to the controller.
 */
public class ArpHandler extends SegmentRoutingNeighbourHandler {

    private static Logger log = LoggerFactory.getLogger(ArpHandler.class);

    /**
     * Creates an ArpHandler object.
     *
     * @param srManager SegmentRoutingManager object
     */
    public ArpHandler(SegmentRoutingManager srManager) {
        super(srManager);
    }

    /**
     * Processes incoming ARP packets.
     *
     * If it is an ARP request to router itself or known hosts,
     * then it sends ARP response.
     * If it is an ARP request to unknown hosts in its own subnet,
     * then it floods the ARP request to the ports.
     * If it is an ARP response, then set a flow rule for the host
     * and forward any IP packets to the host in the packet buffer to the host.
     * <p>
     * Note: We handle all ARP packet in, even for those ARP packets between
     * hosts in the same subnet.
     * For an ARP packet with broadcast destination MAC,
     * some switches pipelines will send it to the controller due to table miss,
     * other switches will flood the packets directly in the data plane without
     * packet in.
     * We can deal with both cases.
     *
     * @param pkt incoming ARP packet and context information
     * @param hostService the host service
     */
    public void processPacketIn(NeighbourMessageContext pkt, HostService hostService) {

        SegmentRoutingAppConfig appConfig = srManager.cfgService
                .getConfig(srManager.appId, SegmentRoutingAppConfig.class);
        if (appConfig != null && appConfig.suppressSubnet().contains(pkt.inPort())) {
            // Ignore ARP packets come from suppressed ports
            pkt.drop();
            return;
        }

        // Sanity gate: the sender protocol address must match a subnet
        // configured on the ingress port, otherwise the packet is dropped.
        if (!validateArpSpa(pkt)) {
            log.debug("Ignore ARP packet discovered on {} with unexpected src protocol address {}.",
                    pkt.inPort(), pkt.sender().getIp4Address());
            pkt.drop();
            return;
        }

        if (pkt.type() == REQUEST) {
            handleArpRequest(pkt, hostService);
        } else {
            handleArpReply(pkt, hostService);
        }
    }

    private void handleArpRequest(NeighbourMessageContext pkt, HostService hostService) {
        // ARP request for router. Send ARP reply.
        if (isArpForRouter(pkt)) {
            MacAddress targetMac = config.getRouterMacForAGatewayIp(pkt.target().getIp4Address());
            sendResponse(pkt, targetMac, hostService);
        } else {
            // NOTE: Ignore ARP packets except those target for the router
            //       We will reconsider enabling this when we have host learning support
            /*
            Set<Host> hosts = hostService.getHostsByIp(pkt.target());
            if (hosts.size() > 1) {
                log.warn("More than one host with the same ip {}", pkt.target());
            }
            Host targetHost = hosts.stream().findFirst().orElse(null);
            // ARP request for known hosts. Send proxy ARP reply on behalf of the target.
            if (targetHost != null) {
                pkt.forward(targetHost.location());
            // ARP request for unknown host in the subnet. Flood in the subnet.
            } else {
                flood(pkt);
            }
            */
        }
    }

    private void handleArpReply(NeighbourMessageContext pkt, HostService hostService) {
        // ARP reply for router. Process all pending IP packets.
        if (isArpForRouter(pkt)) {
            Ip4Address hostIpAddress = pkt.sender().getIp4Address();
            srManager.ipHandler.forwardPackets(pkt.inPort().deviceId(), hostIpAddress);
        } else {
            // NOTE: Ignore ARP packets except those target for the router
            //       We will reconsider enabling this when we have host learning support
            /*
            HostId targetHostId = HostId.hostId(pkt.dstMac(), pkt.vlan());
            Host targetHost = hostService.getHost(targetHostId);
            // ARP reply for known hosts. Forward to the host.
            if (targetHost != null) {
                pkt.forward(targetHost.location());
            // ARP reply for unknown host, Flood in the subnet.
            } else {
                // Don't flood to non-edge ports
                if (pkt.vlan().equals(SegmentRoutingManager.INTERNAL_VLAN)) {
                    return;
                }
                flood(pkt);
            }
            */
        }
    }

    /**
     * Check if the source protocol address of an ARP packet belongs to the same
     * subnet configured on the port it is seen.
     *
     * @param pkt ARP packet and context information
     * @return true if the source protocol address belongs to the configured subnet
     */
    private boolean validateArpSpa(NeighbourMessageContext pkt) {
        Ip4Address spa = pkt.sender().getIp4Address();
        // Only IPv4 prefixes that contain the sender address are kept;
        // a non-empty result means the SPA is valid on this port.
        Set<IpPrefix> subnet = config.getPortSubnets(pkt.inPort().deviceId(), pkt.inPort().port())
                .stream()
                .filter(ipPrefix -> ipPrefix.isIp4() && ipPrefix.contains(spa))
                .collect(Collectors.toSet());
        return !subnet.isEmpty();
    }

    // Returns true if the ARP target is the router itself: either the device's
    // configured IPv4 router address or one of its configured port gateway IPs.
    private boolean isArpForRouter(NeighbourMessageContext pkt) {
        Ip4Address targetProtocolAddress = pkt.target().getIp4Address();
        Set<IpAddress> gatewayIpAddresses = null;
        try {
            if (targetProtocolAddress.equals(config.getRouterIpv4(pkt.inPort().deviceId()))) {
                return true;
            }
            gatewayIpAddresses = config.getPortIPs(pkt.inPort().deviceId());
        } catch (DeviceConfigNotFoundException e) {
            // Missing device config is non-fatal: fall through and answer false
            // unless a gateway IP match was already collected.
            log.warn(e.getMessage() + " Aborting check for router IP in processing arp");
        }
        if (gatewayIpAddresses != null && gatewayIpAddresses.contains(targetProtocolAddress)) {
            return true;
        }
        return false;
    }

    /**
     * Sends an ARP request for the target IP address to all ports except in-port.
     *
     * @param deviceId Switch device ID
     * @param targetAddress target IP address for ARP
     * @param inPort in-port
     */
    public void sendArpRequest(DeviceId deviceId, IpAddress targetAddress, ConnectPoint inPort) {

        // getSenderInfo fills these two arrays in place with the device's
        // MAC and IPv4 sender addresses.
        byte[] senderMacAddress = new byte[MacAddress.MAC_ADDRESS_LENGTH];
        byte[] senderIpAddress = new byte[Ip4Address.BYTE_LENGTH];
        /*
         * Retrieves device info.
         */
        if (!getSenderInfo(senderMacAddress, senderIpAddress, deviceId, targetAddress)) {
            log.warn("Aborting sendArpRequest, we cannot get all the information needed");
            return;
        }
        /*
         * Creates the request.
         */
        Ethernet arpRequest = ARP.buildArpRequest(
                senderMacAddress,
                senderIpAddress,
                targetAddress.toOctets(),
                VlanId.NO_VID
        );
        flood(arpRequest, inPort, targetAddress);
    }
}
package org.basex.query.value.type;

import org.basex.query.*;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.util.*;

/**
 * XQuery item types.
 *
 * @author BaseX Team 2005-15, BSD License
 * @author Christian Gruen
 */
public interface Type {
  /**
   * Type IDs for client/server communication.
   * NOTE(review): the numeric values are part of the wire protocol — do not
   * renumber or reuse existing values.
   */
  enum ID {
    // function types
    /** function(*). */ FUN(7),

    // node types
    /** node(). */ NOD(8),
    /** text(). */ TXT(9),
    /** processing-instruction(). */ PI(10),
    /** element(). */ ELM(11),
    /** document-node(). */ DOC(12),
    /** document-node(element()). */ DEL(13),
    /** attribute(). */ ATT(14),
    /** comment(). */ COM(15),
    /** namespace-node(). */ NSP(16),
    /** schema-element(). */ SCE(17),
    /** schema-attribute(). */ SCA(18),

    // item type
    /** item(). */ ITEM(32),

    // atomic types
    /** xs:untyped. */ UTY(33),
    /** xs:anyType. */ ATY(34),
    /** xs:anySimpleType. */ AST(35),
    /** xs:anyAtomicType. */ AAT(36),
    /** xs:untypedAtomic. */ ATM(37),
    /** xs:string. */ STR(38),
    /** xs:normalizedString. */ NST(39),
    /** xs:token. */ TOK(40),
    /** xs:language. */ LAN(41),
    /** xs:NMTOKEN. */ NMT(42),
    /** xs:Name. */ NAM(43),
    /** xs:NCName. */ NCN(44),
    /** xs:ID. */ ID(45),
    /** xs:IDREF. */ IDR(46),
    /** xs:ENTITY. */ ENT(47),
    /** xs:float. */ FLT(48),
    /** xs:double. */ DBL(49),
    /** xs:decimal. */ DEC(50),
    /** precisionDecimal(). */ PDC(51),
    /** xs:integer. */ ITR(52),
    /** xs:nonPositiveInteger. */ NPI(53),
    /** xs:negativeInteger. */ NIN(54),
    /** xs:long. */ LNG(55),
    /** xs:int. */ INT(56),
    /** xs:short. */ SHR(57),
    /** xs:byte. */ BYT(58),
    /** xs:nonNegativeInteger. */ NNI(59),
    /** xs:unsignedLong. */ ULN(60),
    /** xs:unsignedInt. */ UIN(61),
    /** xs:unsignedShort. */ USH(62),
    /** xs:unsignedByte. */ UBY(63),
    /** xs:positiveInteger. */ PIN(64),
    /** xs:duration. */ DUR(65),
    /** xs:yearMonthDuration. */ YMD(66),
    /** xs:dayTimeDuration. */ DTD(67),
    /** xs:dateTime. */ DTM(68),
    /** dateTimeStamp(). */ DTS(69),
    /** xs:date. */ DAT(70),
    /** xs:time. */ TIM(71),
    /** xs:gYearMonth. */ YMO(72),
    /** xs:gYear. */ YEA(73),
    /** xs:gMonthDay. */ MDA(74),
    /** xs:gDay. */ DAY(75),
    /** xs:gMonth. */ MON(76),
    /** xs:boolean. */ BLN(77),
    /** binary(). */ BIN(78),
    /** xs:base64Binary. */ B64(79),
    /** xs:hexBinary. */ HEX(80),
    /** xs:anyURI. */ URI(81),
    /** xs:QName. */ QNM(82),
    /** xs:NOTATION. */ NOT(83),
    /** xs:numeric. */ NUM(84),
    /** java(). */ JAVA(86);

    /** Cached enums (faster). */
    public static final ID[] VALUES = values();
    /** Type ID (the protocol byte value of this constant). */
    private final byte id;

    /**
     * Constructor.
     * @param id type id
     */
    ID(final int id) {
      this.id = (byte) id;
    }

    /**
     * Returns the type ID as a byte.
     * @return type ID
     */
    public byte asByte() {
      return id;
    }

    /**
     * Wraps the type ID in a byte array.
     * @return type ID
     */
    public byte[] bytes() {
      return new byte[] { id };
    }

    /**
     * Gets the ID for the given byte value.
     * Linear scan over the cached table; the table is small, so this is cheap.
     * @param b byte
     * @return type ID if found, {@code null} otherwise
     */
    public static ID get(final byte b) {
      for(final ID i : VALUES) if(i.id == b) return i;
      return null;
    }

    /**
     * Gets the type instance for the given ID.
     * Resolution order: function type, then atomic type, then node type.
     * @param b type ID
     * @return corresponding type if found, {@code null} otherwise
     */
    public static Type getType(final byte b) {
      final ID id = get(b);
      if(id == null) return null;
      if(id == FUN) return FuncType.ANY_FUN;
      final Type t = AtomType.getType(id);
      return t != null ? t : NodeType.getType(id);
    }
  }

  /**
   * Casts the specified item to this item type.
   * @param item item to be converted
   * @param qc query context
   * @param sc static context
   * @param ii input info
   * @return new item
   * @throws QueryException query exception
   */
  Value cast(final Item item, final QueryContext qc, final StaticContext sc,
      final InputInfo ii) throws QueryException;

  /**
   * Casts the specified Java value to this item type.
   * @param value Java value
   * @param qc query context
   * @param sc static context
   * @param ii input info
   * @return new item
   * @throws QueryException query exception
   */
  Value cast(final Object value, QueryContext qc, final StaticContext sc,
      final InputInfo ii) throws QueryException;

  /**
   * Casts the specified string to this item type.
   * @param value string object
   * @param qc query context
   * @param sc static context
   * @param ii input info
   * @return new item
   * @throws QueryException query exception
   */
  Value castString(final String value, QueryContext qc, final StaticContext sc,
      final InputInfo ii) throws QueryException;

  /**
   * Returns a sequence type with this item type.
   * @return sequence type
   */
  SeqType seqType();

  // PUBLIC AND STATIC METHODS ================================================

  /**
   * Checks if this type is equal to the given one.
   * @param type other type
   * @return {@code true} if both types are equal, {@code false} otherwise
   */
  boolean eq(final Type type);

  /**
   * Checks if the current type is an instance of the specified type.
   * @param type type to be checked
   * @return result of check
   */
  boolean instanceOf(final Type type);

  /**
   * Computes the union between this type and the given one, i.e. the least common
   * ancestor of both types in the type hierarchy.
   * @param type other type
   * @return union type
   */
  Type union(final Type type);

  /**
   * Computes the intersection between this type and the given one, i.e. the least
   * specific type that is sub-type of both types. If no such type exists, {@code null} is
   * returned.
   * @param type other type
   * @return intersection type or {@code null}
   */
  Type intersect(final Type type);

  /**
   * Checks if the type refers to a number.
   * @return result of check
   */
  boolean isNumber();

  /**
   * Checks if the type refers to an untyped item.
   * @return result of check
   */
  boolean isUntyped();

  /**
   * Checks if the type refers to a number or an untyped item.
   * @return result of check
   */
  boolean isNumberOrUntyped();

  /**
   * Checks if the type refers to a string or an untyped item.
   * @return result of check
   */
  boolean isStringOrUntyped();

  /**
   * Returns the string representation of this type.
   * @return name
   */
  byte[] string();

  /**
   * Returns a type id to differentiate all types.
   * @return id
   */
  ID id();

  /**
   * Checks if the type is namespace-sensitive.
   * @return result of check
   */
  boolean nsSensitive();

  @Override
  String toString();
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.io; import org.apache.commons.io.testtools.FileBasedTestCase; import org.apache.commons.io.testtools.TestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * This is used to test FilenameUtils for correctness. 
* * @see FilenameUtils */ public class FilenameUtilsTestCase extends FileBasedTestCase { private static final String SEP = "" + File.separatorChar; private static final boolean WINDOWS = File.separatorChar == '\\'; private final File testFile1; private final File testFile2; private final int testFile1Size; private final int testFile2Size; public FilenameUtilsTestCase() { testFile1 = new File(getTestDirectory(), "file1-test.txt"); testFile2 = new File(getTestDirectory(), "file1a-test.txt"); testFile1Size = (int) testFile1.length(); testFile2Size = (int) testFile2.length(); } /** * @see junit.framework.TestCase#setUp() */ @Before public void setUp() throws Exception { getTestDirectory(); if (!testFile1.getParentFile().exists()) { throw new IOException("Cannot create file " + testFile1 + " as the parent directory does not exist"); } try (final BufferedOutputStream output3 = new BufferedOutputStream(new FileOutputStream(testFile1))) { TestUtils.generateTestData(output3, (long) testFile1Size); } if (!testFile2.getParentFile().exists()) { throw new IOException("Cannot create file " + testFile2 + " as the parent directory does not exist"); } try (final BufferedOutputStream output2 = new BufferedOutputStream(new FileOutputStream(testFile2))) { TestUtils.generateTestData(output2, (long) testFile2Size); } FileUtils.deleteDirectory(getTestDirectory()); getTestDirectory(); if (!testFile1.getParentFile().exists()) { throw new IOException("Cannot create file " + testFile1 + " as the parent directory does not exist"); } try (final BufferedOutputStream output1 = new BufferedOutputStream(new FileOutputStream(testFile1))) { TestUtils.generateTestData(output1, (long) testFile1Size); } if (!testFile2.getParentFile().exists()) { throw new IOException("Cannot create file " + testFile2 + " as the parent directory does not exist"); } try (final BufferedOutputStream output = new BufferedOutputStream(new FileOutputStream(testFile2))) { TestUtils.generateTestData(output, (long) 
testFile2Size); } } /** * @see junit.framework.TestCase#tearDown() */ @After public void tearDown() throws Exception { FileUtils.deleteDirectory(getTestDirectory()); } //----------------------------------------------------------------------- @Test public void testNormalize() throws Exception { assertEquals(null, FilenameUtils.normalize(null)); assertEquals(null, FilenameUtils.normalize(":")); assertEquals(null, FilenameUtils.normalize("1:\\a\\b\\c.txt")); assertEquals(null, FilenameUtils.normalize("1:")); assertEquals(null, FilenameUtils.normalize("1:a")); assertEquals(null, FilenameUtils.normalize("\\\\\\a\\b\\c.txt")); assertEquals(null, FilenameUtils.normalize("\\\\a")); assertEquals("a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("a\\b/c.txt")); assertEquals("" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("\\a\\b/c.txt")); assertEquals("C:" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("C:\\a\\b/c.txt")); assertEquals("" + SEP + "" + SEP + "server" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("\\\\server\\a\\b/c.txt")); assertEquals("~" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("~\\a\\b/c.txt")); assertEquals("~user" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("~user\\a\\b/c.txt")); assertEquals("a" + SEP + "c", FilenameUtils.normalize("a/b/../c")); assertEquals("c", FilenameUtils.normalize("a/b/../../c")); assertEquals("c" + SEP, FilenameUtils.normalize("a/b/../../c/")); assertEquals(null, FilenameUtils.normalize("a/b/../../../c")); assertEquals("a" + SEP, FilenameUtils.normalize("a/b/..")); assertEquals("a" + SEP, FilenameUtils.normalize("a/b/../")); assertEquals("", FilenameUtils.normalize("a/b/../..")); assertEquals("", FilenameUtils.normalize("a/b/../../")); assertEquals(null, FilenameUtils.normalize("a/b/../../..")); assertEquals("a" + SEP + "d", FilenameUtils.normalize("a/b/../c/../d")); assertEquals("a" + SEP + "d" + SEP, 
FilenameUtils.normalize("a/b/../c/../d/")); assertEquals("a" + SEP + "b" + SEP + "d", FilenameUtils.normalize("a/b//d")); assertEquals("a" + SEP + "b" + SEP, FilenameUtils.normalize("a/b/././.")); assertEquals("a" + SEP + "b" + SEP, FilenameUtils.normalize("a/b/./././")); assertEquals("a" + SEP, FilenameUtils.normalize("./a/")); assertEquals("a", FilenameUtils.normalize("./a")); assertEquals("", FilenameUtils.normalize("./")); assertEquals("", FilenameUtils.normalize(".")); assertEquals(null, FilenameUtils.normalize("../a")); assertEquals(null, FilenameUtils.normalize("..")); assertEquals("", FilenameUtils.normalize("")); assertEquals(SEP + "a", FilenameUtils.normalize("/a")); assertEquals(SEP + "a" + SEP, FilenameUtils.normalize("/a/")); assertEquals(SEP + "a" + SEP + "c", FilenameUtils.normalize("/a/b/../c")); assertEquals(SEP + "c", FilenameUtils.normalize("/a/b/../../c")); assertEquals(null, FilenameUtils.normalize("/a/b/../../../c")); assertEquals(SEP + "a" + SEP, FilenameUtils.normalize("/a/b/..")); assertEquals(SEP + "", FilenameUtils.normalize("/a/b/../..")); assertEquals(null, FilenameUtils.normalize("/a/b/../../..")); assertEquals(SEP + "a" + SEP + "d", FilenameUtils.normalize("/a/b/../c/../d")); assertEquals(SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalize("/a/b//d")); assertEquals(SEP + "a" + SEP + "b" + SEP, FilenameUtils.normalize("/a/b/././.")); assertEquals(SEP + "a", FilenameUtils.normalize("/./a")); assertEquals(SEP + "", FilenameUtils.normalize("/./")); assertEquals(SEP + "", FilenameUtils.normalize("/.")); assertEquals(null, FilenameUtils.normalize("/../a")); assertEquals(null, FilenameUtils.normalize("/..")); assertEquals(SEP + "", FilenameUtils.normalize("/")); assertEquals("~" + SEP + "a", FilenameUtils.normalize("~/a")); assertEquals("~" + SEP + "a" + SEP, FilenameUtils.normalize("~/a/")); assertEquals("~" + SEP + "a" + SEP + "c", FilenameUtils.normalize("~/a/b/../c")); assertEquals("~" + SEP + "c", 
FilenameUtils.normalize("~/a/b/../../c")); assertEquals(null, FilenameUtils.normalize("~/a/b/../../../c")); assertEquals("~" + SEP + "a" + SEP, FilenameUtils.normalize("~/a/b/..")); assertEquals("~" + SEP + "", FilenameUtils.normalize("~/a/b/../..")); assertEquals(null, FilenameUtils.normalize("~/a/b/../../..")); assertEquals("~" + SEP + "a" + SEP + "d", FilenameUtils.normalize("~/a/b/../c/../d")); assertEquals("~" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalize("~/a/b//d")); assertEquals("~" + SEP + "a" + SEP + "b" + SEP, FilenameUtils.normalize("~/a/b/././.")); assertEquals("~" + SEP + "a", FilenameUtils.normalize("~/./a")); assertEquals("~" + SEP, FilenameUtils.normalize("~/./")); assertEquals("~" + SEP, FilenameUtils.normalize("~/.")); assertEquals(null, FilenameUtils.normalize("~/../a")); assertEquals(null, FilenameUtils.normalize("~/..")); assertEquals("~" + SEP, FilenameUtils.normalize("~/")); assertEquals("~" + SEP, FilenameUtils.normalize("~")); assertEquals("~user" + SEP + "a", FilenameUtils.normalize("~user/a")); assertEquals("~user" + SEP + "a" + SEP, FilenameUtils.normalize("~user/a/")); assertEquals("~user" + SEP + "a" + SEP + "c", FilenameUtils.normalize("~user/a/b/../c")); assertEquals("~user" + SEP + "c", FilenameUtils.normalize("~user/a/b/../../c")); assertEquals(null, FilenameUtils.normalize("~user/a/b/../../../c")); assertEquals("~user" + SEP + "a" + SEP, FilenameUtils.normalize("~user/a/b/..")); assertEquals("~user" + SEP + "", FilenameUtils.normalize("~user/a/b/../..")); assertEquals(null, FilenameUtils.normalize("~user/a/b/../../..")); assertEquals("~user" + SEP + "a" + SEP + "d", FilenameUtils.normalize("~user/a/b/../c/../d")); assertEquals("~user" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalize("~user/a/b//d")); assertEquals("~user" + SEP + "a" + SEP + "b" + SEP, FilenameUtils.normalize("~user/a/b/././.")); assertEquals("~user" + SEP + "a", FilenameUtils.normalize("~user/./a")); assertEquals("~user" + SEP + "", 
FilenameUtils.normalize("~user/./")); assertEquals("~user" + SEP + "", FilenameUtils.normalize("~user/.")); assertEquals(null, FilenameUtils.normalize("~user/../a")); assertEquals(null, FilenameUtils.normalize("~user/..")); assertEquals("~user" + SEP, FilenameUtils.normalize("~user/")); assertEquals("~user" + SEP, FilenameUtils.normalize("~user")); assertEquals("C:" + SEP + "a", FilenameUtils.normalize("C:/a")); assertEquals("C:" + SEP + "a" + SEP, FilenameUtils.normalize("C:/a/")); assertEquals("C:" + SEP + "a" + SEP + "c", FilenameUtils.normalize("C:/a/b/../c")); assertEquals("C:" + SEP + "c", FilenameUtils.normalize("C:/a/b/../../c")); assertEquals(null, FilenameUtils.normalize("C:/a/b/../../../c")); assertEquals("C:" + SEP + "a" + SEP, FilenameUtils.normalize("C:/a/b/..")); assertEquals("C:" + SEP + "", FilenameUtils.normalize("C:/a/b/../..")); assertEquals(null, FilenameUtils.normalize("C:/a/b/../../..")); assertEquals("C:" + SEP + "a" + SEP + "d", FilenameUtils.normalize("C:/a/b/../c/../d")); assertEquals("C:" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalize("C:/a/b//d")); assertEquals("C:" + SEP + "a" + SEP + "b" + SEP, FilenameUtils.normalize("C:/a/b/././.")); assertEquals("C:" + SEP + "a", FilenameUtils.normalize("C:/./a")); assertEquals("C:" + SEP + "", FilenameUtils.normalize("C:/./")); assertEquals("C:" + SEP + "", FilenameUtils.normalize("C:/.")); assertEquals(null, FilenameUtils.normalize("C:/../a")); assertEquals(null, FilenameUtils.normalize("C:/..")); assertEquals("C:" + SEP + "", FilenameUtils.normalize("C:/")); assertEquals("C:" + "a", FilenameUtils.normalize("C:a")); assertEquals("C:" + "a" + SEP, FilenameUtils.normalize("C:a/")); assertEquals("C:" + "a" + SEP + "c", FilenameUtils.normalize("C:a/b/../c")); assertEquals("C:" + "c", FilenameUtils.normalize("C:a/b/../../c")); assertEquals(null, FilenameUtils.normalize("C:a/b/../../../c")); assertEquals("C:" + "a" + SEP, FilenameUtils.normalize("C:a/b/..")); assertEquals("C:" + "", 
FilenameUtils.normalize("C:a/b/../..")); assertEquals(null, FilenameUtils.normalize("C:a/b/../../..")); assertEquals("C:" + "a" + SEP + "d", FilenameUtils.normalize("C:a/b/../c/../d")); assertEquals("C:" + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalize("C:a/b//d")); assertEquals("C:" + "a" + SEP + "b" + SEP, FilenameUtils.normalize("C:a/b/././.")); assertEquals("C:" + "a", FilenameUtils.normalize("C:./a")); assertEquals("C:" + "", FilenameUtils.normalize("C:./")); assertEquals("C:" + "", FilenameUtils.normalize("C:.")); assertEquals(null, FilenameUtils.normalize("C:../a")); assertEquals(null, FilenameUtils.normalize("C:..")); assertEquals("C:" + "", FilenameUtils.normalize("C:")); assertEquals(SEP + SEP + "server" + SEP + "a", FilenameUtils.normalize("//server/a")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP, FilenameUtils.normalize("//server/a/")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "c", FilenameUtils.normalize("//server/a/b/../c")); assertEquals(SEP + SEP + "server" + SEP + "c", FilenameUtils.normalize("//server/a/b/../../c")); assertEquals(null, FilenameUtils.normalize("//server/a/b/../../../c")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP, FilenameUtils.normalize("//server/a/b/..")); assertEquals(SEP + SEP + "server" + SEP + "", FilenameUtils.normalize("//server/a/b/../..")); assertEquals(null, FilenameUtils.normalize("//server/a/b/../../..")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "d", FilenameUtils.normalize("//server/a/b/../c/../d")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalize("//server/a/b//d")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "b" + SEP, FilenameUtils.normalize("//server/a/b/././.")); assertEquals(SEP + SEP + "server" + SEP + "a", FilenameUtils.normalize("//server/./a")); assertEquals(SEP + SEP + "server" + SEP + "", FilenameUtils.normalize("//server/./")); assertEquals(SEP + SEP + "server" + SEP + "", 
FilenameUtils.normalize("//server/.")); assertEquals(null, FilenameUtils.normalize("//server/../a")); assertEquals(null, FilenameUtils.normalize("//server/..")); assertEquals(SEP + SEP + "server" + SEP + "", FilenameUtils.normalize("//server/")); } @Test public void testNormalize_with_nullbytes() throws Exception { try { assertEquals("a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("a\\b/c\u0000.txt")); } catch (IllegalArgumentException ignore) { } try { assertEquals("a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalize("\u0000a\\b/c.txt")); } catch (IllegalArgumentException ignore) { } } @Test public void testNormalizeUnixWin() throws Exception { // Normalize (Unix Separator) assertEquals("/a/c/", FilenameUtils.normalize("/a/b/../c/", true)); assertEquals("/a/c/", FilenameUtils.normalize("\\a\\b\\..\\c\\", true)); // Normalize (Windows Separator) assertEquals("\\a\\c\\", FilenameUtils.normalize("/a/b/../c/", false)); assertEquals("\\a\\c\\", FilenameUtils.normalize("\\a\\b\\..\\c\\", false)); } //----------------------------------------------------------------------- @Test public void testNormalizeNoEndSeparator() throws Exception { assertEquals(null, FilenameUtils.normalizeNoEndSeparator(null)); assertEquals(null, FilenameUtils.normalizeNoEndSeparator(":")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("1:\\a\\b\\c.txt")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("1:")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("1:a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("\\\\\\a\\b\\c.txt")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("\\\\a")); assertEquals("a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalizeNoEndSeparator("a\\b/c.txt")); assertEquals("" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalizeNoEndSeparator("\\a\\b/c.txt")); assertEquals("C:" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalizeNoEndSeparator("C:\\a\\b/c.txt")); assertEquals("" + 
SEP + "" + SEP + "server" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalizeNoEndSeparator("\\\\server\\a\\b/c.txt")); assertEquals("~" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalizeNoEndSeparator("~\\a\\b/c.txt")); assertEquals("~user" + SEP + "a" + SEP + "b" + SEP + "c.txt", FilenameUtils.normalizeNoEndSeparator("~user\\a\\b/c.txt")); assertEquals("a" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("a/b/../c")); assertEquals("c", FilenameUtils.normalizeNoEndSeparator("a/b/../../c")); assertEquals("c", FilenameUtils.normalizeNoEndSeparator("a/b/../../c/")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("a/b/../../../c")); assertEquals("a", FilenameUtils.normalizeNoEndSeparator("a/b/..")); assertEquals("a", FilenameUtils.normalizeNoEndSeparator("a/b/../")); assertEquals("", FilenameUtils.normalizeNoEndSeparator("a/b/../..")); assertEquals("", FilenameUtils.normalizeNoEndSeparator("a/b/../../")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("a/b/../../..")); assertEquals("a" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("a/b/../c/../d")); assertEquals("a" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("a/b/../c/../d/")); assertEquals("a" + SEP + "b" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("a/b//d")); assertEquals("a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("a/b/././.")); assertEquals("a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("a/b/./././")); assertEquals("a", FilenameUtils.normalizeNoEndSeparator("./a/")); assertEquals("a", FilenameUtils.normalizeNoEndSeparator("./a")); assertEquals("", FilenameUtils.normalizeNoEndSeparator("./")); assertEquals("", FilenameUtils.normalizeNoEndSeparator(".")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("../a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("..")); assertEquals("", FilenameUtils.normalizeNoEndSeparator("")); assertEquals(SEP + "a", FilenameUtils.normalizeNoEndSeparator("/a")); assertEquals(SEP 
+ "a", FilenameUtils.normalizeNoEndSeparator("/a/")); assertEquals(SEP + "a" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("/a/b/../c")); assertEquals(SEP + "c", FilenameUtils.normalizeNoEndSeparator("/a/b/../../c")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("/a/b/../../../c")); assertEquals(SEP + "a", FilenameUtils.normalizeNoEndSeparator("/a/b/..")); assertEquals(SEP + "", FilenameUtils.normalizeNoEndSeparator("/a/b/../..")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("/a/b/../../..")); assertEquals(SEP + "a" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("/a/b/../c/../d")); assertEquals(SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("/a/b//d")); assertEquals(SEP + "a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("/a/b/././.")); assertEquals(SEP + "a", FilenameUtils.normalizeNoEndSeparator("/./a")); assertEquals(SEP + "", FilenameUtils.normalizeNoEndSeparator("/./")); assertEquals(SEP + "", FilenameUtils.normalizeNoEndSeparator("/.")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("/../a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("/..")); assertEquals(SEP + "", FilenameUtils.normalizeNoEndSeparator("/")); assertEquals("~" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~/a")); assertEquals("~" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~/a/")); assertEquals("~" + SEP + "a" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("~/a/b/../c")); assertEquals("~" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("~/a/b/../../c")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~/a/b/../../../c")); assertEquals("~" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~/a/b/..")); assertEquals("~" + SEP + "", FilenameUtils.normalizeNoEndSeparator("~/a/b/../..")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~/a/b/../../..")); assertEquals("~" + SEP + "a" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("~/a/b/../c/../d")); 
assertEquals("~" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("~/a/b//d")); assertEquals("~" + SEP + "a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("~/a/b/././.")); assertEquals("~" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~/./a")); assertEquals("~" + SEP, FilenameUtils.normalizeNoEndSeparator("~/./")); assertEquals("~" + SEP, FilenameUtils.normalizeNoEndSeparator("~/.")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~/../a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~/..")); assertEquals("~" + SEP, FilenameUtils.normalizeNoEndSeparator("~/")); assertEquals("~" + SEP, FilenameUtils.normalizeNoEndSeparator("~")); assertEquals("~user" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~user/a")); assertEquals("~user" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~user/a/")); assertEquals("~user" + SEP + "a" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("~user/a/b/../c")); assertEquals("~user" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("~user/a/b/../../c")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~user/a/b/../../../c")); assertEquals("~user" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~user/a/b/..")); assertEquals("~user" + SEP + "", FilenameUtils.normalizeNoEndSeparator("~user/a/b/../..")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~user/a/b/../../..")); assertEquals("~user" + SEP + "a" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("~user/a/b/../c/../d")); assertEquals("~user" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("~user/a/b//d")); assertEquals("~user" + SEP + "a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("~user/a/b/././.")); assertEquals("~user" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("~user/./a")); assertEquals("~user" + SEP + "", FilenameUtils.normalizeNoEndSeparator("~user/./")); assertEquals("~user" + SEP + "", FilenameUtils.normalizeNoEndSeparator("~user/.")); 
assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~user/../a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("~user/..")); assertEquals("~user" + SEP, FilenameUtils.normalizeNoEndSeparator("~user/")); assertEquals("~user" + SEP, FilenameUtils.normalizeNoEndSeparator("~user")); assertEquals("C:" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("C:/a")); assertEquals("C:" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("C:/a/")); assertEquals("C:" + SEP + "a" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("C:/a/b/../c")); assertEquals("C:" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("C:/a/b/../../c")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:/a/b/../../../c")); assertEquals("C:" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("C:/a/b/..")); assertEquals("C:" + SEP + "", FilenameUtils.normalizeNoEndSeparator("C:/a/b/../..")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:/a/b/../../..")); assertEquals("C:" + SEP + "a" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("C:/a/b/../c/../d")); assertEquals("C:" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("C:/a/b//d")); assertEquals("C:" + SEP + "a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("C:/a/b/././.")); assertEquals("C:" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("C:/./a")); assertEquals("C:" + SEP + "", FilenameUtils.normalizeNoEndSeparator("C:/./")); assertEquals("C:" + SEP + "", FilenameUtils.normalizeNoEndSeparator("C:/.")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:/../a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:/..")); assertEquals("C:" + SEP + "", FilenameUtils.normalizeNoEndSeparator("C:/")); assertEquals("C:" + "a", FilenameUtils.normalizeNoEndSeparator("C:a")); assertEquals("C:" + "a", FilenameUtils.normalizeNoEndSeparator("C:a/")); assertEquals("C:" + "a" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("C:a/b/../c")); assertEquals("C:" + "c", 
FilenameUtils.normalizeNoEndSeparator("C:a/b/../../c")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:a/b/../../../c")); assertEquals("C:" + "a", FilenameUtils.normalizeNoEndSeparator("C:a/b/..")); assertEquals("C:" + "", FilenameUtils.normalizeNoEndSeparator("C:a/b/../..")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:a/b/../../..")); assertEquals("C:" + "a" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("C:a/b/../c/../d")); assertEquals("C:" + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("C:a/b//d")); assertEquals("C:" + "a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("C:a/b/././.")); assertEquals("C:" + "a", FilenameUtils.normalizeNoEndSeparator("C:./a")); assertEquals("C:" + "", FilenameUtils.normalizeNoEndSeparator("C:./")); assertEquals("C:" + "", FilenameUtils.normalizeNoEndSeparator("C:.")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:../a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("C:..")); assertEquals("C:" + "", FilenameUtils.normalizeNoEndSeparator("C:")); assertEquals(SEP + SEP + "server" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("//server/a")); assertEquals(SEP + SEP + "server" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("//server/a/")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("//server/a/b/../c")); assertEquals(SEP + SEP + "server" + SEP + "c", FilenameUtils.normalizeNoEndSeparator("//server/a/b/../../c")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("//server/a/b/../../../c")); assertEquals(SEP + SEP + "server" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("//server/a/b/..")); assertEquals(SEP + SEP + "server" + SEP + "", FilenameUtils.normalizeNoEndSeparator("//server/a/b/../..")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("//server/a/b/../../..")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "d", 
FilenameUtils.normalizeNoEndSeparator("//server/a/b/../c/../d")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "b" + SEP + "d", FilenameUtils.normalizeNoEndSeparator("//server/a/b//d")); assertEquals(SEP + SEP + "server" + SEP + "a" + SEP + "b", FilenameUtils.normalizeNoEndSeparator("//server/a/b/././.")); assertEquals(SEP + SEP + "server" + SEP + "a", FilenameUtils.normalizeNoEndSeparator("//server/./a")); assertEquals(SEP + SEP + "server" + SEP + "", FilenameUtils.normalizeNoEndSeparator("//server/./")); assertEquals(SEP + SEP + "server" + SEP + "", FilenameUtils.normalizeNoEndSeparator("//server/.")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("//server/../a")); assertEquals(null, FilenameUtils.normalizeNoEndSeparator("//server/..")); assertEquals(SEP + SEP + "server" + SEP + "", FilenameUtils.normalizeNoEndSeparator("//server/")); } @Test public void testNormalizeNoEndSeparatorUnixWin() throws Exception { // Normalize (Unix Separator) assertEquals("/a/c", FilenameUtils.normalizeNoEndSeparator("/a/b/../c/", true)); assertEquals("/a/c", FilenameUtils.normalizeNoEndSeparator("\\a\\b\\..\\c\\", true)); // Normalize (Windows Separator) assertEquals("\\a\\c", FilenameUtils.normalizeNoEndSeparator("/a/b/../c/", false)); assertEquals("\\a\\c", FilenameUtils.normalizeNoEndSeparator("\\a\\b\\..\\c\\", false)); } //----------------------------------------------------------------------- @Test public void testConcat() { assertEquals(null, FilenameUtils.concat("", null)); assertEquals(null, FilenameUtils.concat(null, null)); assertEquals(null, FilenameUtils.concat(null, "")); assertEquals(null, FilenameUtils.concat(null, "a")); assertEquals(SEP + "a", FilenameUtils.concat(null, "/a")); assertEquals(null, FilenameUtils.concat("", ":")); // invalid prefix assertEquals(null, FilenameUtils.concat(":", "")); // invalid prefix assertEquals("f" + SEP, FilenameUtils.concat("", "f/")); assertEquals("f", FilenameUtils.concat("", "f")); assertEquals("a" + SEP + 
"f" + SEP, FilenameUtils.concat("a/", "f/")); assertEquals("a" + SEP + "f", FilenameUtils.concat("a", "f")); assertEquals("a" + SEP + "b" + SEP + "f" + SEP, FilenameUtils.concat("a/b/", "f/")); assertEquals("a" + SEP + "b" + SEP + "f", FilenameUtils.concat("a/b", "f")); assertEquals("a" + SEP + "f" + SEP, FilenameUtils.concat("a/b/", "../f/")); assertEquals("a" + SEP + "f", FilenameUtils.concat("a/b", "../f")); assertEquals("a" + SEP + "c" + SEP + "g" + SEP, FilenameUtils.concat("a/b/../c/", "f/../g/")); assertEquals("a" + SEP + "c" + SEP + "g", FilenameUtils.concat("a/b/../c", "f/../g")); assertEquals("a" + SEP + "c.txt" + SEP + "f", FilenameUtils.concat("a/c.txt", "f")); assertEquals(SEP + "f" + SEP, FilenameUtils.concat("", "/f/")); assertEquals(SEP + "f", FilenameUtils.concat("", "/f")); assertEquals(SEP + "f" + SEP, FilenameUtils.concat("a/", "/f/")); assertEquals(SEP + "f", FilenameUtils.concat("a", "/f")); assertEquals(SEP + "c" + SEP + "d", FilenameUtils.concat("a/b/", "/c/d")); assertEquals("C:c" + SEP + "d", FilenameUtils.concat("a/b/", "C:c/d")); assertEquals("C:" + SEP + "c" + SEP + "d", FilenameUtils.concat("a/b/", "C:/c/d")); assertEquals("~" + SEP + "c" + SEP + "d", FilenameUtils.concat("a/b/", "~/c/d")); assertEquals("~user" + SEP + "c" + SEP + "d", FilenameUtils.concat("a/b/", "~user/c/d")); assertEquals("~" + SEP, FilenameUtils.concat("a/b/", "~")); assertEquals("~user" + SEP, FilenameUtils.concat("a/b/", "~user")); } //----------------------------------------------------------------------- @Test public void testSeparatorsToUnix() { assertEquals(null, FilenameUtils.separatorsToUnix(null)); assertEquals("/a/b/c", FilenameUtils.separatorsToUnix("/a/b/c")); assertEquals("/a/b/c.txt", FilenameUtils.separatorsToUnix("/a/b/c.txt")); assertEquals("/a/b/c", FilenameUtils.separatorsToUnix("/a/b\\c")); assertEquals("/a/b/c", FilenameUtils.separatorsToUnix("\\a\\b\\c")); assertEquals("D:/a/b/c", FilenameUtils.separatorsToUnix("D:\\a\\b\\c")); } @Test public 
// All separators are converted to backslashes.
void testSeparatorsToWindows() {
    assertEquals(null, FilenameUtils.separatorsToWindows(null));
    assertEquals("\\a\\b\\c", FilenameUtils.separatorsToWindows("\\a\\b\\c"));
    assertEquals("\\a\\b\\c.txt", FilenameUtils.separatorsToWindows("\\a\\b\\c.txt"));
    assertEquals("\\a\\b\\c", FilenameUtils.separatorsToWindows("\\a\\b/c"));
    assertEquals("\\a\\b\\c", FilenameUtils.separatorsToWindows("/a/b/c"));
    assertEquals("D:\\a\\b\\c", FilenameUtils.separatorsToWindows("D:/a/b/c"));
}

// Conversion result depends on the platform the test runs on.
@Test
public void testSeparatorsToSystem() {
    if (WINDOWS) {
        assertEquals(null, FilenameUtils.separatorsToSystem(null));
        assertEquals("\\a\\b\\c", FilenameUtils.separatorsToSystem("\\a\\b\\c"));
        assertEquals("\\a\\b\\c.txt", FilenameUtils.separatorsToSystem("\\a\\b\\c.txt"));
        assertEquals("\\a\\b\\c", FilenameUtils.separatorsToSystem("\\a\\b/c"));
        assertEquals("\\a\\b\\c", FilenameUtils.separatorsToSystem("/a/b/c"));
        assertEquals("D:\\a\\b\\c", FilenameUtils.separatorsToSystem("D:/a/b/c"));
    } else {
        assertEquals(null, FilenameUtils.separatorsToSystem(null));
        assertEquals("/a/b/c", FilenameUtils.separatorsToSystem("/a/b/c"));
        assertEquals("/a/b/c.txt", FilenameUtils.separatorsToSystem("/a/b/c.txt"));
        assertEquals("/a/b/c", FilenameUtils.separatorsToSystem("/a/b\\c"));
        assertEquals("/a/b/c", FilenameUtils.separatorsToSystem("\\a\\b\\c"));
        assertEquals("D:/a/b/c", FilenameUtils.separatorsToSystem("D:\\a\\b\\c"));
    }
}

//-----------------------------------------------------------------------
// Prefix length: -1 means the filename is invalid.
@Test
public void testGetPrefixLength() {
    assertEquals(-1, FilenameUtils.getPrefixLength(null));
    assertEquals(-1, FilenameUtils.getPrefixLength(":"));
    assertEquals(-1, FilenameUtils.getPrefixLength("1:\\a\\b\\c.txt"));
    assertEquals(-1, FilenameUtils.getPrefixLength("1:"));
    assertEquals(-1, FilenameUtils.getPrefixLength("1:a"));
    assertEquals(-1, FilenameUtils.getPrefixLength("\\\\\\a\\b\\c.txt"));
    assertEquals(-1, FilenameUtils.getPrefixLength("\\\\a"));
    assertEquals(0, FilenameUtils.getPrefixLength(""));
    assertEquals(1, FilenameUtils.getPrefixLength("\\"));
    assertEquals(2, FilenameUtils.getPrefixLength("C:"));
    assertEquals(3, FilenameUtils.getPrefixLength("C:\\"));
    assertEquals(9, FilenameUtils.getPrefixLength("//server/"));
    // "~" and "~user" prefixes count an implied trailing separator
    assertEquals(2, FilenameUtils.getPrefixLength("~"));
    assertEquals(2, FilenameUtils.getPrefixLength("~/"));
    assertEquals(6, FilenameUtils.getPrefixLength("~user"));
    assertEquals(6, FilenameUtils.getPrefixLength("~user/"));
    assertEquals(0, FilenameUtils.getPrefixLength("a\\b\\c.txt"));
    assertEquals(1, FilenameUtils.getPrefixLength("\\a\\b\\c.txt"));
    assertEquals(2, FilenameUtils.getPrefixLength("C:a\\b\\c.txt"));
    assertEquals(3, FilenameUtils.getPrefixLength("C:\\a\\b\\c.txt"));
    assertEquals(9, FilenameUtils.getPrefixLength("\\\\server\\a\\b\\c.txt"));
    assertEquals(0, FilenameUtils.getPrefixLength("a/b/c.txt"));
    assertEquals(1, FilenameUtils.getPrefixLength("/a/b/c.txt"));
    assertEquals(3, FilenameUtils.getPrefixLength("C:/a/b/c.txt"));
    assertEquals(9, FilenameUtils.getPrefixLength("//server/a/b/c.txt"));
    assertEquals(2, FilenameUtils.getPrefixLength("~/a/b/c.txt"));
    assertEquals(6, FilenameUtils.getPrefixLength("~user/a/b/c.txt"));
    assertEquals(0, FilenameUtils.getPrefixLength("a\\b\\c.txt"));
    assertEquals(1, FilenameUtils.getPrefixLength("\\a\\b\\c.txt"));
    assertEquals(2, FilenameUtils.getPrefixLength("~\\a\\b\\c.txt"));
    assertEquals(6, FilenameUtils.getPrefixLength("~user\\a\\b\\c.txt"));
    assertEquals(9, FilenameUtils.getPrefixLength("//server/a/b/c.txt"));
    // triple-slash UNC-like names are invalid
    assertEquals(-1, FilenameUtils.getPrefixLength("\\\\\\a\\b\\c.txt"));
    assertEquals(-1, FilenameUtils.getPrefixLength("///a/b/c.txt"));
    assertEquals(1, FilenameUtils.getPrefixLength("/:foo"));
    assertEquals(1, FilenameUtils.getPrefixLength("/:/"));
    assertEquals(1, FilenameUtils.getPrefixLength("/:::::::.txt"));
}

// Index of the last slash or backslash, -1 when none.
@Test
public void testIndexOfLastSeparator() {
    assertEquals(-1, FilenameUtils.indexOfLastSeparator(null));
    assertEquals(-1,
FilenameUtils.indexOfLastSeparator("noseperator.inthispath"));
    assertEquals(3, FilenameUtils.indexOfLastSeparator("a/b/c"));
    assertEquals(3, FilenameUtils.indexOfLastSeparator("a\\b\\c"));
}

// Index of the extension dot, -1 when there is no extension
// (a dot before the last separator does not count).
@Test
public void testIndexOfExtension() {
    assertEquals(-1, FilenameUtils.indexOfExtension(null));
    assertEquals(-1, FilenameUtils.indexOfExtension("file"));
    assertEquals(4, FilenameUtils.indexOfExtension("file.txt"));
    assertEquals(13, FilenameUtils.indexOfExtension("a.txt/b.txt/c.txt"));
    assertEquals(-1, FilenameUtils.indexOfExtension("a/b/c"));
    assertEquals(-1, FilenameUtils.indexOfExtension("a\\b\\c"));
    assertEquals(-1, FilenameUtils.indexOfExtension("a/b.notextension/c"));
    assertEquals(-1, FilenameUtils.indexOfExtension("a\\b.notextension\\c"));
}

//-----------------------------------------------------------------------
// getPrefix returns null for invalid filenames, "" when there is no prefix.
@Test
public void testGetPrefix() {
    assertEquals(null, FilenameUtils.getPrefix(null));
    assertEquals(null, FilenameUtils.getPrefix(":"));
    assertEquals(null, FilenameUtils.getPrefix("1:\\a\\b\\c.txt"));
    assertEquals(null, FilenameUtils.getPrefix("1:"));
    assertEquals(null, FilenameUtils.getPrefix("1:a"));
    assertEquals(null, FilenameUtils.getPrefix("\\\\\\a\\b\\c.txt"));
    assertEquals(null, FilenameUtils.getPrefix("\\\\a"));
    assertEquals("", FilenameUtils.getPrefix(""));
    assertEquals("\\", FilenameUtils.getPrefix("\\"));
    assertEquals("C:", FilenameUtils.getPrefix("C:"));
    assertEquals("C:\\", FilenameUtils.getPrefix("C:\\"));
    assertEquals("//server/", FilenameUtils.getPrefix("//server/"));
    // "~" prefixes gain a trailing separator even when none was supplied
    assertEquals("~/", FilenameUtils.getPrefix("~"));
    assertEquals("~/", FilenameUtils.getPrefix("~/"));
    assertEquals("~user/", FilenameUtils.getPrefix("~user"));
    assertEquals("~user/", FilenameUtils.getPrefix("~user/"));
    assertEquals("", FilenameUtils.getPrefix("a\\b\\c.txt"));
    assertEquals("\\", FilenameUtils.getPrefix("\\a\\b\\c.txt"));
    assertEquals("C:\\", FilenameUtils.getPrefix("C:\\a\\b\\c.txt"));
    assertEquals("\\\\server\\",
FilenameUtils.getPrefix("\\\\server\\a\\b\\c.txt"));
    assertEquals("", FilenameUtils.getPrefix("a/b/c.txt"));
    assertEquals("/", FilenameUtils.getPrefix("/a/b/c.txt"));
    assertEquals("C:/", FilenameUtils.getPrefix("C:/a/b/c.txt"));
    assertEquals("//server/", FilenameUtils.getPrefix("//server/a/b/c.txt"));
    assertEquals("~/", FilenameUtils.getPrefix("~/a/b/c.txt"));
    assertEquals("~user/", FilenameUtils.getPrefix("~user/a/b/c.txt"));
    assertEquals("", FilenameUtils.getPrefix("a\\b\\c.txt"));
    assertEquals("\\", FilenameUtils.getPrefix("\\a\\b\\c.txt"));
    assertEquals("~\\", FilenameUtils.getPrefix("~\\a\\b\\c.txt"));
    assertEquals("~user\\", FilenameUtils.getPrefix("~user\\a\\b\\c.txt"));
}

// A filename containing an embedded NUL byte may be rejected with an
// IllegalArgumentException; the test accepts either outcome.
@Test
public void testGetPrefix_with_nullbyte() {
    try {
        assertEquals("~user\\", FilenameUtils.getPrefix("~u\u0000ser\\a\\b\\c.txt"));
    } catch (IllegalArgumentException ignore) {
        // rejection of the NUL byte is also acceptable
    }
}

// getPath: path section without the prefix and without the filename;
// null for invalid input, "" when there is no path.
@Test
public void testGetPath() {
    assertEquals(null, FilenameUtils.getPath(null));
    assertEquals("", FilenameUtils.getPath("noseperator.inthispath"));
    assertEquals("", FilenameUtils.getPath("/noseperator.inthispath"));
    assertEquals("", FilenameUtils.getPath("\\noseperator.inthispath"));
    assertEquals("a/b/", FilenameUtils.getPath("a/b/c.txt"));
    assertEquals("a/b/", FilenameUtils.getPath("a/b/c"));
    assertEquals("a/b/c/", FilenameUtils.getPath("a/b/c/"));
    assertEquals("a\\b\\", FilenameUtils.getPath("a\\b\\c"));
    assertEquals(null, FilenameUtils.getPath(":"));
    assertEquals(null, FilenameUtils.getPath("1:/a/b/c.txt"));
    assertEquals(null, FilenameUtils.getPath("1:"));
    assertEquals(null, FilenameUtils.getPath("1:a"));
    assertEquals(null, FilenameUtils.getPath("///a/b/c.txt"));
    assertEquals(null, FilenameUtils.getPath("//a"));
    assertEquals("", FilenameUtils.getPath(""));
    assertEquals("", FilenameUtils.getPath("C:"));
    assertEquals("", FilenameUtils.getPath("C:/"));
    assertEquals("", FilenameUtils.getPath("//server/"));
    assertEquals("", FilenameUtils.getPath("~"));
    assertEquals("", FilenameUtils.getPath("~/"));
assertEquals("", FilenameUtils.getPath("~user")); assertEquals("", FilenameUtils.getPath("~user/")); assertEquals("a/b/", FilenameUtils.getPath("a/b/c.txt")); assertEquals("a/b/", FilenameUtils.getPath("/a/b/c.txt")); assertEquals("", FilenameUtils.getPath("C:a")); assertEquals("a/b/", FilenameUtils.getPath("C:a/b/c.txt")); assertEquals("a/b/", FilenameUtils.getPath("C:/a/b/c.txt")); assertEquals("a/b/", FilenameUtils.getPath("//server/a/b/c.txt")); assertEquals("a/b/", FilenameUtils.getPath("~/a/b/c.txt")); assertEquals("a/b/", FilenameUtils.getPath("~user/a/b/c.txt")); } @Test(expected = IllegalArgumentException.class) public void testGetPath_with_nullbyte() { assertEquals("a/b/", FilenameUtils.getPath("~user/a/\u0000b/c.txt")); } @Test public void testGetPathNoEndSeparator() { assertEquals(null, FilenameUtils.getPath(null)); assertEquals("", FilenameUtils.getPath("noseperator.inthispath")); assertEquals("", FilenameUtils.getPathNoEndSeparator("/noseperator.inthispath")); assertEquals("", FilenameUtils.getPathNoEndSeparator("\\noseperator.inthispath")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("a/b/c.txt")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("a/b/c")); assertEquals("a/b/c", FilenameUtils.getPathNoEndSeparator("a/b/c/")); assertEquals("a\\b", FilenameUtils.getPathNoEndSeparator("a\\b\\c")); assertEquals(null, FilenameUtils.getPathNoEndSeparator(":")); assertEquals(null, FilenameUtils.getPathNoEndSeparator("1:/a/b/c.txt")); assertEquals(null, FilenameUtils.getPathNoEndSeparator("1:")); assertEquals(null, FilenameUtils.getPathNoEndSeparator("1:a")); assertEquals(null, FilenameUtils.getPathNoEndSeparator("///a/b/c.txt")); assertEquals(null, FilenameUtils.getPathNoEndSeparator("//a")); assertEquals("", FilenameUtils.getPathNoEndSeparator("")); assertEquals("", FilenameUtils.getPathNoEndSeparator("C:")); assertEquals("", FilenameUtils.getPathNoEndSeparator("C:/")); assertEquals("", FilenameUtils.getPathNoEndSeparator("//server/")); 
assertEquals("", FilenameUtils.getPathNoEndSeparator("~")); assertEquals("", FilenameUtils.getPathNoEndSeparator("~/")); assertEquals("", FilenameUtils.getPathNoEndSeparator("~user")); assertEquals("", FilenameUtils.getPathNoEndSeparator("~user/")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("a/b/c.txt")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("/a/b/c.txt")); assertEquals("", FilenameUtils.getPathNoEndSeparator("C:a")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("C:a/b/c.txt")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("C:/a/b/c.txt")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("//server/a/b/c.txt")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("~/a/b/c.txt")); assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("~user/a/b/c.txt")); } @Test public void testGetPathNoEndSeparator_with_null_byte() { try { assertEquals("a/b", FilenameUtils.getPathNoEndSeparator("~user/a\u0000/b/c.txt")); } catch (IllegalArgumentException ignore) { } } @Test public void testGetFullPath() { assertEquals(null, FilenameUtils.getFullPath(null)); assertEquals("", FilenameUtils.getFullPath("noseperator.inthispath")); assertEquals("a/b/", FilenameUtils.getFullPath("a/b/c.txt")); assertEquals("a/b/", FilenameUtils.getFullPath("a/b/c")); assertEquals("a/b/c/", FilenameUtils.getFullPath("a/b/c/")); assertEquals("a\\b\\", FilenameUtils.getFullPath("a\\b\\c")); assertEquals(null, FilenameUtils.getFullPath(":")); assertEquals(null, FilenameUtils.getFullPath("1:/a/b/c.txt")); assertEquals(null, FilenameUtils.getFullPath("1:")); assertEquals(null, FilenameUtils.getFullPath("1:a")); assertEquals(null, FilenameUtils.getFullPath("///a/b/c.txt")); assertEquals(null, FilenameUtils.getFullPath("//a")); assertEquals("", FilenameUtils.getFullPath("")); assertEquals("C:", FilenameUtils.getFullPath("C:")); assertEquals("C:/", FilenameUtils.getFullPath("C:/")); assertEquals("//server/", 
FilenameUtils.getFullPath("//server/"));
    assertEquals("~/", FilenameUtils.getFullPath("~"));
    assertEquals("~/", FilenameUtils.getFullPath("~/"));
    assertEquals("~user/", FilenameUtils.getFullPath("~user"));
    assertEquals("~user/", FilenameUtils.getFullPath("~user/"));
    assertEquals("a/b/", FilenameUtils.getFullPath("a/b/c.txt"));
    assertEquals("/a/b/", FilenameUtils.getFullPath("/a/b/c.txt"));
    assertEquals("C:", FilenameUtils.getFullPath("C:a"));
    assertEquals("C:a/b/", FilenameUtils.getFullPath("C:a/b/c.txt"));
    assertEquals("C:/a/b/", FilenameUtils.getFullPath("C:/a/b/c.txt"));
    assertEquals("//server/a/b/", FilenameUtils.getFullPath("//server/a/b/c.txt"));
    assertEquals("~/a/b/", FilenameUtils.getFullPath("~/a/b/c.txt"));
    assertEquals("~user/a/b/", FilenameUtils.getFullPath("~user/a/b/c.txt"));
}

// Same as getFullPath but without the trailing separator.
@Test
public void testGetFullPathNoEndSeparator() {
    assertEquals(null, FilenameUtils.getFullPathNoEndSeparator(null));
    assertEquals("", FilenameUtils.getFullPathNoEndSeparator("noseperator.inthispath"));
    assertEquals("a/b", FilenameUtils.getFullPathNoEndSeparator("a/b/c.txt"));
    assertEquals("a/b", FilenameUtils.getFullPathNoEndSeparator("a/b/c"));
    assertEquals("a/b/c", FilenameUtils.getFullPathNoEndSeparator("a/b/c/"));
    assertEquals("a\\b", FilenameUtils.getFullPathNoEndSeparator("a\\b\\c"));
    assertEquals(null, FilenameUtils.getFullPathNoEndSeparator(":"));
    assertEquals(null, FilenameUtils.getFullPathNoEndSeparator("1:/a/b/c.txt"));
    assertEquals(null, FilenameUtils.getFullPathNoEndSeparator("1:"));
    assertEquals(null, FilenameUtils.getFullPathNoEndSeparator("1:a"));
    assertEquals(null, FilenameUtils.getFullPathNoEndSeparator("///a/b/c.txt"));
    assertEquals(null, FilenameUtils.getFullPathNoEndSeparator("//a"));
    assertEquals("", FilenameUtils.getFullPathNoEndSeparator(""));
    assertEquals("C:", FilenameUtils.getFullPathNoEndSeparator("C:"));
    // prefix-only inputs keep their separator even in the "NoEndSeparator" variant
    assertEquals("C:/", FilenameUtils.getFullPathNoEndSeparator("C:/"));
    assertEquals("//server/",
FilenameUtils.getFullPathNoEndSeparator("//server/"));
    assertEquals("~", FilenameUtils.getFullPathNoEndSeparator("~"));
    assertEquals("~/", FilenameUtils.getFullPathNoEndSeparator("~/"));
    assertEquals("~user", FilenameUtils.getFullPathNoEndSeparator("~user"));
    assertEquals("~user/", FilenameUtils.getFullPathNoEndSeparator("~user/"));
    assertEquals("a/b", FilenameUtils.getFullPathNoEndSeparator("a/b/c.txt"));
    assertEquals("/a/b", FilenameUtils.getFullPathNoEndSeparator("/a/b/c.txt"));
    assertEquals("C:", FilenameUtils.getFullPathNoEndSeparator("C:a"));
    assertEquals("C:a/b", FilenameUtils.getFullPathNoEndSeparator("C:a/b/c.txt"));
    assertEquals("C:/a/b", FilenameUtils.getFullPathNoEndSeparator("C:/a/b/c.txt"));
    assertEquals("//server/a/b", FilenameUtils.getFullPathNoEndSeparator("//server/a/b/c.txt"));
    assertEquals("~/a/b", FilenameUtils.getFullPathNoEndSeparator("~/a/b/c.txt"));
    assertEquals("~user/a/b", FilenameUtils.getFullPathNoEndSeparator("~user/a/b/c.txt"));
}

/**
 * Test for https://issues.apache.org/jira/browse/IO-248
 */
@Test
public void testGetFullPathNoEndSeparator_IO_248() {
    // Test single separator
    assertEquals("/", FilenameUtils.getFullPathNoEndSeparator("/"));
    assertEquals("\\", FilenameUtils.getFullPathNoEndSeparator("\\"));
    // Test one level directory
    assertEquals("/", FilenameUtils.getFullPathNoEndSeparator("/abc"));
    assertEquals("\\", FilenameUtils.getFullPathNoEndSeparator("\\abc"));
    // Test two level directory
    assertEquals("/abc", FilenameUtils.getFullPathNoEndSeparator("/abc/xyz"));
    assertEquals("\\abc", FilenameUtils.getFullPathNoEndSeparator("\\abc\\xyz"));
}

// getName: the filename after the last separator ("" for directory-like input).
@Test
public void testGetName() {
    assertEquals(null, FilenameUtils.getName(null));
    assertEquals("noseperator.inthispath", FilenameUtils.getName("noseperator.inthispath"));
    assertEquals("c.txt", FilenameUtils.getName("a/b/c.txt"));
    assertEquals("c", FilenameUtils.getName("a/b/c"));
    assertEquals("", FilenameUtils.getName("a/b/c/"));
    assertEquals("c", FilenameUtils.getName("a\\b\\c"));
}

@Test
// Embedded NUL byte may be rejected; either outcome accepted.
public void testInjectionFailure() {
    try {
        assertEquals("c", FilenameUtils.getName("a\\b\\\u0000c"));
    } catch (IllegalArgumentException ignore) {
        // rejection of the NUL byte is also acceptable
    }
}

// getBaseName: filename without path and without the last extension.
@Test
public void testGetBaseName() {
    assertEquals(null, FilenameUtils.getBaseName(null));
    assertEquals("noseperator", FilenameUtils.getBaseName("noseperator.inthispath"));
    assertEquals("c", FilenameUtils.getBaseName("a/b/c.txt"));
    assertEquals("c", FilenameUtils.getBaseName("a/b/c"));
    assertEquals("", FilenameUtils.getBaseName("a/b/c/"));
    assertEquals("c", FilenameUtils.getBaseName("a\\b\\c"));
    // only the final extension is stripped
    assertEquals("file.txt", FilenameUtils.getBaseName("file.txt.bak"));
}

@Test
public void testGetBaseName_with_nullByte() {
    try {
        assertEquals("file.txt", FilenameUtils.getBaseName("fil\u0000e.txt.bak"));
    } catch (IllegalArgumentException ignore) {
        // rejection of the NUL byte is also acceptable
    }
}

// getExtension: text after the last dot, "" when none
// (a dot before the last separator does not count).
@Test
public void testGetExtension() {
    assertEquals(null, FilenameUtils.getExtension(null));
    assertEquals("ext", FilenameUtils.getExtension("file.ext"));
    assertEquals("", FilenameUtils.getExtension("README"));
    assertEquals("com", FilenameUtils.getExtension("domain.dot.com"));
    assertEquals("jpeg", FilenameUtils.getExtension("image.jpeg"));
    assertEquals("", FilenameUtils.getExtension("a.b/c"));
    assertEquals("txt", FilenameUtils.getExtension("a.b/c.txt"));
    assertEquals("", FilenameUtils.getExtension("a/b/c"));
    assertEquals("", FilenameUtils.getExtension("a.b\\c"));
    assertEquals("txt", FilenameUtils.getExtension("a.b\\c.txt"));
    assertEquals("", FilenameUtils.getExtension("a\\b\\c"));
    assertEquals("", FilenameUtils.getExtension("C:\\temp\\foo.bar\\README"));
    assertEquals("ext", FilenameUtils.getExtension("../filename.ext"));
}

// removeExtension: same dot-detection rules, extension dropped.
@Test
public void testRemoveExtension() {
    assertEquals(null, FilenameUtils.removeExtension(null));
    assertEquals("file", FilenameUtils.removeExtension("file.ext"));
    assertEquals("README", FilenameUtils.removeExtension("README"));
    assertEquals("domain.dot", FilenameUtils.removeExtension("domain.dot.com"));
    assertEquals("image",
FilenameUtils.removeExtension("image.jpeg"));
    assertEquals("a.b/c", FilenameUtils.removeExtension("a.b/c"));
    assertEquals("a.b/c", FilenameUtils.removeExtension("a.b/c.txt"));
    assertEquals("a/b/c", FilenameUtils.removeExtension("a/b/c"));
    assertEquals("a.b\\c", FilenameUtils.removeExtension("a.b\\c"));
    assertEquals("a.b\\c", FilenameUtils.removeExtension("a.b\\c.txt"));
    assertEquals("a\\b\\c", FilenameUtils.removeExtension("a\\b\\c"));
    assertEquals("C:\\temp\\foo.bar\\README", FilenameUtils.removeExtension("C:\\temp\\foo.bar\\README"));
    assertEquals("../filename", FilenameUtils.removeExtension("../filename.ext"));
}

//-----------------------------------------------------------------------
// Plain equality: case-sensitive, no normalization.
@Test
public void testEquals() {
    assertTrue(FilenameUtils.equals(null, null));
    assertFalse(FilenameUtils.equals(null, ""));
    assertFalse(FilenameUtils.equals("", null));
    assertTrue(FilenameUtils.equals("", ""));
    assertTrue(FilenameUtils.equals("file.txt", "file.txt"));
    assertFalse(FilenameUtils.equals("file.txt", "FILE.TXT"));
    assertFalse(FilenameUtils.equals("a\\b\\file.txt", "a/b/file.txt"));
}

// Platform-sensitive equality: case-insensitive only on Windows.
@Test
public void testEqualsOnSystem() {
    assertTrue(FilenameUtils.equalsOnSystem(null, null));
    assertFalse(FilenameUtils.equalsOnSystem(null, ""));
    assertFalse(FilenameUtils.equalsOnSystem("", null));
    assertTrue(FilenameUtils.equalsOnSystem("", ""));
    assertTrue(FilenameUtils.equalsOnSystem("file.txt", "file.txt"));
    assertEquals(WINDOWS, FilenameUtils.equalsOnSystem("file.txt", "FILE.TXT"));
    assertFalse(FilenameUtils.equalsOnSystem("a\\b\\file.txt", "a/b/file.txt"));
}

//-----------------------------------------------------------------------
// Equality after normalizing separators and ".." segments.
@Test
public void testEqualsNormalized() {
    assertTrue(FilenameUtils.equalsNormalized(null, null));
    assertFalse(FilenameUtils.equalsNormalized(null, ""));
    assertFalse(FilenameUtils.equalsNormalized("", null));
    assertTrue(FilenameUtils.equalsNormalized("", ""));
    assertTrue(FilenameUtils.equalsNormalized("file.txt", "file.txt"));
assertFalse(FilenameUtils.equalsNormalized("file.txt", "FILE.TXT")); assertTrue(FilenameUtils.equalsNormalized("a\\b\\file.txt", "a/b/file.txt")); assertFalse(FilenameUtils.equalsNormalized("a/b/", "a/b")); } @Test public void testEqualsNormalizedOnSystem() { assertTrue(FilenameUtils.equalsNormalizedOnSystem(null, null)); assertFalse(FilenameUtils.equalsNormalizedOnSystem(null, "")); assertFalse(FilenameUtils.equalsNormalizedOnSystem("", null)); assertTrue(FilenameUtils.equalsNormalizedOnSystem("", "")); assertTrue(FilenameUtils.equalsNormalizedOnSystem("file.txt", "file.txt")); assertEquals(WINDOWS, FilenameUtils.equalsNormalizedOnSystem("file.txt", "FILE.TXT")); assertTrue(FilenameUtils.equalsNormalizedOnSystem("a\\b\\file.txt", "a/b/file.txt")); assertFalse(FilenameUtils.equalsNormalizedOnSystem("a/b/", "a/b")); } /** * Test for https://issues.apache.org/jira/browse/IO-128 */ @Test public void testEqualsNormalizedError_IO_128() { try { FilenameUtils.equalsNormalizedOnSystem("//file.txt", "file.txt"); fail("Invalid normalized first file"); } catch (final NullPointerException e) { // expected result } try { FilenameUtils.equalsNormalizedOnSystem("file.txt", "//file.txt"); fail("Invalid normalized second file"); } catch (final NullPointerException e) { // expected result } try { FilenameUtils.equalsNormalizedOnSystem("//file.txt", "//file.txt"); fail("Invalid normalized both filse"); } catch (final NullPointerException e) { // expected result } } @Test public void testEquals_fullControl() { assertFalse(FilenameUtils.equals("file.txt", "FILE.TXT", true, IOCase.SENSITIVE)); assertTrue(FilenameUtils.equals("file.txt", "FILE.TXT", true, IOCase.INSENSITIVE)); assertEquals(WINDOWS, FilenameUtils.equals("file.txt", "FILE.TXT", true, IOCase.SYSTEM)); assertFalse(FilenameUtils.equals("file.txt", "FILE.TXT", true, null)); } //----------------------------------------------------------------------- @Test public void testIsExtension() { 
    // null/empty extension matches only a filename without any extension
    assertFalse(FilenameUtils.isExtension(null, (String) null));
    assertFalse(FilenameUtils.isExtension("file.txt", (String) null));
    assertTrue(FilenameUtils.isExtension("file", (String) null));
    assertFalse(FilenameUtils.isExtension("file.txt", ""));
    assertTrue(FilenameUtils.isExtension("file", ""));
    assertTrue(FilenameUtils.isExtension("file.txt", "txt"));
    assertFalse(FilenameUtils.isExtension("file.txt", "rtf"));
    assertFalse(FilenameUtils.isExtension("a/b/file.txt", (String) null));
    assertFalse(FilenameUtils.isExtension("a/b/file.txt", ""));
    assertTrue(FilenameUtils.isExtension("a/b/file.txt", "txt"));
    assertFalse(FilenameUtils.isExtension("a/b/file.txt", "rtf"));
    assertFalse(FilenameUtils.isExtension("a.b/file.txt", (String) null));
    assertFalse(FilenameUtils.isExtension("a.b/file.txt", ""));
    assertTrue(FilenameUtils.isExtension("a.b/file.txt", "txt"));
    assertFalse(FilenameUtils.isExtension("a.b/file.txt", "rtf"));
    assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", (String) null));
    assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", ""));
    assertTrue(FilenameUtils.isExtension("a\\b\\file.txt", "txt"));
    assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", "rtf"));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", (String) null));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", ""));
    assertTrue(FilenameUtils.isExtension("a.b\\file.txt", "txt"));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", "rtf"));
    // the extension check is case-sensitive
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", "TXT"));
}

// A NUL byte in the filename must be rejected.
@Test
public void testIsExtension_injection() {
    try {
        FilenameUtils.isExtension("a.b\\fi\u0000le.txt", "TXT");
        fail("Should throw IAE");
    } catch (IllegalArgumentException ignore) {
        // expected
    }
}

// Same matrix as testIsExtension, for the String[] overload.
@Test
public void testIsExtensionArray() {
    assertFalse(FilenameUtils.isExtension(null, (String[]) null));
    assertFalse(FilenameUtils.isExtension("file.txt", (String[]) null));
    assertTrue(FilenameUtils.isExtension("file", (String[]) null));
    assertFalse(FilenameUtils.isExtension("file.txt", new
String[0]));
    assertTrue(FilenameUtils.isExtension("file.txt", new String[]{"txt"}));
    assertFalse(FilenameUtils.isExtension("file.txt", new String[]{"rtf"}));
    // an empty-string element matches a filename without an extension
    assertTrue(FilenameUtils.isExtension("file", new String[]{"rtf", ""}));
    assertTrue(FilenameUtils.isExtension("file.txt", new String[]{"rtf", "txt"}));
    assertFalse(FilenameUtils.isExtension("a/b/file.txt", (String[]) null));
    assertFalse(FilenameUtils.isExtension("a/b/file.txt", new String[0]));
    assertTrue(FilenameUtils.isExtension("a/b/file.txt", new String[]{"txt"}));
    assertFalse(FilenameUtils.isExtension("a/b/file.txt", new String[]{"rtf"}));
    assertTrue(FilenameUtils.isExtension("a/b/file.txt", new String[]{"rtf", "txt"}));
    assertFalse(FilenameUtils.isExtension("a.b/file.txt", (String[]) null));
    assertFalse(FilenameUtils.isExtension("a.b/file.txt", new String[0]));
    assertTrue(FilenameUtils.isExtension("a.b/file.txt", new String[]{"txt"}));
    assertFalse(FilenameUtils.isExtension("a.b/file.txt", new String[]{"rtf"}));
    assertTrue(FilenameUtils.isExtension("a.b/file.txt", new String[]{"rtf", "txt"}));
    assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", (String[]) null));
    assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", new String[0]));
    assertTrue(FilenameUtils.isExtension("a\\b\\file.txt", new String[]{"txt"}));
    assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", new String[]{"rtf"}));
    assertTrue(FilenameUtils.isExtension("a\\b\\file.txt", new String[]{"rtf", "txt"}));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", (String[]) null));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new String[0]));
    assertTrue(FilenameUtils.isExtension("a.b\\file.txt", new String[]{"txt"}));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new String[]{"rtf"}));
    assertTrue(FilenameUtils.isExtension("a.b\\file.txt", new String[]{"rtf", "txt"}));
    // case-sensitive matching
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new String[]{"TXT"}));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new String[]{"TXT",
"RTF"})); } @Test public void testIsExtensionCollection() { assertFalse(FilenameUtils.isExtension(null, (Collection<String>) null)); assertFalse(FilenameUtils.isExtension("file.txt", (Collection<String>) null)); assertTrue(FilenameUtils.isExtension("file", (Collection<String>) null)); assertFalse(FilenameUtils.isExtension("file.txt", new ArrayList<String>())); assertTrue(FilenameUtils.isExtension("file.txt", new ArrayList<>(Arrays.asList(new String[]{"txt"})))); assertFalse(FilenameUtils.isExtension("file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf"})))); assertTrue(FilenameUtils.isExtension("file", new ArrayList<>(Arrays.asList(new String[]{"rtf", ""})))); assertTrue(FilenameUtils.isExtension("file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf", "txt"})))); assertFalse(FilenameUtils.isExtension("a/b/file.txt", (Collection<String>) null)); assertFalse(FilenameUtils.isExtension("a/b/file.txt", new ArrayList<String>())); assertTrue(FilenameUtils.isExtension("a/b/file.txt", new ArrayList<>(Arrays.asList(new String[]{"txt"})))); assertFalse(FilenameUtils.isExtension("a/b/file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf"})))); assertTrue(FilenameUtils.isExtension("a/b/file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf", "txt"})))); assertFalse(FilenameUtils.isExtension("a.b/file.txt", (Collection<String>) null)); assertFalse(FilenameUtils.isExtension("a.b/file.txt", new ArrayList<String>())); assertTrue(FilenameUtils.isExtension("a.b/file.txt", new ArrayList<>(Arrays.asList(new String[]{"txt"})))); assertFalse(FilenameUtils.isExtension("a.b/file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf"})))); assertTrue(FilenameUtils.isExtension("a.b/file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf", "txt"})))); assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", (Collection<String>) null)); assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", new ArrayList<String>())); 
    assertTrue(FilenameUtils.isExtension("a\\b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"txt"}))));
    assertFalse(FilenameUtils.isExtension("a\\b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf"}))));
    assertTrue(FilenameUtils.isExtension("a\\b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf", "txt"}))));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", (Collection<String>) null));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new ArrayList<String>()));
    assertTrue(FilenameUtils.isExtension("a.b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"txt"}))));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf"}))));
    assertTrue(FilenameUtils.isExtension("a.b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"rtf", "txt"}))));
    // case-sensitive matching
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"TXT"}))));
    assertFalse(FilenameUtils.isExtension("a.b\\file.txt", new ArrayList<>(Arrays.asList(new String[]{"TXT", "RTF"}))));
}
// end of test class
}
/* * Copyright 2015 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.myschedule; import android.content.Intent; import android.net.Uri; import android.os.Looper; import android.support.test.InstrumentationRegistry; import android.support.test.espresso.Espresso; import android.support.test.espresso.IdlingResource; import android.support.test.filters.FlakyTest; import android.support.test.filters.LargeTest; import android.support.test.filters.Suppress; import android.support.test.runner.AndroidJUnit4; import android.util.Log; import com.google.samples.apps.iosched.Config; import com.google.samples.apps.iosched.R; import com.google.samples.apps.iosched.feedback.SessionFeedbackActivity; import com.google.samples.apps.iosched.injection.ModelProvider; import com.google.samples.apps.iosched.mockdata.MyScheduleMockItems; import com.google.samples.apps.iosched.mockdata.StubActivityContext; import com.google.samples.apps.iosched.navigation.NavigationModel; import com.google.samples.apps.iosched.provider.ScheduleContract; import com.google.samples.apps.iosched.settings.SettingsUtils; import com.google.samples.apps.iosched.testutils.BaseActivityTestRule; import com.google.samples.apps.iosched.testutils.NavigationUtils; import com.google.samples.apps.iosched.testutils.OrientationHelper; import com.google.samples.apps.iosched.testutils.ThrottleContentObserverIdlingResource; import 
com.google.samples.apps.iosched.util.TimeUtils; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import java.util.Date; import static android.support.test.espresso.Espresso.onView; import static android.support.test.espresso.action.ViewActions.click; import static android.support.test.espresso.assertion.ViewAssertions.doesNotExist; import static android.support.test.espresso.assertion.ViewAssertions.matches; import static android.support.test.espresso.intent.Intents.intended; import static android.support.test.espresso.intent.matcher.IntentMatchers.hasAction; import static android.support.test.espresso.intent.matcher.IntentMatchers.hasComponent; import static android.support.test.espresso.intent.matcher.IntentMatchers.hasData; import static android.support.test.espresso.matcher.ViewMatchers.hasSibling; import static android.support.test.espresso.matcher.ViewMatchers.isDisplayed; import static android.support.test.espresso.matcher.ViewMatchers.withId; import static android.support.test.espresso.matcher.ViewMatchers.withText; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsNot.not; /** * UI tests for {@link MyScheduleActivity} for when the user is attending the conference and the * second day of the conference starts in 3 hours. * <p/> * This should be run on devices with a narrow layout only (phones all orientation, tablets in * portrait mode) */ @RunWith(AndroidJUnit4.class) @LargeTest public class MyScheduleActivityTest { /** * The {@link StubMyScheduleModel} needs a {@link android.content.Context} but at the stage it * is created, {@link #mActivityRule} hasn't got an {@link android.app.Activity} yet so we use * the instrumentation target context. 
However, an Activity Context is required by the model for
 * carrying certain actions, such as opening the session that was clicked on (which uses {@link
 * android.content.Context#startActivity(Intent)} and will not work with a non Activity context.
 * We use this {@link StubActivityContext} to later set an activity context at the start of each
 * test if needed (if the test needs to start another activity).
 */
private StubActivityContext mActivityStubContext;

// Stub model injected via ModelProvider before the activity launches.
private StubMyScheduleModel mStubMyScheduleModel;

@Rule
public BaseActivityTestRule<MyScheduleActivity> mActivityRule =
        new BaseActivityTestRule<MyScheduleActivity>(MyScheduleActivity.class) {
            @Override
            protected void beforeActivityLaunched() {
                prepareActivityForInPersonAttendee();

                // Create a stub model to simulate a user attending conference, during the
                // second day
                mActivityStubContext =
                        new StubActivityContext(InstrumentationRegistry.getTargetContext());

                try {
                    /**
                     * {@link MyScheduleModel} uses a Handler, so we need to run this on the
                     * main thread. If we don't, we need to call {@link Looper#prepare()} but
                     * the test runner uses the same non UI thread for setting up each test in a
                     * test class, and therefore, upon trying to run the second test, it
                     * complains that we call {@link Looper#prepare()} on a thread that has
                     * already been prepared. By using the UI thread, we avoid this issue as
                     * the UI thread is already prepared so we don't need to manually do it.
*/
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            mStubMyScheduleModel = new StubMyScheduleModel(
                                    mActivityStubContext,
                                    MyScheduleMockItems.getItemsForAttendeeAfter(1, false),
                                    MyScheduleMockItems.getItemsForAttendeeBefore(2));
                            ModelProvider.setStubModel(mStubMyScheduleModel);
                        }
                    });
                } catch (Throwable throwable) {
                    Log.e("DEBUG", "Error running test " + throwable);
                }
            }
        };

@Before
public void setUp() {
    // Set up time to start of second day of conference
    TimeUtils.setCurrentTimeRelativeToStartOfSecondDayOfConference(
            InstrumentationRegistry.getTargetContext(), 0);

    // Don't show notifications for sessions as they get in the way of the UI
    SettingsUtils.setShowSessionReminders(InstrumentationRegistry.getTargetContext(), false);

    // Mark user as attending conference
    SettingsUtils.setAttendeeAtVenue(InstrumentationRegistry.getTargetContext(), true);
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void day2Selected() {
    // Given a current time 3 hours after the start of the second day
    // Then the second day is selected
    onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE)).check(matches(isDisplayed()));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void viewDay2_clickOnSession_opensSessionScreenIntentFired() {
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // When clicking on the session
    onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE)).perform(click());

    // Then the intent with the session uri is fired
    Uri expectedSessionUri =
            ScheduleContract.Sessions.buildSessionUri(MyScheduleMockItems.SESSION_ID);
    intended(allOf(
            hasAction(equalTo(Intent.ACTION_VIEW)),
            hasData(expectedSessionUri)));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void viewDay1_clickOnSession_opensSessionScreenIntentFired() {
    // Give the stub context a real Activity so startActivity() works.
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // FIX: this test was a copy-paste of the day-2 variant — it never switched to
    // day 1 and clicked the day-2 session title. Select day 1 first and click the
    // day-1 session (SESSION_TITLE_AFTER, per viewDay1_sessionVisible).
    // Given day 1 visible
    showDay(1);

    // When clicking on the session
    onView(withText(MyScheduleMockItems.SESSION_TITLE_AFTER)).perform(click());

    // Then the intent with the session uri is fired
    Uri expectedSessionUri =
            ScheduleContract.Sessions.buildSessionUri(MyScheduleMockItems.SESSION_ID);
    intended(allOf(
            hasAction(equalTo(Intent.ACTION_VIEW)),
            hasData(expectedSessionUri)));
}

@Test
public void viewDay1_clickOnRateSession_opensFeedbackScreenIntentFired() {
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // Given day 1 visible
    showDay(1);

    // When clicking on rate session
    onView(allOf(withText(R.string.my_schedule_rate_this_session), isDisplayed()))
            .perform(click());

    // Then the intent for the feedback screen is fired
    Uri expectedSessionUri =
            ScheduleContract.Sessions.buildSessionUri(MyScheduleMockItems.SESSION_ID);
    intended(allOf(
            hasAction(equalTo(Intent.ACTION_VIEW)),
            hasData(expectedSessionUri),
            hasComponent(SessionFeedbackActivity.class.getName())));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void viewDay2_clickOnBrowseSession_opensSessionsListScreen() {
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // When clicking on browse sessions
    onView(allOf(withText(R.string.browse_sessions), isDisplayed())).perform(click());

    // Then the intent for the sessions list screen is fired
    long slotStart = Config.CONFERENCE_START_MILLIS + 1 * TimeUtils.DAY
            + MyScheduleMockItems.SESSION_AVAILABLE_SLOT_TIME_OFFSET;
    Uri expectedTimeIntervalUri = ScheduleContract.Sessions
            .buildUnscheduledSessionsInInterval(slotStart,
                    slotStart + MyScheduleMockItems.SESSION_AVAILABLE_SLOT_TIME_DURATION);
    intended(allOf(
            hasAction(equalTo(Intent.ACTION_VIEW)),
            hasData(expectedTimeIntervalUri)));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void viewDay2_clickOnTimeSlot_opensSessionsListScreen() {
    // Give the stub context a real Activity so startActivity() works.
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // When clicking on the time of a time slot
    // slotStart = start of day 2 of the conference.
    long slotStart = Config.CONFERENCE_START_MILLIS + 1 * TimeUtils.DAY;
    onView(allOf(isDisplayed(), withId(R.id.start_time),
            withText(TimeUtils.formatShortTime(mActivityStubContext, new Date(slotStart)))))
            .perform(click());

    // Then the intent for the sessions list screen is fired
    Uri expectedTimeIntervalUri = ScheduleContract.Sessions
            .buildUnscheduledSessionsInInterval(slotStart,
                    slotStart + MyScheduleMockItems.SESSION_AVAILABLE_SLOT_TIME_DURATION);
    intended(allOf(
            hasAction(equalTo(Intent.ACTION_VIEW)),
            hasData(expectedTimeIntervalUri)));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void viewDay2_clickOnMoreButton_opensSessionsListScreen() {
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // When clicking on the time of the more button next to a time slot
    long slotStart = Config.CONFERENCE_START_MILLIS + 1 * TimeUtils.DAY;
    onView(allOf(isDisplayed(), withId(R.id.more), hasSibling(
            withText(TimeUtils.formatShortTime(mActivityStubContext, new Date(slotStart))))))
            .perform(click());

    // Then the intent for the sessions list screen is fired
    Uri expectedTimeIntervalUri = ScheduleContract.Sessions
            .buildUnscheduledSessionsInInterval(slotStart,
                    slotStart + MyScheduleMockItems.SESSION_AVAILABLE_SLOT_TIME_DURATION);
    intended(allOf(
            hasAction(equalTo(Intent.ACTION_VIEW)),
            hasData(expectedTimeIntervalUri)));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void timeSlotWithNoSessionInSchedule_MoreButton_IsNotVisible() {
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // More button is not visible for a time slot with no sessions in schedule
    long slotStartWithAvailableSessionsButNoneInSchedule =
            Config.CONFERENCE_START_MILLIS + 1 * TimeUtils.DAY
                    + MyScheduleMockItems.SESSION_AVAILABLE_SLOT_TIME_OFFSET;
    onView(allOf(withId(R.id.more), hasSibling(
            withText(TimeUtils.formatShortTime(mActivityStubContext,
                    new Date(slotStartWithAvailableSessionsButNoneInSchedule))))))
            .check(matches(not(isDisplayed())));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void timeSlotWithOneSessionInSchedule_MoreButton_IsVisible() {
    mActivityStubContext.setActivityContext(mActivityRule.getActivity());

    // More button is visible for a time slot with 1 session in schedule
    long slotStartWithOneSessionInSchedule = Config.CONFERENCE_START_MILLIS
            + MyScheduleMockItems.SESSION_TITLE_AFTER_START_OFFSET;
    onView(allOf(isDisplayed(), withId(R.id.more), hasSibling(
            withText(TimeUtils.formatShortTime(mActivityStubContext,
                    new Date(slotStartWithOneSessionInSchedule))))))
            .check(matches(isDisplayed()));
}

@Test
public void viewDay1_sessionVisible() {
    // Given day 1 visible
    showDay(1);

    // Then the session in the first day is displayed
    onView(withText(MyScheduleMockItems.SESSION_TITLE_AFTER)).check(matches(isDisplayed()));
}

@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void viewDay2_sessionVisible() {
    // Given day 2 visible (day 2 is selected by default; see setUp time setup)
    // Then the session in the second day is displayed
    onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE)).check(matches(isDisplayed()));
}

@Test
public void navigationIcon_DisplaysAsMenu() {
    NavigationUtils.checkNavigationIconIsMenu();
}

@Test
public void navigationIcon_OnClick_NavigationDisplayed() {
    NavigationUtils.checkNavigationIsDisplayedWhenClickingMenuIcon();
}

@Test
public void navigation_WhenShown_CorrectItemIsSelected() {
    NavigationUtils
            .checkNavigationItemIsSelected(NavigationModel.NavigationItemEnum.MY_SCHEDULE);
}

/**
 * This test works only on phones, where the layout is the same for both orientations (ie tabs)
 */
@Test
@Suppress // Test isn't deterministic when run as part of the full test suite.
public void orientationChange_RetainsDataAndCurrentTab_Flaky() {
    // Given day 2 visible
    showDay(2);
    onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE)).check(matches(isDisplayed()));

    // When changing orientation
    OrientationHelper.rotateOrientation(mActivityRule);

    // Then day 2 is visible
    onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE)).check(matches(isDisplayed()));

    // And day 0 is selectable and visible
    showDay(0);
    onView(withText(R.string.my_schedule_badgepickup)).check(matches(isDisplayed()));

    // And day 1 is selectable and visible
    showDay(1);
    onView(withText(MyScheduleMockItems.SESSION_TITLE_AFTER)).check(matches(isDisplayed()));

    // When changing orientation again
    OrientationHelper.rotateOrientation(mActivityRule);

    // Then day 1 is visible
    onView(withText(MyScheduleMockItems.SESSION_TITLE_AFTER)).check(matches(isDisplayed()));

    // And day 0 is selectable and visible
    showDay(0);
    onView(withText(R.string.my_schedule_badgepickup)).check(matches(isDisplayed()));

    // And day 2 is selectable and visible
    showDay(2);
    onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE)).check(matches(isDisplayed()));
}

@Test
public void newDataObtained_DataUpdated() {
    // Registered in the try body, unregistered in finally so a failed assertion
    // doesn't leak the idling resource into later tests.
    IdlingResource idlingResource = null;
    try {
        // Given initial data displayed
        showDay(2);
        onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE))
                .check(matches(isDisplayed()));
        onView(withText(MyScheduleMockItems.SESSION_TITLE_2)).check(doesNotExist());
        showDay(1);
        onView(withText(MyScheduleMockItems.SESSION_TITLE_AFTER)).check(matches(isDisplayed()));
        onView(withText(MyScheduleMockItems.SESSION_TITLE_1)).check(doesNotExist());

        // When new data is available
        mStubMyScheduleModel.setMockScheduleDataDay1(MyScheduleMockItems
                .getItemsForAttendee(1, false, MyScheduleMockItems.SESSION_TITLE_1));
        mStubMyScheduleModel.setMockScheduleDataDay2(MyScheduleMockItems
                .getItemsForAttendee(2, false, MyScheduleMockItems.SESSION_TITLE_2));
        mStubMyScheduleModel.fireContentObserver();

        // Wait for the ThrottleContentObserver to process the event
        idlingResource = new ThrottleContentObserverIdlingResource(
                InstrumentationRegistry.getTargetContext());
        Espresso.registerIdlingResources(idlingResource);

        // Then the new data is shown
        onView(withText(MyScheduleMockItems.SESSION_TITLE_1)).check(matches(isDisplayed()));
        onView(withText(MyScheduleMockItems.SESSION_TITLE_AFTER))
                .check(doesNotExist());
        showDay(2);
        onView(withText(MyScheduleMockItems.SESSION_TITLE_2)).check(matches(isDisplayed()));
        onView(withText(MyScheduleMockItems.SESSION_TITLE_BEFORE))
                .check(doesNotExist());
    } finally {
        if (idlingResource != null) {
            Espresso.unregisterIdlingResources(idlingResource);
        }
    }
}

// Selects the tab for the given conference day by its view id offset.
private void showDay(int day) {
    onView(withId(MyScheduleActivity.BASE_TAB_VIEW_ID + day)).perform(click());
}
}
/* * The MIT License (MIT) * * Copyright (c) 2010-2015 Carnegie Mellon University * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */

package edu.cmu.cylab.starslinger.transaction;

import java.io.IOException;
import java.security.SecureRandom;
import java.util.Date;

import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.text.TextUtils;

import com.google.android.gms.gcm.GoogleCloudMessaging;

import edu.cmu.cylab.starslinger.MessagingException;
import edu.cmu.cylab.starslinger.MyLog;
import edu.cmu.cylab.starslinger.SafeSlinger;
import edu.cmu.cylab.starslinger.SafeSlingerConfig;
import edu.cmu.cylab.starslinger.SafeSlingerConfig.extra;
import edu.cmu.cylab.starslinger.SafeSlingerPrefs;
import edu.cmu.cylab.starslinger.crypto.CryptTools;
import edu.cmu.cylab.starslinger.crypto.CryptoMsgException;
import edu.cmu.cylab.starslinger.model.CryptoMsgPrivateData;
import edu.cmu.cylab.starslinger.util.SSUtil;

/**
 * Utilities for device registration. Will keep track of the registration token
 * in a private preference.
 */
public class C2DMessaging {
    private static final String TAG = SafeSlingerConfig.LOG_TAG;
    public static final String EXTRA_SENDER = "sender";
    public static final String EXTRA_APPLICATION_PENDING_INTENT = "app";
    public static final String REQUEST_UNREGISTRATION_INTENT = "com.google.android.c2dm.intent.UNREGISTER";
    public static final String REQUEST_REGISTRATION_INTENT = "com.google.android.c2dm.intent.REGISTER";
    public static final String SENDER_ID = "995290364307";
    public static final String EXTRA_UNREGISTERED = "unregistered";
    public static final String EXTRA_ERROR = "error";
    public static final String EXTRA_REGISTRATION_ID = "registration_id";
    public static final String C2DM_RETRY = "com.google.android.c2dm.intent.RETRY";
    public static final String PUSH_REGISTERED = "PUSH_REGISTERED";
    public static final String ERRMSG_ERROR_PREFIX = "Error=";

    /**
     * The device can't read the response, or there was a 500/503 from the
     * server that can be retried later. The application should use exponential
     * back off and retry.
     */
    public static final String ERRREG_SERVICE_NOT_AVAILABLE = "SERVICE_NOT_AVAILABLE";

    /**
     * There is no Google account on the phone. The application should ask the
     * user to open the account manager and add a Google account. Fix on the
     * device side.
     */
    public static final String ERRREG_ACCOUNT_MISSING = "ACCOUNT_MISSING";

    /**
     * Bad password. The application should ask the user to enter his/her
     * password, and let user retry manually later. Fix on the device side.
     */
    public static final String ERRREG_AUTHENTICATION_FAILED = "AUTHENTICATION_FAILED";

    /**
     * The user has too many applications registered. The application should
     * tell the user to uninstall some other applications, let user retry
     * manually. Fix on the device side.
     */
    public static final String ERRREG_TOO_MANY_REGISTRATIONS = "TOO_MANY_REGISTRATIONS";

    /**
     * The sender account is not recognized.
     */
    public static final String ERRREG_INVALID_SENDER = "INVALID_SENDER";

    /**
     * Incorrect phone registration with Google. This phone doesn't currently
     * support C2DM.
     */
    public static final String ERRREG_PHONE_REGISTRATION_ERROR = "PHONE_REGISTRATION_ERROR";

    /***
     * Too many messages sent by the sender to a specific device. Retry after a
     * while.
     */
    public static final String ERRMSG_DEVICE_QUOTA_EXCEEDED = "DeviceQuotaExceeded";

    /***
     * Missing or bad registration_id. Sender should stop sending messages to
     * this device.
     */
    public static final String ERRMSG_INVALID_REGISTRATION = "InvalidRegistration";

    /***
     * The registration_id is no longer valid, for example user has uninstalled
     * the application or turned off notifications. Sender should stop sending
     * messages to this device.
     */
    public static final String ERRMSG_NOT_REGISTERED = "NotRegistered";

    /***
     * The payload of the message is too big, see the limitations. Reduce the
     * size of the message.
     */
    public static final String ERRMSG_MESSAGE_TOO_BIG = "MessageTooBig";

    /***
     * Collapse key is required. Include collapse key in the request.
     */
    public static final String ERRMSG_MISSING_COLLAPSE_KEY = "MissingCollapseKey";

    /***
     * HTTP Error from push service provider. See server logs for more details.
     */
    public static final String ERRMSG_NOTIFCATION_FAIL = "PushNotificationFail";

    /***
     * Internal server error from push service provider.
     */
    public static final String ERRMSG_SERVICE_FAIL = "PushServiceFail";

    /***
     * Message id not found on server. Likely it has been cleaned up already
     * (expired).
     */
    public static final String ERRMSG_MESSAGE_NOT_FOUND = "MessageNotFound";

    /***
     * The payload data contains a key (such as from or any value prefixed by
     * google.) that is used internally by GCM and therefore cannot be used.
     * Note that some words (such as collapse_key) are also used by GCM but are
     * allowed in the payload, in which case the payload value will be
     * overridden by the GCM value.
     */
    public static final String ERRMSG_INVALID_DATA_KEY = "InvalidDataKey";

    /***
     * A message was addressed to a registration ID whose package name did not
     * match the value passed in the request.
     */
    public static final String ERRMSG_INVALID_PACKAGE_NAME = "InvalidPackageName";

    /***
     * The rate of messages to a particular device is too high. You should
     * reduce the number of messages sent to this device and should not retry
     * sending to this device immediately.
     */
    public static final String ERRMSG_DEVICE_MESSAGE_RATE_EXCEEDED = "DeviceMessageRateExceeded";

    /***
     * A particular message could not be sent because the GCM servers
     * encountered an error.
     */
    public static final String ERRMSG_INTERNAL_SERVER_ERROR = "InternalServerError";

    /***
     * Time to Live value passed is less than zero or more than maximum.
     */
    public static final String ERRMSG_INVALID_TTL = "InvalidTtl";

    /***
     * The sender_id contained in the registration_id does not match the
     * sender_id used to register with the GCM servers.
     */
    public static final String ERRMSG_MISMATCH_SENDER_ID = "MismatchSenderId";

    /***
     * Missing registration_id.
     */
    public static final String ERRMSG_MISSING_REGISTRATION = "MissingRegistration";

    /***
     * Too many messages sent by the sender.
     */
    public static final String ERRMSG_QUOTA_EXCEEDED = "QuotaExceeded";

    /***
     * A particular message could not be sent because the GCM servers were not
     * available.
     */
    public static final String ERRMSG_UNAVAILABLE = "Unavailable";

    // wakelock
    private static final String WAKELOCK_KEY = "C2DM_LIB";

    /**
     * Initiate c2d messaging registration for the current application
     */
    // public static void register(Context context, String senderId) {
    // Intent registrationIntent = new Intent(REQUEST_REGISTRATION_INTENT);
    // registrationIntent.putExtra(EXTRA_APPLICATION_PENDING_INTENT,
    // PendingIntent.getBroadcast(context, 0, new Intent(), 0));
    // registrationIntent.putExtra(EXTRA_SENDER, senderId);
    // context.startService(SSUtil.updateIntentExplicitness(context,
    // registrationIntent));
    // }

    /**
     * Unregister the application. New messages will be blocked by server.
     * Fires the legacy C2DM unregister service intent with an (empty)
     * PendingIntent identifying this app.
     */
    public static void unregister(Context context) {
        Intent regIntent = new Intent(REQUEST_UNREGISTRATION_INTENT);
        regIntent.putExtra(EXTRA_APPLICATION_PENDING_INTENT,
                PendingIntent.getBroadcast(context, 0, new Intent(), 0));
        context.startService(SSUtil.updateIntentExplicitness(context, regIntent));
    }

    /**
     * Returns the locally stored push registration id, or the empty string
     * (never null) when no registration has been saved yet.
     */
    public static String getRegistrationId(Context context) {
        String regId = SafeSlingerPrefs.getPushRegistrationId();
        if (TextUtils.isEmpty(regId))
            return "";
        return regId;
    }

    /**
     * Registers with GCM off the main thread. On success (non-empty id after
     * {@link #handleRegistration}) broadcasts {@link #PUSH_REGISTERED} with the
     * new id so the UI can react.
     */
    public static void registerInBackground(final Context context) {
        final GoogleCloudMessaging gcm = GoogleCloudMessaging.getInstance(context);
        new AsyncTask<Void, Void, String>() {
            @Override
            protected String doInBackground(Void params[]) {
                // Reuse the broadcast-style intent extras so handleRegistration can
                // process this result the same way as a registration broadcast.
                Intent intent = new Intent();
                try {
                    String regId = gcm.register(SENDER_ID);
                    intent.putExtra(EXTRA_REGISTRATION_ID, regId);
                } catch (IOException ex) {
                    // If there is an error, don't just keep trying to register.
                    // Require the user to click a button again, or perform
                    // exponential back-off.
                    intent.putExtra(EXTRA_ERROR, ex.getLocalizedMessage());
                }
                String regId = handleRegistration(context, intent);
                return regId;
            }

            @Override
            protected void onPostExecute(String registrationId) {
                if (!TextUtils.isEmpty(registrationId)) {
                    Intent i = new Intent(PUSH_REGISTERED).putExtra(extra.PUSH_REGISTRATION_ID,
                            registrationId);
                    context.sendBroadcast(i);
                }
            };
        }.execute();
    }

    /**
     * Processes a registration result carried in {@code intent} extras.
     * Three outcomes:
     * <ul>
     * <li>unregistered: clears local registration state, returns "".</li>
     * <li>error: clears local state; for SERVICE_NOT_AVAILABLE schedules an
     * alarm retry with exponential back-off; returns "".</li>
     * <li>success: saves the id locally, then attempts to post it to the
     * messenger server (only when the user's pass phrase is cached); returns
     * the registration id.</li>
     * </ul>
     */
    public static String handleRegistration(final Context context, Intent intent) {
        String registrationId = intent.getStringExtra(EXTRA_REGISTRATION_ID);
        String unregistered = intent.getStringExtra(EXTRA_UNREGISTERED);
        String error = intent.getStringExtra(EXTRA_ERROR);

        MyLog.d(TAG, "registrationId = " + registrationId + ", error = " + error + ", removed = "
                + unregistered);

        if (unregistered != null) {
            // Remember we are unregistered
            SafeSlingerPrefs.setPushRegistrationId(null); // clear
            SafeSlingerPrefs.setPushRegistrationIdPosted(false); // reset
            SafeSlingerPrefs.setPusgRegBackoff(SafeSlingerPrefs.DEFAULT_PUSHREG_BACKOFF);
            // onUnregistered(context);
            return "";
        } else if (error != null) {
            // we are not registered, can try again
            SafeSlingerPrefs.setPushRegistrationId(null); // clear
            SafeSlingerPrefs.setPushRegistrationIdPosted(false); // reset
            // Registration failed
            MyLog.e(TAG, "push reg error: " + error);
            // retry registration according to recommendations...
            if (error.equals(ERRREG_SERVICE_NOT_AVAILABLE)) {
                long backoff = SafeSlingerPrefs.getPusgRegBackoff();
                Intent retryIntent = new Intent(C2DM_RETRY);
                PendingIntent retryPIntent = PendingIntent.getBroadcast(context,
                        0 /* requestCode */, retryIntent, 0 /* flags */);
                Date futureDate = new Date(new Date().getTime() + backoff);
                AlarmManager am = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
                am.set(AlarmManager.RTC_WAKEUP, futureDate.getTime(), retryPIntent);
                // Next retry should wait longer.
                backoff *= 2;
                SafeSlingerPrefs.setPusgRegBackoff(backoff);
            }
        } else {
            // save incoming registration locally
            SafeSlingerPrefs.setPushRegistrationId(registrationId);
            SafeSlingerPrefs.setPushRegistrationIdPosted(false); // reset
            SafeSlingerPrefs.setPusgRegBackoff(SafeSlingerPrefs.DEFAULT_PUSHREG_BACKOFF);

            // save incoming registration on server
            try {
                WebEngine web = new WebEngine(context, SafeSlingerConfig.HTTPURL_MESSENGER_HOST);
                String pass = SafeSlinger.getCachedPassPhrase(SafeSlingerPrefs.getKeyIdString());
                // only update online if we are logged in
                if (!TextUtils.isEmpty(pass)) {
                    CryptoMsgPrivateData mine = CryptTools.getSecretKey(pass);
                    String keyId = mine.getKeyId();
                    SecureRandom sr = new SecureRandom();
                    byte[] nonce = new byte[32];
                    sr.nextBytes(nonce);
                    String pubkey = mine.getSignPubKey();
                    String SignKey = mine.getSignPriKey();

                    // only upload valid registration ids
                    if (!TextUtils.isEmpty(registrationId)) {
                        // post local active reg
                        byte[] result = web.postRegistration(keyId, registrationId,
                                SafeSlingerConfig.NOTIFY_ANDROIDGCM, nonce, pubkey, SignKey);
                        // mark the registration id as posted on success, otherwise
                        // clear it so the next attempt re-registers from scratch
                        if (result != null) {
                            SafeSlingerPrefs.setPushRegistrationIdPosted(true);
                        } else {
                            SafeSlingerPrefs.setPushRegistrationId(null); // clear
                            SafeSlingerPrefs.setPushRegistrationIdPosted(false); // reset
                        }
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            } catch (CryptoMsgException e) {
                e.printStackTrace();
            } catch (MessagingException e) {
                SafeSlingerPrefs.setPushRegistrationId(null); // clear
                SafeSlingerPrefs.setPushRegistrationIdPosted(false); // reset
                e.printStackTrace();
            } catch (MessageNotFoundException e) {
                SafeSlingerPrefs.setPushRegistrationId(null); // clear
                SafeSlingerPrefs.setPushRegistrationIdPosted(false); // reset
                e.printStackTrace();
            }
            // notify UI that registration is complete for now...
            // onRegistered(context, registrationId);
            return registrationId;
        }
        return "";
    }
}
/**
 */
package com.rockwellcollins.atc.resolute.resolute.impl;

import com.rockwellcollins.atc.resolute.resolute.Arg;
import com.rockwellcollins.atc.resolute.resolute.Expr;
import com.rockwellcollins.atc.resolute.resolute.QuantifiedExpr;
import com.rockwellcollins.atc.resolute.resolute.ResolutePackage;

import java.util.Collection;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.common.util.EList;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Quantified Expr</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link com.rockwellcollins.atc.resolute.resolute.impl.QuantifiedExprImpl#getQuant <em>Quant</em>}</li>
 *   <li>{@link com.rockwellcollins.atc.resolute.resolute.impl.QuantifiedExprImpl#getArgs <em>Args</em>}</li>
 *   <li>{@link com.rockwellcollins.atc.resolute.resolute.impl.QuantifiedExprImpl#getExpr <em>Expr</em>}</li>
 * </ul>
 *
 * NOTE(review): EMF-generated implementation class. Do not hand-edit members
 * marked {@code @generated}; changes are overwritten when the model is
 * regenerated (use {@code @generated NOT} to protect intentional edits).
 *
 * @generated
 */
public class QuantifiedExprImpl extends ExprImpl implements QuantifiedExpr {
    /**
     * The default value of the '{@link #getQuant() <em>Quant</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getQuant()
     * @generated
     * @ordered
     */
    protected static final String QUANT_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getQuant() <em>Quant</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getQuant()
     * @generated
     * @ordered
     */
    protected String quant = QUANT_EDEFAULT;

    /**
     * The cached value of the '{@link #getArgs() <em>Args</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getArgs()
     * @generated
     * @ordered
     */
    protected EList<Arg> args;

    /**
     * The cached value of the '{@link #getExpr() <em>Expr</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getExpr()
     * @generated
     * @ordered
     */
    protected Expr expr;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected QuantifiedExprImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ResolutePackage.Literals.QUANTIFIED_EXPR;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getQuant() {
        return quant;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setQuant(String newQuant) {
        String oldQuant = quant;
        quant = newQuant;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ResolutePackage.QUANTIFIED_EXPR__QUANT, oldQuant, quant));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Arg> getArgs() {
        if (args == null) {
            args = new EObjectContainmentEList<Arg>(Arg.class, this, ResolutePackage.QUANTIFIED_EXPR__ARGS);
        }
        return args;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Expr getExpr() {
        return expr;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetExpr(Expr newExpr, NotificationChain msgs) {
        Expr oldExpr = expr;
        expr = newExpr;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, ResolutePackage.QUANTIFIED_EXPR__EXPR, oldExpr, newExpr);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setExpr(Expr newExpr) {
        if (newExpr != expr) {
            NotificationChain msgs = null;
            if (expr != null)
                msgs = ((InternalEObject)expr).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - ResolutePackage.QUANTIFIED_EXPR__EXPR, null, msgs);
            if (newExpr != null)
                msgs = ((InternalEObject)newExpr).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ResolutePackage.QUANTIFIED_EXPR__EXPR, null, msgs);
            msgs = basicSetExpr(newExpr, msgs);
            if (msgs != null)
                msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ResolutePackage.QUANTIFIED_EXPR__EXPR, newExpr, newExpr));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case ResolutePackage.QUANTIFIED_EXPR__ARGS:
                return ((InternalEList<?>)getArgs()).basicRemove(otherEnd, msgs);
            case ResolutePackage.QUANTIFIED_EXPR__EXPR:
                return basicSetExpr(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ResolutePackage.QUANTIFIED_EXPR__QUANT:
                return getQuant();
            case ResolutePackage.QUANTIFIED_EXPR__ARGS:
                return getArgs();
            case ResolutePackage.QUANTIFIED_EXPR__EXPR:
                return getExpr();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case ResolutePackage.QUANTIFIED_EXPR__QUANT:
                setQuant((String)newValue);
                return;
            case ResolutePackage.QUANTIFIED_EXPR__ARGS:
                getArgs().clear();
                getArgs().addAll((Collection<? extends Arg>)newValue);
                return;
            case ResolutePackage.QUANTIFIED_EXPR__EXPR:
                setExpr((Expr)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case ResolutePackage.QUANTIFIED_EXPR__QUANT:
                setQuant(QUANT_EDEFAULT);
                return;
            case ResolutePackage.QUANTIFIED_EXPR__ARGS:
                getArgs().clear();
                return;
            case ResolutePackage.QUANTIFIED_EXPR__EXPR:
                setExpr((Expr)null);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ResolutePackage.QUANTIFIED_EXPR__QUANT:
                return QUANT_EDEFAULT == null ? quant != null : !QUANT_EDEFAULT.equals(quant);
            case ResolutePackage.QUANTIFIED_EXPR__ARGS:
                return args != null && !args.isEmpty();
            case ResolutePackage.QUANTIFIED_EXPR__EXPR:
                return expr != null;
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();

        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (quant: ");
        result.append(quant);
        result.append(')');
        return result.toString();
    }

} //QuantifiedExprImpl
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.end2end; import static org.junit.Assert.assertEquals; import java.sql.Connection; import java.sql.Date; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.util.PhoenixRuntime; import org.junit.Test; import com.google.common.collect.Lists; public class QueryMoreIT extends BaseHBaseManagedTimeIT { private String dataTableName; //queryAgainstTenantSpecificView = true, dataTableSalted = true @Test public void testQueryMore1() throws Exception { testQueryMore(true, true); } //queryAgainstTenantSpecificView = false, dataTableSalted = true @Test public void testQueryMore2() throws Exception { testQueryMore(false, true); } //queryAgainstTenantSpecificView = false, dataTableSalted = false @Test public void testQueryMore3() throws Exception { testQueryMore(false, false); } //queryAgainstTenantSpecificView = true, dataTableSalted = false @Test public void testQueryMore4() throws Exception { 
testQueryMore(true, false); } private void testQueryMore(boolean queryAgainstTenantSpecificView, boolean dataTableSalted) throws Exception { String[] tenantIds = new String[] {"00Dxxxxxtenant1", "00Dxxxxxtenant2", "00Dxxxxxtenant3"}; int numRowsPerTenant = 10; String cursorTableName = "CURSOR_TABLE"; this.dataTableName = "BASE_HISTORY_TABLE" + (dataTableSalted ? "_SALTED" : ""); String cursorTableDDL = "CREATE TABLE IF NOT EXISTS " + cursorTableName + " (\n" + "TENANT_ID VARCHAR(15) NOT NULL\n," + "QUERY_ID VARCHAR(15) NOT NULL,\n" + "CURSOR_ORDER BIGINT NOT NULL \n" + "CONSTRAINT CURSOR_TABLE_PK PRIMARY KEY (TENANT_ID, QUERY_ID, CURSOR_ORDER)) "+ "SALT_BUCKETS = 4, TTL=86400"; String baseDataTableDDL = "CREATE TABLE IF NOT EXISTS " + dataTableName + " (\n" + "TENANT_ID CHAR(15) NOT NULL,\n" + "PARENT_ID CHAR(15) NOT NULL,\n" + "CREATED_DATE DATE NOT NULL,\n" + "ENTITY_HISTORY_ID CHAR(15) NOT NULL,\n" + "DATA_TYPE VARCHAR,\n" + "OLDVAL_STRING VARCHAR,\n" + "NEWVAL_STRING VARCHAR\n" + "CONSTRAINT PK PRIMARY KEY(TENANT_ID, PARENT_ID, CREATED_DATE DESC, ENTITY_HISTORY_ID)) " + "VERSIONS = 1, MULTI_TENANT = true" + (dataTableSalted ? ", SALT_BUCKETS = 4" : ""); //create cursor and data tables. Connection conn = DriverManager.getConnection(getUrl()); conn.createStatement().execute(cursorTableDDL); conn.createStatement().execute(baseDataTableDDL); conn.close(); //upsert rows in the data table for all the tenantIds Map<String, List<String>> historyIdsPerTenant = createHistoryTableRows(dataTableName, tenantIds, numRowsPerTenant); // assert query more for tenantId -> tenantIds[0] String tenantId = tenantIds[0]; String cursorQueryId = "00TcursrqueryId"; String tableOrViewName = queryAgainstTenantSpecificView ? 
("\"HISTORY_TABLE" + "_" + tenantId + "\"") : dataTableName; assertEquals(numRowsPerTenant, upsertSelectRecordsInCursorTableForTenant(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId)); /*// assert that the data inserted in cursor table matches the data in the data table for tenantId. String selectDataTable = "SELECT TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID FROM BASE_HISTORY_TABLE WHERE TENANT_ID = ? "; String selectCursorTable = "SELECT TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID FROM CURSOR_TABLE (PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15)) WHERE TENANT_ID = ? "; PreparedStatement stmtData = DriverManager.getConnection(getUrl()).prepareStatement(selectDataTable); stmtData.setString(1, tenantId); ResultSet rsData = stmtData.executeQuery(); PreparedStatement stmtCursor = DriverManager.getConnection(getUrl()).prepareStatement(selectCursorTable); stmtCursor.setString(1, tenantId); ResultSet rsCursor = stmtCursor.executeQuery(); while(rsData.next() && rsCursor.next()) { assertEquals(rsData.getString("TENANT_ID"), rsCursor.getString("TENANT_ID")); assertEquals(rsData.getString("PARENT_ID"), rsCursor.getString("PARENT_ID")); assertEquals(rsData.getDate("CREATED_DATE"), rsCursor.getDate("CREATED_DATE")); assertEquals(rsData.getString("ENTITY_HISTORY_ID"), rsCursor.getString("ENTITY_HISTORY_ID")); } */ Connection conn2 = DriverManager.getConnection(getUrl()); ResultSet rs = conn2.createStatement().executeQuery("SELECT count(*) from " + cursorTableName); rs.next(); assertEquals(numRowsPerTenant, rs.getInt(1)); conn2.close(); int startOrder = 0; int endOrder = 5; int numRecordsThatShouldBeRetrieved = numRowsPerTenant/2; // we will test for two rounds of query more. // get first batch of cursor ids out of the cursor table. 
String[] cursorIds = getRecordsOutofCursorTable(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId, startOrder, endOrder); assertEquals(numRecordsThatShouldBeRetrieved, cursorIds.length); // now query and fetch first batch of records. List<String> historyIds = doQueryMore(queryAgainstTenantSpecificView, tenantId, tableOrViewName, cursorIds); // assert that history ids match for this tenant assertEquals(historyIdsPerTenant.get(tenantId).subList(startOrder, endOrder), historyIds); // get the next batch of cursor ids out of the cursor table. cursorIds = getRecordsOutofCursorTable(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId, startOrder + numRecordsThatShouldBeRetrieved, endOrder + numRecordsThatShouldBeRetrieved); assertEquals(numRecordsThatShouldBeRetrieved, cursorIds.length); // now query and fetch the next batch of records. historyIds = doQueryMore(queryAgainstTenantSpecificView, tenantId, tableOrViewName, cursorIds); // assert that the history ids match for this tenant assertEquals(historyIdsPerTenant.get(tenantId).subList(startOrder + numRecordsThatShouldBeRetrieved, endOrder+ numRecordsThatShouldBeRetrieved), historyIds); // get the next batch of cursor ids out of the cursor table. cursorIds = getRecordsOutofCursorTable(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId, startOrder + 2 * numRecordsThatShouldBeRetrieved, endOrder + 2 * numRecordsThatShouldBeRetrieved); // assert that there are no more cursorids left for this tenant. 
assertEquals(0, cursorIds.length); } private Map<String, List<String>> createHistoryTableRows(String dataTableName, String[] tenantIds, int numRowsPerTenant) throws Exception { String upsertDML = "UPSERT INTO " + dataTableName + " VALUES (?, ?, ?, ?, ?, ?, ?)"; Connection conn = DriverManager.getConnection(getUrl()); Map<String, List<String>> historyIdsForTenant = new HashMap<String, List<String>>(); try { PreparedStatement stmt = conn.prepareStatement(upsertDML); for (int j = 0; j < tenantIds.length; j++) { List<String> historyIds = new ArrayList<String>(); for (int i = 0; i < numRowsPerTenant; i++) { stmt.setString(1, tenantIds[j]); String parentId = "parentId" + i; stmt.setString(2, parentId); stmt.setDate(3, new Date(100)); String historyId = "historyId" + i; stmt.setString(4, historyId); stmt.setString(5, "datatype"); stmt.setString(6, "oldval"); stmt.setString(7, "newval"); stmt.executeUpdate(); historyIds.add(historyId); } historyIdsForTenant.put(tenantIds[j], historyIds); } conn.commit(); return historyIdsForTenant; } finally { conn.close(); } } private int upsertSelectRecordsInCursorTableForTenant(String tableOrViewName, boolean queryAgainstTenantView, String tenantId, String cursorQueryId) throws Exception { String sequenceName = "\"" + tenantId + "_SEQ\""; Connection conn = queryAgainstTenantView ? getTenantSpecificConnection(tenantId) : DriverManager.getConnection(getUrl()); // Create a sequence. This sequence is used to fill cursor_order column for each row inserted in the cursor table. conn.createStatement().execute("CREATE SEQUENCE " + sequenceName + " CACHE " + Long.MAX_VALUE); conn.setAutoCommit(true); if (queryAgainstTenantView) { createTenantSpecificViewIfNecessary(tableOrViewName, conn); } try { String tenantIdFilter = queryAgainstTenantView ? "" : " WHERE TENANT_ID = ? "; // Using dynamic columns, we can use the same cursor table for storing primary keys for all the tables. 
String upsertSelectDML = "UPSERT INTO CURSOR_TABLE " + "(TENANT_ID, QUERY_ID, CURSOR_ORDER, PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15)) " + "SELECT ?, ?, NEXT VALUE FOR " + sequenceName + ", PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID " + " FROM " + tableOrViewName + tenantIdFilter; PreparedStatement stmt = conn.prepareStatement(upsertSelectDML); stmt.setString(1, tenantId); stmt.setString(2, cursorQueryId); if (!queryAgainstTenantView) { stmt.setString(3, tenantId); } int numRecords = stmt.executeUpdate(); return numRecords; } finally { try { conn.createStatement().execute("DROP SEQUENCE " + sequenceName); } finally { conn.close(); } } } private Connection getTenantSpecificConnection(String tenantId) throws Exception { Properties props = new Properties(); props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId); return DriverManager.getConnection(getUrl(), props); } private String createTenantSpecificViewIfNecessary(String tenantViewName, Connection tenantConn) throws Exception { tenantConn.createStatement().execute("CREATE VIEW IF NOT EXISTS " + tenantViewName + " AS SELECT * FROM " + dataTableName); return tenantViewName; } private String[] getRecordsOutofCursorTable(String tableOrViewName, boolean queryAgainstTenantSpecificView, String tenantId, String cursorQueryId, int startOrder, int endOrder) throws Exception { Connection conn = DriverManager.getConnection(getUrl()); List<String> pkIds = new ArrayList<String>(); String cols = queryAgainstTenantSpecificView ? "PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID" : "TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID"; String dynCols = queryAgainstTenantSpecificView ? "(PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15))" : "(TENANT_ID CHAR(15), PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15))"; String selectCursorSql = "SELECT " + cols + " " + "FROM CURSOR_TABLE \n" + dynCols + " \n" + "WHERE TENANT_ID = ? AND \n" + "QUERY_ID = ? AND \n" + "CURSOR_ORDER > ? 
AND \n" + "CURSOR_ORDER <= ?"; PreparedStatement stmt = conn.prepareStatement(selectCursorSql); stmt.setString(1, tenantId); stmt.setString(2, cursorQueryId); stmt.setInt(3, startOrder); stmt.setInt(4, endOrder); ResultSet rs = stmt.executeQuery(); @SuppressWarnings("unchecked") List<Pair<String, String>> columns = queryAgainstTenantSpecificView ? Lists.newArrayList(new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID")) : Lists.newArrayList(new Pair<String, String>(null, "TENANT_ID"), new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID")); while(rs.next()) { Object[] values = new Object[columns.size()]; for (int i = 0; i < columns.size(); i++) { values[i] = rs.getObject(i + 1); } conn = getTenantSpecificConnection(tenantId); pkIds.add(Base64.encodeBytes(PhoenixRuntime.encodeValues(conn, tableOrViewName, values, columns))); } return pkIds.toArray(new String[pkIds.size()]); } private List<String> doQueryMore(boolean queryAgainstTenantView, String tenantId, String tenantViewName, String[] cursorIds) throws Exception { Connection conn = queryAgainstTenantView ? getTenantSpecificConnection(tenantId) : DriverManager.getConnection(getUrl()); String tableName = queryAgainstTenantView ? tenantViewName : dataTableName; @SuppressWarnings("unchecked") List<Pair<String, String>> columns = queryAgainstTenantView ? Lists.newArrayList(new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID")) : Lists.newArrayList(new Pair<String, String>(null, "TENANT_ID"), new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID")); StringBuilder sb = new StringBuilder(); String where = queryAgainstTenantView ? 
" WHERE (PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID) IN " : " WHERE (TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID) IN "; sb.append("SELECT ENTITY_HISTORY_ID FROM " + tableName + where); int numPkCols = columns.size(); String query = addRvcInBinds(sb, cursorIds.length, numPkCols); PreparedStatement stmt = conn.prepareStatement(query); int bindCounter = 1; for (int i = 0; i < cursorIds.length; i++) { Object[] pkParts = PhoenixRuntime.decodeValues(conn, tableName, Base64.decode(cursorIds[i]), columns); for (int j = 0; j < pkParts.length; j++) { stmt.setObject(bindCounter++, pkParts[j]); } } ResultSet rs = stmt.executeQuery(); List<String> historyIds = new ArrayList<String>(); while(rs.next()) { historyIds.add(rs.getString(1)); } return historyIds; } private String addRvcInBinds(StringBuilder sb, int numRvcs, int numPkCols) { sb.append("("); for (int i = 0 ; i < numRvcs; i++) { for (int j = 0; j < numPkCols; j++) { if (j == 0) { sb.append("("); } sb.append("?"); if (j < numPkCols - 1) { sb.append(","); } else { sb.append(")"); } } if (i < numRvcs - 1) { sb.append(","); } } sb.append(")"); return sb.toString(); } }
/* $Id$ * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.etch.tests; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import java.util.Set; import org.apache.etch.bindings.java.msg.Field; import org.apache.etch.bindings.java.msg.StructValue; import org.apache.etch.bindings.java.msg.Type; import org.apache.etch.tests.Test1.E1; import org.apache.etch.tests.Test1.Excp1; import org.apache.etch.tests.Test1.Excp2; import org.apache.etch.tests.Test1.Excp3; import org.apache.etch.tests.Test1.Excp4; import org.apache.etch.tests.Test1.Excp5; import org.apache.etch.tests.Test1.Excp6; import org.apache.etch.tests.Test1.S1; import org.apache.etch.tests.Test1.S2; import org.apache.etch.tests.Test1.S3; import org.apache.etch.tests.Test1.S4; /** Test of ValueFactoryTest1 */ public class TestValueFactoryTest1DotJava { private ValueFactoryTest1 vf = new ValueFactoryTest1( "tcp:" ); /** */ @org.junit.Test public void test_E1() { // type // checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_E1, // fields // ValueFactoryTest1._mf_A, ValueFactoryTest1._mf_B, 
ValueFactoryTest1._mf_C ); } /** */ @org.junit.Test public void test_E1_export() { testEnumExport( E1.A, ValueFactoryTest1._mt_org_apache_etch_tests_Test1_E1, ValueFactoryTest1._mf_A ); testEnumExport( E1.B, ValueFactoryTest1._mt_org_apache_etch_tests_Test1_E1, ValueFactoryTest1._mf_B ); testEnumExport( E1.C, ValueFactoryTest1._mt_org_apache_etch_tests_Test1_E1, ValueFactoryTest1._mf_C ); } /** */ @org.junit.Test public void test_E1_import() { testEnumImport( E1.A, ValueFactoryTest1._mt_org_apache_etch_tests_Test1_E1, ValueFactoryTest1._mf_A ); testEnumImport( E1.B, ValueFactoryTest1._mt_org_apache_etch_tests_Test1_E1, ValueFactoryTest1._mf_B ); testEnumImport( E1.C, ValueFactoryTest1._mt_org_apache_etch_tests_Test1_E1, ValueFactoryTest1._mf_C ); } /** */ @org.junit.Test public void test_S1() { // type // checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S1, // fields // ValueFactoryTest1._mf_x, ValueFactoryTest1._mf_y, ValueFactoryTest1._mf_z ); } /** */ @org.junit.Test public void test_S1_export() { StructValue sv = vf.exportCustomValue( new S1( 19, 23, 29 ) ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S1 ); assertEquals( 3, sv.size() ); assertEquals( 19, sv.get( ValueFactoryTest1._mf_x ) ); assertEquals( 23, sv.get( ValueFactoryTest1._mf_y ) ); assertEquals( 29, sv.get( ValueFactoryTest1._mf_z ) ); } /** */ @org.junit.Test public void test_S1_import() { StructValue sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S1, vf ); sv.put( ValueFactoryTest1._mf_x, 101 ); sv.put( ValueFactoryTest1._mf_y, 103 ); sv.put( ValueFactoryTest1._mf_z, 107 ); S1 s = (S1) vf.importCustomValue( sv ); assertEquals( 101, s.x ); assertEquals( 103, s.y ); assertEquals( 107, s.z ); } /** */ @org.junit.Test public void test_S2() { // type // checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S2, // fields // ValueFactoryTest1._mf_a, ValueFactoryTest1._mf_b, ValueFactoryTest1._mf_c ); } /** */ @org.junit.Test public void 
test_S2_export() { S1 a = new S1( 21, 22, 23 ); S1 b = new S1( 31, 32, 33 ); E1 c = E1.A; StructValue sv = vf.exportCustomValue( new S2( a, b, c ) ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S2 ); assertEquals( 3, sv.size() ); assertSame( a, sv.get( ValueFactoryTest1._mf_a ) ); assertSame( b, sv.get( ValueFactoryTest1._mf_b ) ); assertSame( c, sv.get( ValueFactoryTest1._mf_c ) ); } /** */ @org.junit.Test public void test_S2_import() { StructValue sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S2, vf ); sv.put( ValueFactoryTest1._mf_a, new S1( 21, 22, 23 ) ); sv.put( ValueFactoryTest1._mf_b, new S1( 31, 32, 33 ) ); sv.put( ValueFactoryTest1._mf_c, E1.A ); S2 s = (S2) vf.importCustomValue( sv ); assertEquals( 21, s.a.x ); assertEquals( 22, s.a.y ); assertEquals( 23, s.a.z ); assertEquals( 31, s.b.x ); assertEquals( 32, s.b.y ); assertEquals( 33, s.b.z ); assertEquals( E1.A, s.c ); } /** */ @org.junit.Test public void test_S3() { // type // checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S3, // fields // ValueFactoryTest1._mf_tipe, ValueFactoryTest1._mf_x ); } /** */ @org.junit.Test public void test_S3_export() { testS3Export("boolean", Test1.BOOL1); testS3Export("byte", Test1.BYTE5); testS3Export("short", Test1.SHORT5); testS3Export("int", Test1.INT5); testS3Export("long", Test1.LONG5); testS3Export("float", Test1.FLOAT5); testS3Export("double", Test1.DOUBLE5); testS3Export("string", Test1.STRING3); } /** */ @org.junit.Test public void test_S3_import() { testS3Import("boolean", Test1.BOOL1); testS3Import("byte", Test1.BYTE5); testS3Import("short", Test1.SHORT5); testS3Import("int", Test1.INT5); testS3Import("long", Test1.LONG5); testS3Import("float", Test1.FLOAT5); testS3Import("double", Test1.DOUBLE5); testS3Import("string", Test1.STRING3); } /** */ @org.junit.Test public void test_S4() { // type // checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S4, // fields // ValueFactoryTest1._mf_tipe, 
ValueFactoryTest1._mf_x ); } /** */ @org.junit.Test public void test_S4_export() { Object[] boolObject = new Object[] {Test1.BOOL1, Test1.BOOL2}; Object[] byteObject = new Object[] {Test1.BYTE1, Test1.BYTE2, Test1.BYTE3, Test1.BYTE4, Test1.BYTE5}; Object[] shortObject = new Object[] {Test1.SHORT1, Test1.SHORT2, Test1.SHORT3, Test1.SHORT4, Test1.SHORT5}; Object[] intObject = new Object[]{Test1.INT1, Test1.INT2, Test1.INT3, Test1.INT4, Test1.INT5}; Object[] longObject = new Object[] {Test1.LONG1, Test1.LONG2, Test1.LONG3, Test1.LONG4, Test1.LONG5}; Object[] floatObject = new Object[]{Test1.FLOAT1, Test1.FLOAT2, Test1.FLOAT3, Test1.FLOAT4, Test1.FLOAT5}; Object[] doubleObject = new Object[] {Test1.DOUBLE1, Test1.DOUBLE2, Test1.DOUBLE3, Test1.DOUBLE4, Test1.DOUBLE5}; Object[] stringObject = new Object []{Test1.STRING1, Test1.STRING2, Test1.STRING3, Test1.STRING4, Test1.STRING5}; testS4Export("boolean", boolObject); testS4Export("byte", byteObject); testS4Export("short", shortObject); testS4Export("int", intObject); testS4Export("long", longObject); testS4Export("float", floatObject); testS4Export("double", doubleObject); testS4Export("string", stringObject); } /** */ @org.junit.Test public void test_S4_import() { Object[] boolObject = new Object[] {Test1.BOOL1, Test1.BOOL2}; Object[] byteObject = new Object[] {Test1.BYTE1, Test1.BYTE2, Test1.BYTE3, Test1.BYTE4, Test1.BYTE5}; Object[] shortObject = new Object[] {Test1.SHORT1, Test1.SHORT2, Test1.SHORT3, Test1.SHORT4, Test1.SHORT5}; Object[] intObject = new Object[]{Test1.INT1, Test1.INT2, Test1.INT3, Test1.INT4, Test1.INT5}; Object[] longObject = new Object[] {Test1.LONG1, Test1.LONG2, Test1.LONG3, Test1.LONG4, Test1.LONG5}; Object[] floatObject = new Object[]{Test1.FLOAT1, Test1.FLOAT2, Test1.FLOAT3, Test1.FLOAT4, Test1.FLOAT5}; Object[] doubleObject = new Object[] {Test1.DOUBLE1, Test1.DOUBLE2, Test1.DOUBLE3, Test1.DOUBLE4, Test1.DOUBLE5}; Object[] stringObject = new Object []{Test1.STRING1, Test1.STRING2, 
Test1.STRING3, Test1.STRING4, Test1.STRING5}; testS4Import("boolean", boolObject); testS4Import("byte", byteObject); testS4Import("short", shortObject); testS4Import("int", intObject); testS4Import("long", longObject); testS4Import("float", floatObject); testS4Import("double", doubleObject); testS4Import("string", stringObject); } /** */ @org.junit.Test public void test_excps() { // type // checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp1, ValueFactoryTest1._mf_msg, ValueFactoryTest1._mf_code ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp2 ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp3 ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp4 ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp5, ValueFactoryTest1._mf_msg, ValueFactoryTest1._mf_code, ValueFactoryTest1._mf_x ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp6, ValueFactoryTest1._mf_msg, ValueFactoryTest1._mf_code, ValueFactoryTest1._mf_x ); // fields // // ValueFactoryTest1._mf_msg ); // ValueFactoryTest1._mf_code ); // ValueFactoryTest1._mf_x ); } /** */ @org.junit.Test public void test_excps_export() { Object[] boolObject = new Object[] {Test1.BOOL1, Test1.BOOL2}; Object[] byteObject = new Object[] {Test1.BYTE1, Test1.BYTE2, Test1.BYTE3, Test1.BYTE4, Test1.BYTE5}; Object[] shortObject = new Object[] {Test1.SHORT1, Test1.SHORT2, Test1.SHORT3, Test1.SHORT4, Test1.SHORT5}; Object[] intObject = new Object[]{Test1.INT1, Test1.INT2, Test1.INT3, Test1.INT4, Test1.INT5}; Object[] longObject = new Object[] {Test1.LONG1, Test1.LONG2, Test1.LONG3, Test1.LONG4, Test1.LONG5}; Object[] floatObject = new Object[]{Test1.FLOAT1, Test1.FLOAT2, Test1.FLOAT3, Test1.FLOAT4, Test1.FLOAT5}; Object[] doubleObject = new Object[] {Test1.DOUBLE1, Test1.DOUBLE2, Test1.DOUBLE3, Test1.DOUBLE4, Test1.DOUBLE5}; Object[] stringObject = new Object []{Test1.STRING1, Test1.STRING2, Test1.STRING3, Test1.STRING4, 
Test1.STRING5}; String msg = "Exception"; int code = 500; StructValue sv = vf.exportCustomValue( new Excp1( "abc", 23 ) ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp1 ); assertEquals( 2, sv.size() ); assertEquals( "abc", sv.get( ValueFactoryTest1._mf_msg ) ); assertEquals( 23, sv.get( ValueFactoryTest1._mf_code ) ); sv = vf.exportCustomValue( new Excp2() ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp2 ); assertEquals( 0, sv.size() ); sv = vf.exportCustomValue( new Excp3() ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp3 ); assertEquals( 0, sv.size() ); sv = vf.exportCustomValue( new Excp4() ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp4 ); assertEquals( 0, sv.size() ); // Import exception with object as param testExcp5Export(msg, code, Test1.BOOL2); testExcp5Export(msg, code, Test1.BYTE5); testExcp5Export(msg, code, Test1.SHORT5); testExcp5Export(msg, code, Test1.INT5); testExcp5Export(msg, code, Test1.LONG5); testExcp5Export(msg, code, Test1.FLOAT5); testExcp5Export(msg, code, Test1.DOUBLE5); testExcp5Export(msg, code, Test1.STRING3); // Import exception with array of object as param testExcp6Export(msg, code, boolObject); testExcp6Export(msg, code, byteObject); testExcp6Export(msg, code, shortObject); testExcp6Export(msg, code, intObject); testExcp6Export(msg, code, longObject); testExcp6Export(msg, code, floatObject); testExcp6Export(msg, code, doubleObject); testExcp6Export(msg, code, stringObject); } /** */ @org.junit.Test public void test_excps_import() { Object[] boolObject = new Object[] {Test1.BOOL1, Test1.BOOL2}; Object[] byteObject = new Object[] {Test1.BYTE1, Test1.BYTE2, Test1.BYTE3, Test1.BYTE4, Test1.BYTE5}; Object[] shortObject = new Object[] {Test1.SHORT1, Test1.SHORT2, Test1.SHORT3, Test1.SHORT4, Test1.SHORT5}; Object[] intObject = new Object[]{Test1.INT1, Test1.INT2, Test1.INT3, Test1.INT4, Test1.INT5}; Object[] longObject = new Object[] 
{Test1.LONG1, Test1.LONG2, Test1.LONG3, Test1.LONG4, Test1.LONG5}; Object[] floatObject = new Object[]{Test1.FLOAT1, Test1.FLOAT2, Test1.FLOAT3, Test1.FLOAT4, Test1.FLOAT5}; Object[] doubleObject = new Object[] {Test1.DOUBLE1, Test1.DOUBLE2, Test1.DOUBLE3, Test1.DOUBLE4, Test1.DOUBLE5}; Object[] stringObject = new Object []{Test1.STRING1, Test1.STRING2, Test1.STRING3, Test1.STRING4, Test1.STRING5}; String msg = "Exception"; int code = 500; StructValue sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp1, vf ); sv.put( ValueFactoryTest1._mf_msg, "def" ); sv.put( ValueFactoryTest1._mf_code, 29 ); Excp1 e1 = (Excp1) vf.importCustomValue( sv ); assertEquals( "def", e1.msg ); assertEquals( 29, e1.code ); e1 = null; sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp2, vf ); Excp2 e2 = (Excp2) vf.importCustomValue( sv ); assertNotNull( e2 ); e2 = null; sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp3, vf ); Excp3 e3 = (Excp3) vf.importCustomValue( sv ); assertNotNull( e3 ); e3 = null; sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp4, vf ); Excp4 e4 = (Excp4) vf.importCustomValue( sv ); assertNotNull( e4 ); e4 = null; // Import exception with object as param testExcp5Import(msg, code, Test1.BOOL2); testExcp5Import(msg, code, Test1.BYTE5); testExcp5Import(msg, code, Test1.SHORT5); testExcp5Import(msg, code, Test1.INT5); testExcp5Import(msg, code, Test1.LONG5); testExcp5Import(msg, code, Test1.FLOAT5); testExcp5Import(msg, code, Test1.DOUBLE5); testExcp5Import(msg, code, Test1.STRING3); // Import exception with array of object as param testExcp6Import(msg, code, boolObject); testExcp6Import(msg, code, byteObject); testExcp6Import(msg, code, shortObject); testExcp6Import(msg, code, intObject); testExcp6Import(msg, code, longObject); testExcp6Import(msg, code, floatObject); testExcp6Import(msg, code, doubleObject); testExcp6Import(msg, code, stringObject); } /** */ 
@org.junit.Test public void test_method_nothing() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_nothing, ValueFactoryTest1._mf__messageId ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_nothing, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_incr() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_incr, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_x ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_incr, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_sub() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_sub, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_x, ValueFactoryTest1._mf_y ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_sub, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_sum() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_sum, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_x ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_sum, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_trans() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_trans, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_e, ValueFactoryTest1._mf_x ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_trans, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_dist() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_dist, ValueFactoryTest1._mf__messageId, 
ValueFactoryTest1._mf_a, ValueFactoryTest1._mf_b ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_dist, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_fill() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_fill, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_n, ValueFactoryTest1._mf_x ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_fill, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_fillObject() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_fillObject, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_n, ValueFactoryTest1._mf_o ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_fillObject, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_blow() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_blow, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_msg, ValueFactoryTest1._mf_code ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_blow, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_beets() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_beets, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_e ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_beets, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_throwExcp5() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_throwExcp5, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_msg, ValueFactoryTest1._mf_code, 
ValueFactoryTest1._mf_value ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_throwExcp5, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_throwExcp6() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_throwExcp5, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_msg, ValueFactoryTest1._mf_code, ValueFactoryTest1._mf_value ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_throwExcp6, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_boolean() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_boolean, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_boolean, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_boolean_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_boolean_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_boolean_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_byte() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_byte, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_byte, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_byte_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_byte_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( 
ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_byte_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_short() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_short, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_short, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_short_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_short_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_short_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_int() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_int, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_int, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_int_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_int_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_int_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_long() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_long, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_long, ValueFactoryTest1._mf__messageId, 
ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_long_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_long_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_long_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_float() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_float, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_float, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_float_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_float_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_float_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_double() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_double, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_double, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_double_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_double_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_double_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test 
public void test_method_p_string() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_string, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_string, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_string_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_string_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_string_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_E1() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_E1, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_E1, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_E1_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_E1_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_E1_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_S1() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_S1, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_S1, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_S1_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_S1_array, 
ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_S1_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_S2() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_S2, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_S2, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_S2_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_S2_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_S2_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_Blob() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_Blob, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_Blob, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_Blob_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_Blob_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_Blob_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_object() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_object, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( 
ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_object, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_object_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_object_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_object_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_object_struct() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_object_struct, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_object_struct, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } /** */ @org.junit.Test public void test_method_p_object_struct_array() { checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_p_object_struct_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf_a ); checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1__result_p_object_struct_array, ValueFactoryTest1._mf__messageId, ValueFactoryTest1._mf__inReplyTo, ValueFactoryTest1._mf_result ); } ///////////////////// // UTILITY METHODS // ///////////////////// private void checkType( Type type, Field... 
fields ) { assertNotNull( type ); assertSame( Type.class, type.getClass() ); assertSame( type, vf.getType( type.getId() ) ); Set<Field> tfields = type.getFields(); if (fields != null) { assertEquals( fields.length, tfields.size() ); for (Field f: fields) { assertNotNull( type.getValidator( f ) ); assertSame( f, type.getField( f.getId() ) ); assertSame( f, type.getField( f.getName() ) ); } } else { assertEquals( 0, tfields.size() ); } } private void testEnumExport( E1 e, Type t, Field f ) { StructValue sv = vf.exportCustomValue( e ); sv.checkType( t ); assertEquals( 1, sv.size() ); assertTrue( (Boolean) sv.get( f ) ); } private void testEnumImport( E1 e, Type t, Field f ) { StructValue sv = new StructValue( t, vf ); sv.put( f, true ); E1 a = (E1) vf.importCustomValue( sv ); assertSame( e, a ); } private void testS3Export(String s, Object value) { StructValue sv = vf.exportCustomValue( new S3( s,value ) ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S3 ); assertEquals( 2, sv.size() ); assertEquals( s, sv.get( ValueFactoryTest1._mf_tipe ) ); assertEquals( value, sv.get( ValueFactoryTest1._mf_x ) ); } private void testS3Import(String s, Object value) { StructValue sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S3, vf ); sv.put( ValueFactoryTest1._mf_tipe, s ); sv.put( ValueFactoryTest1._mf_x, value ); S3 myS3 = (S3) vf.importCustomValue( sv ); assertEquals( s, myS3.tipe ); assertEquals( value, myS3.x ); } private void testS4Export(String s, Object[] value) { StructValue sv = vf.exportCustomValue( new S4( s,value ) ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S4 ); assertEquals( 2, sv.size() ); assertEquals( s, sv.get( ValueFactoryTest1._mf_tipe ) ); assertEquals( value, sv.get( ValueFactoryTest1._mf_x ) ); } private void testS4Import(String s, Object[] value) { StructValue sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_S4, vf ); sv.put( ValueFactoryTest1._mf_tipe, s ); sv.put( 
ValueFactoryTest1._mf_x, value ); S4 myS4 = (S4) vf.importCustomValue( sv ); assertEquals( s, myS4.tipe ); assertArrayEquals( value, myS4.x ); } private void testExcp5Export(String msg, int code, Object value) { StructValue sv = vf.exportCustomValue( new Excp5( msg, code, value ) ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp5 ); assertEquals( 3, sv.size() ); assertEquals( msg, sv.get( ValueFactoryTest1._mf_msg ) ); assertEquals( code, sv.get( ValueFactoryTest1._mf_code ) ); assertEquals( value, sv.get( ValueFactoryTest1._mf_x ) ); } private void testExcp5Import(String msg, int code, Object value) { StructValue sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp5, vf ); sv.put( ValueFactoryTest1._mf_msg, msg ); sv.put( ValueFactoryTest1._mf_code, code ); sv.put( ValueFactoryTest1._mf_x, value ); Excp5 e = (Excp5) vf.importCustomValue( sv ); assertEquals( msg, e.msg ); assertEquals( code, e.code ); assertEquals( value, e.x ); } private void testExcp6Export(String msg, int code, Object[] value) { StructValue sv = vf.exportCustomValue( new Excp6( msg, code, value ) ); sv.checkType( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp6 ); assertEquals( 3, sv.size() ); assertEquals( msg, sv.get( ValueFactoryTest1._mf_msg ) ); assertEquals( code, sv.get( ValueFactoryTest1._mf_code ) ); assertEquals( value, sv.get( ValueFactoryTest1._mf_x ) ); } private void testExcp6Import(String msg, int code, Object[] value) { StructValue sv = new StructValue( ValueFactoryTest1._mt_org_apache_etch_tests_Test1_Excp6, vf ); sv.put( ValueFactoryTest1._mf_msg, msg ); sv.put( ValueFactoryTest1._mf_code, code ); sv.put( ValueFactoryTest1._mf_x, value ); Excp6 e = (Excp6) vf.importCustomValue( sv ); assertEquals( msg, e.msg ); assertEquals( code, e.code ); assertArrayEquals( value, e.x ); } }
/*
 * Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
 * and the EPL 1.0 (http://h2database.com/html/license.html).
 * Initial Developer: H2 Group
 */
package org.h2.test.db;

import java.io.StringReader;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Random;

import org.h2.test.TestAll;
import org.h2.test.TestBase;
import org.h2.util.SmallLRUCache;
import org.h2.util.SynchronizedVerifier;
import org.h2.util.Task;

/**
 * Multi-threaded tests.
 * <p>
 * The class doubles as the worker {@link Runnable}: the public no-arg
 * constructor creates the coordinating test instance, while the private
 * constructor creates per-thread workers that hammer the shared TEST table
 * until the parent sets {@link #stop}.
 */
public class TestMultiThread extends TestBase implements Runnable {

    // Set by the coordinating instance; polled by worker threads via parent.stop.
    private boolean stop;
    private TestMultiThread parent;
    private Random random;
    private Connection threadConn;
    private Statement threadStat;

    public TestMultiThread() {
        // nothing to do
    }

    private TestMultiThread(TestAll config, TestMultiThread parent)
            throws SQLException {
        this.config = config;
        this.parent = parent;
        random = new Random();
        threadConn = getConnection();
        threadStat = threadConn.createStatement();
    }

    /**
     * Run just this test.
     *
     * @param a ignored
     */
    public static void main(String... a) throws Exception {
        TestBase.createCaller().init().test();
    }

    @Override
    public void test() throws Exception {
        testConcurrentSchemaChange();
        testConcurrentLobAdd();
        testConcurrentView();
        testConcurrentAlter();
        testConcurrentAnalyze();
        testConcurrentInsertUpdateSelect();
        testLockModeWithMultiThreaded();
    }

    /**
     * Two tasks concurrently create/drop tables while also reading database
     * metadata, against a MULTI_THREADED database.
     */
    private void testConcurrentSchemaChange() throws Exception {
        String db = getTestName();
        deleteDb(db);
        final String url = getURL(db + ";MULTI_THREADED=1", true);
        Connection conn = getConnection(url);
        Task[] tasks = new Task[2];
        for (int i = 0; i < tasks.length; i++) {
            final int x = i;
            Task t = new Task() {
                @Override
                public void call() throws Exception {
                    Connection c2 = getConnection(url);
                    Statement stat = c2.createStatement();
                    try {
                        for (int i = 0; !stop; i++) {
                            stat.execute("create table test" + x + "_" + i);
                            c2.getMetaData().getTables(null, null, null, null);
                            stat.execute("drop table test" + x + "_" + i);
                        }
                    } finally {
                        c2.close();
                    }
                }
            };
            tasks[i] = t;
            t.execute();
        }
        Thread.sleep(1000);
        for (Task t : tasks) {
            t.get();
        }
        conn.close();
    }

    /**
     * Two tasks concurrently insert CLOB rows into the same table.
     */
    private void testConcurrentLobAdd() throws Exception {
        String db = getTestName();
        deleteDb(db);
        final String url = getURL(db + ";MULTI_THREADED=1", true);
        Connection conn = getConnection(url);
        Statement stat = conn.createStatement();
        stat.execute("create table test(id identity, data clob)");
        Task[] tasks = new Task[2];
        for (int i = 0; i < tasks.length; i++) {
            Task t = new Task() {
                @Override
                public void call() throws Exception {
                    Connection c2 = getConnection(url);
                    PreparedStatement p2 = c2
                            .prepareStatement("insert into test(data) values(?)");
                    try {
                        while (!stop) {
                            p2.setCharacterStream(1, new StringReader(new String(
                                    new char[10 * 1024])));
                            p2.execute();
                        }
                    } finally {
                        c2.close();
                    }
                }
            };
            tasks[i] = t;
            t.execute();
        }
        Thread.sleep(500);
        for (Task t : tasks) {
            t.get();
        }
        conn.close();
    }

    /**
     * Concurrently prepare statements against a view from two connections,
     * with SynchronizedVerifier watching the shared SmallLRUCache.
     */
    private void testConcurrentView() throws Exception {
        if (config.mvcc) {
            return;
        }
        String db = getTestName();
        deleteDb(db);
        final String url = getURL(db + ";MULTI_THREADED=1", true);
        final Random r = new Random();
        Connection conn = getConnection(url);
        Statement stat = conn.createStatement();
        StringBuilder buff = new StringBuilder();
        buff.append("create table test(id int");
        final int len = 3;
        for (int i = 0; i < len; i++) {
            buff.append(", x" + i + " int");
        }
        buff.append(")");
        stat.execute(buff.toString());
        stat.execute("create view test_view as select * from test");
        stat.execute("insert into test(id) select x from system_range(1, 2)");
        Task t = new Task() {
            @Override
            public void call() throws Exception {
                Connection c2 = getConnection(url);
                while (!stop) {
                    c2.prepareStatement("select * from test_view where x" +
                            r.nextInt(len) + "=1");
                }
                c2.close();
            }
        };
        t.execute();
        SynchronizedVerifier.setDetect(SmallLRUCache.class, true);
        for (int i = 0; i < 1000; i++) {
            conn.prepareStatement("select * from test_view where x" +
                    r.nextInt(len) + "=1");
        }
        t.get();
        SynchronizedVerifier.setDetect(SmallLRUCache.class, false);
        conn.close();
    }

    /**
     * One task keeps preparing against a table while the main thread
     * repeatedly adds and drops a column.
     */
    private void testConcurrentAlter() throws Exception {
        deleteDb(getTestName());
        final Connection conn = getConnection(getTestName());
        Statement stat = conn.createStatement();
        Task t = new Task() {
            @Override
            public void call() throws Exception {
                while (!stop) {
                    conn.prepareStatement("select * from test");
                }
            }
        };
        stat.execute("create table test(id int)");
        t.execute();
        for (int i = 0; i < 200; i++) {
            stat.execute("alter table test add column x int");
            stat.execute("alter table test drop column x");
        }
        t.get();
        conn.close();
    }

    /**
     * Run ANALYZE concurrently from two connections.
     */
    private void testConcurrentAnalyze() throws Exception {
        if (config.mvcc) {
            return;
        }
        deleteDb(getTestName());
        // Use getTestName() in the URL as well: the previous hard-coded
        // "concurrentAnalyze" database name did not match the deleteDb()
        // call above, so the database actually opened was never cleaned up.
        final String url = getURL(getTestName() + ";MULTI_THREADED=1", true);
        Connection conn = getConnection(url);
        Statement stat = conn.createStatement();
        stat.execute("create table test(id bigint primary key) " +
                "as select x from system_range(1, 1000)");
        Task t = new Task() {
            @Override
            public void call() throws SQLException {
                Connection conn2;
                conn2 = getConnection(url);
                for (int i = 0; i < 1000; i++) {
                    conn2.createStatement().execute("analyze");
                }
                conn2.close();
            }
        };
        t.execute();
        Thread.yield();
        for (int i = 0; i < 1000; i++) {
            conn.createStatement().execute("analyze");
        }
        t.get();
        stat.execute("drop table test");
        conn.close();
    }

    /**
     * Spawn many worker threads (see {@link #run()}) that select, insert and
     * update the TEST table concurrently, then stop them and report the
     * maximum generated identity value.
     */
    private void testConcurrentInsertUpdateSelect() throws Exception {
        threadConn = getConnection();
        threadStat = threadConn.createStatement();
        threadStat.execute("CREATE TABLE TEST(ID IDENTITY, NAME VARCHAR)");
        int len = getSize(10, 200);
        Thread[] threads = new Thread[len];
        for (int i = 0; i < len; i++) {
            threads[i] = new Thread(new TestMultiThread(config, this));
        }
        for (int i = 0; i < len; i++) {
            threads[i].start();
        }
        int sleep = getSize(400, 10000);
        Thread.sleep(sleep);
        this.stop = true;
        for (int i = 0; i < len; i++) {
            threads[i].join();
        }
        ResultSet rs = threadStat.executeQuery("SELECT COUNT(*) FROM TEST");
        rs.next();
        trace("max id=" + rs.getInt(1));
        threadConn.close();
    }

    private Connection getConnection() throws SQLException {
        return getConnection("jdbc:h2:mem:" + getTestName());
    }

    @Override
    public void run() {
        try {
            // Worker loop: mixed read/write traffic until the parent signals stop.
            while (!parent.stop) {
                threadStat.execute("SELECT COUNT(*) FROM TEST");
                threadStat.execute("INSERT INTO TEST VALUES(NULL, 'Hi')");
                PreparedStatement prep = threadConn.prepareStatement(
                        "UPDATE TEST SET NAME='Hello' WHERE ID=?");
                prep.setInt(1, random.nextInt(10000));
                prep.execute();
                prep = threadConn.prepareStatement("SELECT * FROM TEST WHERE ID=?");
                prep.setInt(1, random.nextInt(10000));
                ResultSet rs = prep.executeQuery();
                while (rs.next()) {
                    rs.getString("NAME");
                }
            }
            threadConn.close();
        } catch (Exception e) {
            logError("multi", e);
        }
    }

    private void testLockModeWithMultiThreaded() throws Exception {
        // currently the combination of LOCK_MODE=0 and MULTI_THREADED
        // is not supported
        deleteDb("lockMode");
        final String url = getURL("lockMode;MULTI_THREADED=1", true);
        Connection conn = getConnection(url);
        DatabaseMetaData meta = conn.getMetaData();
        assertFalse(meta.supportsTransactionIsolationLevel(
                Connection.TRANSACTION_READ_UNCOMMITTED));
        conn.close();
        deleteDb("lockMode");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.index;

import com.google.common.collect.ImmutableList;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants;
import org.apache.jackrabbit.oak.query.AbstractQueryTest;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Calendar;

import static com.google.common.collect.ImmutableList.of;
import static com.google.common.collect.Lists.newArrayList;
import static org.apache.jackrabbit.JcrConstants.JCR_CONTENT;
import static org.apache.jackrabbit.JcrConstants.JCR_DATA;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.NT_FILE;
import static org.apache.jackrabbit.JcrConstants.NT_FOLDER;
import static org.apache.jackrabbit.oak.plugins.memory.BinaryPropertyState.binaryProperty;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Common tests for index aggregation: content of aggregated child nodes
 * (e.g. nt:file/jcr:content) must be searchable on the aggregate root.
 */
public abstract class IndexAggregationCommonTest extends AbstractQueryTest {

    protected IndexOptions indexOptions;

    @Override
    protected void createTestIndexNode() throws Exception {
        Tree index = root.getTree("/");
        Tree indexDefn = createTestIndexNode(index, indexOptions.getIndexType());
        TestUtil.useV2(indexDefn);

        // Aggregates: nt:file pulls in jcr:content (and its subtree),
        // nt:folder pulls in "myFile" and a deep relative path.
        TestUtil.newNodeAggregator(indexDefn)
                .newRuleWithName(NT_FILE, newArrayList(JCR_CONTENT, JCR_CONTENT + "/*"))
                .newRuleWithName(NT_FOLDER, newArrayList("myFile", "subfolder/subsubfolder/file"));

        // Include all properties
        Tree props = TestUtil.newRulePropTree(indexDefn, "nt:base");
        TestUtil.enableForFullText(props, FulltextIndexConstants.REGEX_ALL_PROPS, true);
        root.commit();
    }

    /**
     * simple index aggregation from jcr:content to nt:file
     */
    @Test
    public void testNtFileAggregate() throws Exception {
        String sqlBase = "SELECT * FROM [nt:file] as f WHERE";
        String sqlCat = sqlBase + " CONTAINS (f.*, 'cat')";
        String sqlDog = sqlBase + " CONTAINS (f.*, 'dog')";

        Tree file = root.getTree("/").addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        root.commit();
        assertQuery(sqlDog, ImmutableList.of("/myFile"));

        // update jcr:data
        root.getTree("/")
                .getChild("myFile")
                .getChild(JCR_CONTENT)
                .setProperty(
                        binaryProperty(JCR_DATA,
                                "the quick brown fox jumps over the lazy cat."));
        root.commit();
        assertQuery(sqlDog, new ArrayList<String>());
        assertQuery(sqlCat, ImmutableList.of("/myFile"));

        // replace jcr:content with unstructured
        root.getTree("/").getChild("myFile").getChild(JCR_CONTENT).remove();
        Tree unstrContent = root.getTree("/").getChild("myFile")
                .addChild(JCR_CONTENT);
        unstrContent.setProperty(JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED,
                Type.NAME);
        Tree foo = unstrContent.addChild("foo");
        foo.setProperty(JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
        foo.setProperty("text", "the quick brown fox jumps over the lazy dog.");
        root.commit();
        assertQuery(sqlDog, ImmutableList.of("/myFile"));
        assertQuery(sqlCat, new ArrayList<String>());

        // remove foo
        root.getTree("/").getChild("myFile").getChild(JCR_CONTENT)
                .getChild("foo").remove();
        root.commit();
        assertQuery(sqlDog, new ArrayList<String>());
        assertQuery(sqlCat, new ArrayList<String>());

        // replace jcr:content again with resource
        root.getTree("/").getChild("myFile").getChild(JCR_CONTENT).remove();
        resource = root.getTree("/").getChild("myFile").addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy cat."));
        root.commit();
        assertQuery(sqlDog, new ArrayList<String>());
        assertQuery(sqlCat, ImmutableList.of("/myFile"));
    }

    @Test
    public void testChildNodeWithOr() throws Exception {
        Tree file = root.getTree("/").addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        resource.setProperty("jcr:title", "title");
        resource.setProperty("jcr:description", "description");
        root.commit();

        String matchContentSimple = "//element(*, nt:file)[(jcr:contains(jcr:content, 'dog'))]";
        assertQuery(matchContentSimple, "xpath", ImmutableList.of("/myFile"));

        String matchContent = " //element(*, nt:file)[(jcr:contains(jcr:content, 'dog') or jcr:contains(jcr:content/@jcr:title, 'invalid') or jcr:contains(jcr:content/@jcr:description, 'invalid'))]";
        assertQuery(matchContent, "xpath", ImmutableList.of("/myFile"));

        String matchTitle = " //element(*, nt:file)[(jcr:contains(jcr:content, 'invalid') or jcr:contains(jcr:content/@jcr:title, 'title') or jcr:contains(jcr:content/@jcr:description, 'invalid'))]";
        assertQuery(matchTitle, "xpath", ImmutableList.of("/myFile"));

        String matchDesc = " //element(*, nt:file)[(jcr:contains(jcr:content, 'invalid') or jcr:contains(jcr:content/@jcr:title, 'invalid') or jcr:contains(jcr:content/@jcr:description, 'description'))]";
        assertQuery(matchDesc, "xpath", ImmutableList.of("/myFile"));

        String matchNone = " //element(*, nt:file)[(jcr:contains(jcr:content, 'invalid') or jcr:contains(jcr:content/@jcr:title, 'invalid') or jcr:contains(jcr:content/@jcr:description, 'invalid'))]";
        assertQuery(matchNone, "xpath", new ArrayList<String>());
    }

    @Test
    public void testChildNodeWithOrComposite() throws Exception {
        Tree folder = root.getTree("/").addChild("myFolder");
        folder.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        Tree file = folder.addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        file.setProperty("jcr:title", "title");
        file.setProperty("jcr:description", "description");

        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        root.commit();

        String matchContentSimple = "//element(*, nt:folder)[(jcr:contains(myFile, 'dog'))]";
        assertQuery(matchContentSimple, "xpath", ImmutableList.of("/myFolder"));

        String matchContent = " //element(*, nt:folder)[(jcr:contains(myFile, 'dog') or jcr:contains(myFile/@jcr:title, 'invalid') or jcr:contains(myFile/@jcr:description, 'invalid'))]";
        assertQuery(matchContent, "xpath", ImmutableList.of("/myFolder"));

        String matchTitle = " //element(*, nt:folder)[(jcr:contains(myFile, 'invalid') or jcr:contains(myFile/@jcr:title, 'title') or jcr:contains(myFile/@jcr:description, 'invalid'))]";
        assertQuery(matchTitle, "xpath", ImmutableList.of("/myFolder"));

        String matchDesc = " //element(*, nt:folder)[(jcr:contains(myFile, 'invalid') or jcr:contains(myFile/@jcr:title, 'invalid') or jcr:contains(myFile/@jcr:description, 'description'))]";
        assertQuery(matchDesc, "xpath", ImmutableList.of("/myFolder"));

        String matchNone = " //element(*, nt:folder)[(jcr:contains(myFile, 'invalid') or jcr:contains(myFile/@jcr:title, 'invalid') or jcr:contains(myFile/@jcr:description, 'invalid'))]";
        assertQuery(matchNone, "xpath", new ArrayList<String>());

        String matchOnlyTitleOr = " //element(*, nt:folder)[(jcr:contains(myFile/@jcr:title, 'title') or jcr:contains(myFile/@jcr:title, 'unknown') )]";
        assertQuery(matchOnlyTitleOr, "xpath", ImmutableList.of("/myFolder"));
    }

    @Test
    public void testNodeTypes() throws Exception {
        Tree folder = root.getTree("/").addChild("myFolder");
        folder.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        Tree file = folder.addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        file.setProperty("jcr:title", "title");
        file.setProperty("jcr:description", "description");

        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        root.commit();

        String matchContentSimple = "//*[( jcr:contains(., 'dog') and @jcr:primaryType = 'nt:file' )]";
        assertQuery(matchContentSimple, "xpath", ImmutableList.of("/myFolder/myFile"));

        String matchContentDouble = "//*[( jcr:contains(., 'dog') and (@jcr:primaryType = 'nt:file' or @jcr:primaryType = 'nt:folder') )]";
        assertQuery(matchContentDouble, "xpath",
                ImmutableList.of("/myFolder", "/myFolder/myFile"));
    }

    @Test
    public void testNodeTypesDeep() throws Exception {
        Tree folder = root.getTree("/").addChild("myFolder");
        folder.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        Tree folder2 = folder.addChild("subfolder");
        folder2.setProperty(JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME);
        Tree folder3 = folder2.addChild("subsubfolder");
        folder3.setProperty(JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME);
        file(folder3, "file");
        root.commit();

        // matches via the deep "subfolder/subsubfolder/file" aggregate rule
        String xpath = "//element(*, nt:folder)[jcr:contains(., 'dog')]";
        assertQuery(xpath, "xpath", ImmutableList.of("/myFolder"));
    }

    /**
     * Adds an nt:file child named {@code name} with a standard text resource.
     */
    private static void file(Tree parent, String name) {
        Tree file = parent.addChild(name);
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
    }

    @Test
    public void testChildNodeProperty() throws Exception {
        Tree file = root.getTree("/").addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        resource.setProperty("jcr:title", "title");
        resource.setProperty("jcr:description", "description");
        root.commit();

        String matchChildSimple = "//*[( jcr:contains(@jcr:title, 'title') )]";
        assertQuery(matchChildSimple, "xpath", ImmutableList.of("/myFile/jcr:content"));

        String matchChildWithStar = "//*[( jcr:contains(., 'dog') and jcr:contains(@jcr:title, 'title') )]";
        assertQuery(matchChildWithStar, "xpath", ImmutableList.of("/myFile/jcr:content"));
    }

    @Test
    public void testChildNodeProperty2() throws Exception {
        Tree file = root.getTree("/").addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        resource.setProperty("jcr:title", "title");
        resource.setProperty("jcr:description", "description");

        Tree file2 = root.getTree("/").addChild("myFile2");
        file2.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        Tree resource2 = file2.addChild(JCR_CONTENT);
        resource2.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource2.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        resource2.setProperty("jcr:title", "other");
        // Fixed copy/paste bug: this description belongs to the SECOND file's
        // resource (the first resource already has description "description").
        // Setting it on 'resource' clobbered /myFile's description instead.
        resource2.setProperty("jcr:description", "title");
        root.commit();

        // only /myFile has "title" in @jcr:title; /myFile2 has "title" only
        // in its description, which this query must not match
        String matchChildSimple = "//*[( jcr:contains(jcr:content/@jcr:title, 'title') )]";
        assertQuery(matchChildSimple, "xpath", ImmutableList.of("/myFile"));
    }

    @Test
    public void testPreventDoubleAggregation() throws Exception {
        Tree file = root.getTree("/").addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        file.setProperty("jcr:title", "fox");
        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        root.commit();

        String matchChildSimple = "//element(*, nt:file)[( jcr:contains(., 'fox') )]";
        assertQuery(matchChildSimple, "xpath", ImmutableList.of("/myFile"));
    }

    @Test
    public void testDifferentNodes() throws Exception {
        Tree folder = root.getTree("/").addChild("myFolder");
        folder.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        Tree file = folder.addChild("myFile");
        file.setProperty(JCR_PRIMARYTYPE, NT_FILE, Type.NAME);
        file.setProperty("jcr:title", "title");
        file.setProperty("jcr:description", "description");

        Tree resource = file.addChild(JCR_CONTENT);
        resource.setProperty(JCR_PRIMARYTYPE, "nt:resource", Type.NAME);
        resource.setProperty("jcr:lastModified", Calendar.getInstance());
        resource.setProperty("jcr:encoding", "UTF-8");
        resource.setProperty("jcr:mimeType", "text/plain");
        resource.setProperty(binaryProperty(JCR_DATA,
                "the quick brown fox jumps over the lazy dog."));
        root.commit();

        assertQuery(
                "//element(*, nt:file)[jcr:contains(., 'dog')]", "xpath",
                ImmutableList.of("/myFolder/myFile"));
        assertQuery(
                "//element(*, nt:file)[jcr:contains(., 'title')]", "xpath",
                ImmutableList.of("/myFolder/myFile"));
        assertQuery(
                "//element(*, nt:file)[jcr:contains(., 'dog') and jcr:contains(., 'title')]",
                "xpath", ImmutableList.of("/myFolder/myFile"));

        // double aggregation dupes
        assertQuery(
                "//*[(jcr:contains(., 'dog') or jcr:contains(jcr:content, 'dog') )]",
                "xpath",
                ImmutableList.of("/myFolder", "/myFolder/myFile",
                        "/myFolder/myFile/jcr:content"));
    }

    @Test
    public void oak3371AggregateV2() throws CommitFailedException {
        oak3371();
    }

    @Test
    public void oak3371AggregateV1() throws CommitFailedException {
        Tree indexdef = root.getTree("/oak:index/" + TEST_INDEX_NAME);
        assertNotNull(indexdef);
        assertTrue(indexdef.exists());
        indexdef.setProperty(FulltextIndexConstants.COMPAT_MODE, 1L);
        indexdef.setProperty(IndexConstants.REINDEX_PROPERTY_NAME, true);
        root.commit();
        oak3371();
    }

    /**
     * OAK-3371: CONTAINS / NOT CONTAINS on plain properties of aggregated
     * node types.
     */
    private void oak3371() throws CommitFailedException {
        setTraversalEnabled(false);
        Tree test, t;
        test = root.getTree("/").addChild("test");
        t = test.addChild("a");
        t.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        t.setProperty("foo", "bar");
        t = test.addChild("b");
        t.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        t.setProperty("foo", "cat");
        t = test.addChild("c");
        t.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        t = test.addChild("d");
        t.setProperty(JCR_PRIMARYTYPE, NT_FOLDER, Type.NAME);
        t.setProperty("foo", "bar cat");
        root.commit();

        assertQuery(
                "SELECT * FROM [nt:folder] WHERE ISDESCENDANTNODE('/test') AND CONTAINS(foo, 'bar')",
                of("/test/a", "/test/d"));
        assertQuery(
                "SELECT * FROM [nt:folder] WHERE ISDESCENDANTNODE('/test') AND NOT CONTAINS(foo, 'bar')",
                of("/test/b", "/test/c"));
        assertQuery(
                "SELECT * FROM [nt:folder] WHERE ISDESCENDANTNODE('/test') AND CONTAINS(foo, 'bar cat')",
                of("/test/d"));
        assertQuery(
                "SELECT * FROM [nt:folder] WHERE ISDESCENDANTNODE('/test') AND NOT CONTAINS(foo, 'bar cat')",
                of("/test/c"));
        setTraversalEnabled(true);
    }
}
/*
 * Copyright 2010 Phil Burk, Mobileer Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.jsyn.unitgen;

import com.jsyn.data.SegmentedEnvelope;
import com.jsyn.engine.SynthesisEngine;
import com.jsyn.ports.UnitInputPort;
import com.jsyn.ports.UnitOutputPort;

/**
 * Six stage envelope similar to an ADSR. DAHDSR is like an ADSR but with an additional Delay stage
 * before the attack, and a Hold stage after the Attack. If Delay and Hold are both set to zero then
 * it will act like an ADSR. The envelope is triggered when the input goes above THRESHOLD. The
 * envelope is released when the input goes below THRESHOLD. The THRESHOLD is currently 0.01 but may
 * change so it would be best to use an input signal that went from 0 to 1. Mathematically an
 * exponential Release will never reach 0.0. But when it reaches -96 dB the DAHDSR just sets its
 * output to 0.0 and stops. There is an example program in the ZIP archive called HearDAHDSR. It
 * drives a DAHDSR with a square wave.
 *
 * @author Phil Burk (C) 2010 Mobileer Inc
 * @see SegmentedEnvelope
 */
public class EnvelopeDAHDSR extends UnitGate implements UnitSource {
    // Attack/decay/release durations shorter than this are treated as
    // instantaneous (the stage is skipped or clamped).
    private static final double MIN_DURATION = (1.0 / 100000.0);

    /**
     * Time in seconds for first stage of the envelope, before the attack. Typically zero.
     */
    public UnitInputPort delay;
    /**
     * Time in seconds for the rising stage of the envelope to go from 0.0 to 1.0. The attack is a
     * linear ramp.
     */
    public UnitInputPort attack;
    /** Time in seconds for the plateau between the attack and decay stages. */
    public UnitInputPort hold;
    /**
     * Time in seconds for the falling stage to go from 0 dB to -90 dB. The decay stage will stop at
     * the sustain level. But we calculate the time to fall to -90 dB so that the decay
     * <em>rate</em> will be unaffected by the sustain level.
     */
    public UnitInputPort decay;
    /**
     * Level for the sustain stage. The envelope will hold here until the input goes to zero or
     * less. This should be set between 0.0 and 1.0.
     */
    public UnitInputPort sustain;
    /**
     * Time in seconds to go from 0 dB to -90 dB. This stage is triggered when the input goes to
     * zero or less. The release stage will start from the sustain level. But we calculate the time
     * to fall from full amplitude so that the release <em>rate</em> will be unaffected by the
     * sustain level.
     */
    public UnitInputPort release;
    /** Output is multiplied by this amplitude on every sample. */
    public UnitInputPort amplitude;

    // One state per envelope stage; IDLE means output is (or has decayed to) zero.
    enum State {
        IDLE, DELAYING, ATTACKING, HOLDING, DECAYING, SUSTAINING, RELEASING
    }

    private State state = State.IDLE;
    // Remaining whole samples in the DELAYING/HOLDING stages. Declared double
    // but only ever assigned int-cast sample counts and decremented by 1.
    private double countdown;
    // Per-sample multiplier for the exponential DECAYING/RELEASING stages.
    private double scaler = 1.0;
    // Current envelope level, 0.0 .. 1.0 (before the amplitude multiply).
    private double level;
    // Per-sample additive step for the linear ATTACKING ramp.
    private double increment;

    public EnvelopeDAHDSR() {
        super();
        // Each port is registered and then given (min, default, max) hints
        // for UI/patching purposes.
        addPort(delay = new UnitInputPort("Delay", 0.0));
        delay.setup(0.0, 0.0, 2.0);
        addPort(attack = new UnitInputPort("Attack", 0.1));
        attack.setup(0.01, 0.1, 8.0);
        addPort(hold = new UnitInputPort("Hold", 0.0));
        hold.setup(0.0, 0.0, 2.0);
        addPort(decay = new UnitInputPort("Decay", 0.2));
        decay.setup(0.01, 0.2, 8.0);
        addPort(sustain = new UnitInputPort("Sustain", 0.5));
        sustain.setup(0.0, 0.5, 1.0);
        addPort(release = new UnitInputPort("Release", 0.3));
        release.setup(0.01, 0.3, 8.0);
        addPort(amplitude = new UnitInputPort("Amplitude", 1.0));
    }

    /**
     * Render samples [start, limit) by running the stage state machine. Each
     * case renders as many samples as possible in its stage, then breaks out
     * of its inner loop on a stage transition; the outer loop re-dispatches
     * on the new state at the current sample index.
     */
    @Override
    public void generate(int start, int limit) {
        double[] sustains = sustain.getValues();
        double[] amplitudes = amplitude.getValues();
        double[] outputs = output.getValues();

        for (int i = start; i < limit;) {
            // Gate edge is sampled once per state dispatch at index i.
            boolean triggered = input.checkGate(i);
            switch (state) {
                case IDLE:
                    for (; i < limit; i++) {
                        outputs[i] = level * amplitudes[i]; // level is 0.0 here
                        if (triggered) {
                            startDelay(i);
                            break;
                        }
                    }
                    break;
                case DELAYING:
                    for (; i < limit; i++) {
                        outputs[i] = level * amplitudes[i];
                        if (input.isOff()) {
                            // gate dropped during the delay — go straight to release
                            startRelease(i);
                            break;
                        } else {
                            countdown -= 1;
                            if (countdown <= 0) {
                                startAttack(i);
                                break;
                            }
                        }
                    }
                    break;
                case ATTACKING:
                    for (; i < limit; i++) {
                        // Increment first so we can render fast attacks.
                        level += increment;
                        if (level >= 1.0) {
                            level = 1.0;
                            outputs[i] = level * amplitudes[i];
                            startHold(i);
                            break;
                        } else {
                            outputs[i] = level * amplitudes[i];
                            if (input.isOff()) {
                                startRelease(i);
                                break;
                            }
                        }
                    }
                    break;
                case HOLDING:
                    for (; i < limit; i++) {
                        outputs[i] = amplitudes[i]; // level is 1.0
                        countdown -= 1;
                        if (countdown <= 0) {
                            startDecay(i);
                            break;
                        } else if (input.isOff()) {
                            startRelease(i);
                            break;
                        }
                    }
                    break;
                case DECAYING:
                    for (; i < limit; i++) {
                        // Output current level, then apply the decay for the
                        // next sample.
                        outputs[i] = level * amplitudes[i];
                        level *= scaler; // exponential decay
                        if (triggered) {
                            // retrigger restarts the envelope from the delay stage
                            startDelay(i);
                            break;
                        } else if (level < sustains[i]) {
                            // decayed down to the sustain level — clamp and hold
                            level = sustains[i];
                            startSustain(i);
                            break;
                        } else if (level < SynthesisEngine.DB96) {
                            // fell below the audible floor (presumably -96 dB;
                            // reached when sustain is ~0) — stop the envelope
                            input.checkAutoDisable();
                            startIdle();
                            break;
                        } else if (input.isOff()) {
                            startRelease(i);
                            break;
                        }
                    }
                    break;
                case SUSTAINING:
                    for (; i < limit; i++) {
                        // Track the (possibly modulated) sustain input each sample.
                        level = sustains[i];
                        outputs[i] = level * amplitudes[i];
                        if (triggered) {
                            startDelay(i);
                            break;
                        } else if (input.isOff()) {
                            startRelease(i);
                            break;
                        }
                    }
                    break;
                case RELEASING:
                    for (; i < limit; i++) {
                        outputs[i] = level * amplitudes[i];
                        level *= scaler; // exponential decay
                        if (triggered) {
                            startDelay(i);
                            break;
                        } else if (level < SynthesisEngine.DB96) {
                            // release has decayed below the floor — snap to idle (0.0)
                            input.checkAutoDisable();
                            startIdle();
                            break;
                        }
                    }
                    break;
            }
        }
    }

    // Stop the envelope and force the output level to zero.
    private void startIdle() {
        state = State.IDLE;
        level = 0.0;
    }

    // Enter DELAYING, or skip straight to the attack when delay <= 0.
    private void startDelay(int i) {
        double[] delays = delay.getValues();
        if (delays[i] <= 0.0) {
            startAttack(i);
        } else {
            countdown = (int) (delays[i] * getFrameRate());
            state = State.DELAYING;
        }
    }

    // Enter ATTACKING with a linear per-sample increment, or jump straight to
    // full level when the attack time is effectively zero.
    private void startAttack(int i) {
        double[] attacks = attack.getValues();
        double duration = attacks[i];
        if (duration < MIN_DURATION) {
            level = 1.0;
            startHold(i);
        } else {
            increment = getFramePeriod() / duration;
            state = State.ATTACKING;
        }
    }

    // Enter HOLDING, or skip straight to the decay when hold <= 0.
    private void startHold(int i) {
        double[] holds = hold.getValues();
        if (holds[i] <= 0.0) {
            startDecay(i);
        } else {
            countdown = (int) (holds[i] * getFrameRate());
            state = State.HOLDING;
        }
    }

    // Enter DECAYING with an exponential per-sample scaler, or skip straight
    // to the sustain when the decay time is effectively zero.
    private void startDecay(int i) {
        double[] decays = decay.getValues();
        double duration = decays[i];
        if (duration < MIN_DURATION) {
            startSustain(i);
        } else {
            scaler = getSynthesisEngine().convertTimeToExponentialScaler(duration);
            state = State.DECAYING;
        }
    }

    // Enter SUSTAINING; level tracking happens in generate().
    private void startSustain(int i) {
        state = State.SUSTAINING;
    }

    // Enter RELEASING; release time is clamped to MIN_DURATION so the scaler
    // is always well defined.
    private void startRelease(int i) {
        double[] releases = release.getValues();
        double duration = releases[i];
        if (duration < MIN_DURATION) {
            duration = MIN_DURATION;
        }
        scaler = getSynthesisEngine().convertTimeToExponentialScaler(duration);
        state = State.RELEASING;
    }

    /**
     * Expose the main ADSR ports on an enclosing circuit, each prefixed with
     * the given string. NOTE(review): delay, hold and amplitude are not
     * exported — presumably intentional (ADSR-style facade), verify.
     */
    public void export(Circuit circuit, String prefix) {
        circuit.addPort(attack, prefix + attack.getName());
        circuit.addPort(decay, prefix + decay.getName());
        circuit.addPort(sustain, prefix + sustain.getName());
        circuit.addPort(release, prefix + release.getName());
    }

    /** Returns the envelope's output port (UnitSource contract). */
    @Override
    public UnitOutputPort getOutput() {
        return output;
    }
}
/* * Copyright 2011 - 2013 NTB University of Applied Sciences in Technology * Buchs, Switzerland, http://www.ntb.ch/inf * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ch.ntb.inf.deep.comp.targettest.objects; import ch.ntb.inf.junitTarget.Assert; import ch.ntb.inf.junitTarget.CmdTransmitter; import ch.ntb.inf.junitTarget.MaxErrors; import ch.ntb.inf.junitTarget.Test; import ch.ntb.inf.deep.comp.targettest.objects.helper.exampleA.*; import ch.ntb.inf.deep.comp.targettest.objects.helper.exampleB.*; import ch.ntb.inf.deep.comp.targettest.objects.helper.exampleC.*; import ch.ntb.inf.deep.comp.targettest.objects.helper.exampleD.*; /** * NTB 12.03.2013 * * @author Urs Graf * * Tests for interfaces (instanceof and methods) */ @MaxErrors(100) @SuppressWarnings("static-access") public class InterfaceTest { @Test // tests constants public static void testConst() { Assert.assertEquals("var0", 23,CAexD.var0); Assert.assertEquals("var1", 2.125,CAexD.var1, 0.01); Assert.assertEquals("var2", 1.15f, CAexD.var2 , 0.01f); Assert.assertTrue("var3", CAexD.var3); Assert.assertEquals("var4", 0x5555555555L, CAexD.var4); Assert.assertEquals("var5", (byte)127, CAexD.var5); Assert.assertEquals("var6", (short)256,CAexD.var6); Assert.assertEquals("var7", 'Z', CAexD.var7); Assert.assertEquals("i1Var0", 23,CBexD.getVar0()); Assert.assertEquals("i1Var1", 2.125,CBexD.getVar1(), 0.01); Assert.assertEquals("i1Var2", 1.15f, CBexD.getVar2() , 0.01f); Assert.assertTrue("i1Var3", CBexD.getVar3()); 
Assert.assertEquals("i1Var4", 0x5555555555L, CBexD.getVar4()); Assert.assertEquals("i1Var5", (byte)127, CBexD.getVar5()); Assert.assertEquals("i1Var6", (short)256,CBexD.getVar6()); Assert.assertEquals("i1Var7", 'Z', CBexD.getVar7()); Assert.assertEquals("i2Var0", 45,CBexD.getI2Var0()); Assert.assertEquals("i2Var1", 3.1459,CBexD.getI2Var1(), 0.01); Assert.assertEquals("i2Var2", 3.33f, CBexD.getI2Var2() , 0.01f); Assert.assertFalse("i2Var3", CBexD.getI2Var3()); Assert.assertEquals("i2Var4", 0xAAAAAAAAAAL, CBexD.getI2Var4()); Assert.assertEquals("i2Var5", (byte)-128, CBexD.getI2Var5()); Assert.assertEquals("i2Var6", (short)-264,CBexD.getI2Var6()); Assert.assertEquals("i2Var7", 'B', CBexD.getI2Var7()); Assert.assertEquals("var0", 23,CCexD.var0); Assert.assertEquals("var1", 2.125,CCexD.var1, 0.01); Assert.assertEquals("var2", 1.15f, CCexD.var2 , 0.01f); Assert.assertTrue("var3", CCexD.var3); Assert.assertEquals("var4", 0x5555555555L, CCexD.var4); Assert.assertEquals("var5", (byte)127, CCexD.var5); Assert.assertEquals("var6", (short)256,CCexD.var6); Assert.assertEquals("var7", 'Z', CCexD.var7); Assert.assertEquals("var8", 89,CCexD.var8); Assert.assertEquals("var9", 7.775,CCexD.var9, 0.01); Assert.assertEquals("var10", 9.32f, CCexD.var10 , 0.01f); Assert.assertFalse("var11", CCexD.var11); Assert.assertEquals("var12", 0xBBBBBBBBBBL, CCexD.var12); Assert.assertEquals("var13", (byte)64, CCexD.var13); Assert.assertEquals("var14", (short)1023,CCexD.var14); Assert.assertEquals("var15", 'L', CCexD.var15); Assert.assertEquals("var0", 23,CDexD.var0); Assert.assertEquals("var1", 2.125,CDexD.var1, 0.01); Assert.assertEquals("var2", 1.15f, CDexD.var2 , 0.01f); Assert.assertTrue("var3", CDexD.var3); Assert.assertEquals("var4", 0x5555555555L, CDexD.var4); Assert.assertEquals("var5", (byte)127, CDexD.var5); Assert.assertEquals("var6", (short)256,CDexD.var6); Assert.assertEquals("var7", 'Z', CDexD.var7); CmdTransmitter.sendDone(); } public static void testInstance1() { Object clz1 = new 
CAexD(); Object clz2 = new CBexD(); Object clz3 = new CCexD(); Object clz4 = new CDexD(); Object clz5 = new CEexD(); Assert.assertTrue("instance1", clz1 instanceof IAexD); Assert.assertFalse("instance2", clz1 instanceof IBexD); Assert.assertFalse("instance3", clz1 instanceof ICexD); Assert.assertFalse("instance4", clz1 instanceof IDexD); Assert.assertTrue("instance11", clz2 instanceof IAexD); Assert.assertFalse("instance12", clz2 instanceof IBexD); Assert.assertTrue("instance13", clz2 instanceof ICexD); Assert.assertFalse("instance14", clz2 instanceof IDexD); Assert.assertTrue("instance21", clz3 instanceof IAexD); Assert.assertFalse("instance22", clz3 instanceof IBexD); Assert.assertTrue("instance23", clz3 instanceof ICexD); Assert.assertFalse("instance24", clz3 instanceof IDexD); Assert.assertTrue("instance31", clz4 instanceof IAexD); Assert.assertFalse("instance32", clz4 instanceof IBexD); Assert.assertFalse("instance33", clz4 instanceof ICexD); Assert.assertFalse("instance34", clz4 instanceof IDexD); Assert.assertTrue("instance41", clz5 instanceof IAexD); Assert.assertTrue("instance42", clz5 instanceof IBexD); Assert.assertFalse("instance43", clz5 instanceof ICexD); Assert.assertTrue("instance44", clz5 instanceof IDexD); CmdTransmitter.sendDone(); } @Test public static void testInstance2() { Object cls = new CAexD[2]; Assert.assertFalse("instance1", cls instanceof IAexD); Assert.assertTrue("instance2", cls instanceof IAexD[]); Assert.assertFalse("instance3", cls instanceof IAexD[][]); Assert.assertFalse("instance9", cls instanceof IBexD); Assert.assertFalse("instance10", cls instanceof IBexD[]); Assert.assertFalse("instance11", cls instanceof IBexD[][]); cls = new CEexD[1]; Assert.assertTrue("instance21", cls instanceof IAexD[]); Assert.assertTrue("instance22", cls instanceof IBexD[]); Assert.assertFalse("instance23", cls instanceof IAexD[][]); Assert.assertFalse("instance26", cls instanceof ICexD[]); Assert.assertTrue("instance27", cls instanceof IDexD[]); 
Assert.assertFalse("instance28", cls instanceof IDexD[][]); cls = new CEexD[1][2]; Assert.assertFalse("instance41", cls instanceof IAexD[]); Assert.assertTrue("instance42", cls instanceof IAexD[][]); Assert.assertFalse("instance43", cls instanceof IAexD[][][]); Assert.assertFalse("instance44", cls instanceof IBexD[]); Assert.assertTrue("instance45", cls instanceof IBexD[][]); Assert.assertFalse("instance46", cls instanceof IBexD[][][]); Assert.assertFalse("instance47", cls instanceof ICexD[]); Assert.assertFalse("instance48", cls instanceof ICexD[][]); Assert.assertFalse("instance49", cls instanceof IDexD[]); Assert.assertTrue("instance50", cls instanceof IDexD[][]); CmdTransmitter.sendDone(); } @Test public static void testInstance3() { Object o = new CXexA(); Assert.assertFalse("instance1", o instanceof IAexD); Assert.assertTrue("instance2", o instanceof IAexA); Assert.assertFalse("instance3", o instanceof IAexA[]); o = new CYexA(); Assert.assertTrue("instance11", o instanceof IBexA); Assert.assertFalse("instance12", o instanceof IBexA[]); Assert.assertFalse("instance13", o instanceof IAexA); o = new CYexA[2]; Assert.assertTrue("instance21", o instanceof IBexA[]); Assert.assertFalse("instance22", o instanceof IBexA); Assert.assertFalse("instance23", o instanceof IAexA[]); o = new CEexA[2]; Assert.assertFalse("instance31", o instanceof IAexA[]); Assert.assertTrue("instance33", o instanceof IBexA[]); Assert.assertTrue("instance34", o instanceof ICexA[]); Assert.assertTrue("instance35", o instanceof IDexA[]); Assert.assertTrue("instance36", o instanceof IEexA[]); Assert.assertFalse("instance38", o instanceof IBexA); Assert.assertFalse("instance40", o instanceof IDexA[][]); o = new IAexA[2]; Assert.assertTrue("instance50", o instanceof IAexA[]); Assert.assertFalse("instance51", o instanceof IBexA[]); o = new IDexA[2]; Assert.assertFalse("instance60", o instanceof IAexA[]); Assert.assertTrue("instance61", o instanceof ICexA[]); Assert.assertTrue("instance62", o 
instanceof IDexA[]); Assert.assertFalse("instance63", o instanceof CCexA[]); Assert.assertFalse("instance64", o instanceof ICexA); Assert.assertFalse("instance65", o instanceof ICexA[][]); CmdTransmitter.sendDone(); } @Test public static void testInstance4() { Object o = new CDexC(); Assert.assertTrue("test1", o instanceof IAexC); Assert.assertTrue("test2", o instanceof IBexC); Assert.assertTrue("test3", o instanceof ICexC); Assert.assertTrue("test4", o instanceof IDexC); Assert.assertTrue("test5", o instanceof IEexC); o = new CEexC(); Assert.assertTrue("test10", o instanceof IAexC); Assert.assertTrue("test11", o instanceof IBexC); Assert.assertTrue("test12", o instanceof ICexC); Assert.assertTrue("test13", o instanceof IDexC); Assert.assertTrue("test14", o instanceof IEexC); o = new CCexC(); Assert.assertTrue("test20", o instanceof IAexC); Assert.assertTrue("test21", o instanceof IBexC); Assert.assertTrue("test22", o instanceof ICexC); Assert.assertFalse("test23", o instanceof IDexC); Assert.assertFalse("test24", o instanceof IEexC); o = new CBexC(); Assert.assertFalse("test30", o instanceof IAexC); Assert.assertFalse("test31", o instanceof IBexC); Assert.assertFalse("test32", o instanceof ICexC); Assert.assertFalse("test33", o instanceof IDexC); Assert.assertFalse("test34", o instanceof IEexC); CmdTransmitter.sendDone(); } @Test // tests a class implementing a single interface with a single method public static void testMethods1() { IAexA cls = new CXexA(); Assert.assertEquals("test1", 1, cls.ima11()); IAexB cls1 = new CXexB(); Assert.assertEquals("test1", 101, cls1.ima11()); CmdTransmitter.sendDone(); } @Test // tests a class implementing a single interface with several methods public static void testMethods2(){ IBexA cls = new CYexA(); Assert.assertEquals("test1", 22 ,cls.imb11()); Assert.assertEquals("test2", 23 ,cls.imb12()); cls = new CAexA(); Assert.assertEquals("test3", 26 ,cls.imb11()); Assert.assertEquals("test4", 27 ,cls.imb12()); IAexD cls1 = new 
CAexD(); Assert.assertEquals("test5", 5 ,cls1.ima11()); Assert.assertEquals("test6", 266 ,cls1.ima12(10)); CmdTransmitter.sendDone(); } @Test // tests a class implementing several interfaces public static void testMethods3(){ IBexA cls1 = new CCexA(); Assert.assertEquals("test1", 26 ,cls1.imb11()); Assert.assertEquals("test2", 27 ,cls1.imb12()); ICexA cls2 = new CCexA(); Assert.assertEquals("test3", 32, cls2.imc11()); Assert.assertEquals("test4", 33, cls2.imc12()); IDexA cls3 = new CZexA(); Assert.assertEquals("test10", 122, cls3.imc11()); Assert.assertEquals("test11", 123, cls3.imc12()); Assert.assertEquals("test12", 124, cls3.imd21()); CEexA cls4 = new CEexA(); Assert.assertEquals("test20", 40, cls4.cme41()); Assert.assertEquals("test21", 41, cls4.imd21()); CmdTransmitter.sendDone(); } @Test //Test overriding methods and interface methods public static void testMethods4(){ CAexD cls1 = new CAexD(); CBexD cls2 = new CBexD(); Assert.assertEquals("test1", 5, cls1.ima11()); Assert.assertEquals("test2", 259, cls1.ima12(3)); Assert.assertEquals("test11", -1, cls2.ima11()); Assert.assertEquals("test12", 25, cls2.ima12(5)); Assert.assertEquals("test13", 30, cls2.imc11()); CmdTransmitter.sendDone(); } @Test // tests a class implementing several interfaces public static void testMethods5(){ IEexB cls1 = new CZexB(); Assert.assertEquals("test1", 104, cls1.imXY()); Assert.assertEquals("test2", 102, cls1.imX1()); IGexB cls2 = new CTexB(2); Assert.assertEquals("test3", 43, cls2.imXY()); Assert.assertEquals("test4", 42, cls2.imf21()); Assert.assertEquals("test5", 61, cls2.img31()); IBexB cls3 = new CRexB(); Assert.assertEquals("test6", 31, cls3.ima11()); Assert.assertEquals("test7", 32, cls3.imX1()); CmdTransmitter.sendDone(); } @SuppressWarnings("unused") @Test // tests the class constructor of an interface public static void testInterfaceConstructor1() { Object cls = new CXexA(); int a = ((CXexA)cls).ima11(); Object o = ((CXexA)cls).cmx11(); Assert.assertTrue("test1", o != 
null); String str = ((CXexA)cls).str1; Assert.assertEquals("test2", str, "xyz"); str = ((CXexA)cls).str2[0]; Assert.assertEquals("test3", str, "abc"); str = ((CXexA)cls).str2[2]; Assert.assertEquals("test4", str, "ghi"); CmdTransmitter.sendDone(); } @Test // tests the class constructor of an interface public static void testInterfaceConstructor2() { Object cls = new CAexA(); String str = ((CAexA)cls).str1; Assert.assertEquals("test1", str, "xyz"); str = ((CAexA)cls).str2[0]; Assert.assertEquals("test2", str, "abc"); str = ((CAexA)cls).str2[2]; Assert.assertEquals("test3", str, "ghi"); CmdTransmitter.sendDone(); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.storm.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class ProfileRequest implements 
org.apache.thrift.TBase<ProfileRequest, ProfileRequest._Fields>, java.io.Serializable, Cloneable, Comparable<ProfileRequest> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ProfileRequest"); private static final org.apache.thrift.protocol.TField NODE_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("nodeInfo", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField ACTION_FIELD_DESC = new org.apache.thrift.protocol.TField("action", org.apache.thrift.protocol.TType.I32, (short)2); private static final org.apache.thrift.protocol.TField TIME_STAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("time_stamp", org.apache.thrift.protocol.TType.I64, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new ProfileRequestStandardSchemeFactory()); schemes.put(TupleScheme.class, new ProfileRequestTupleSchemeFactory()); } private NodeInfo nodeInfo; // required private ProfileAction action; // required private long time_stamp; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { NODE_INFO((short)1, "nodeInfo"), /** * * @see ProfileAction */ ACTION((short)2, "action"), TIME_STAMP((short)3, "time_stamp"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // NODE_INFO return NODE_INFO; case 2: // ACTION return ACTION; case 3: // TIME_STAMP return TIME_STAMP; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __TIME_STAMP_ISSET_ID = 0; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.TIME_STAMP}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.NODE_INFO, new org.apache.thrift.meta_data.FieldMetaData("nodeInfo", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NodeInfo.class))); tmpMap.put(_Fields.ACTION, new org.apache.thrift.meta_data.FieldMetaData("action", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, ProfileAction.class))); tmpMap.put(_Fields.TIME_STAMP, new org.apache.thrift.meta_data.FieldMetaData("time_stamp", org.apache.thrift.TFieldRequirementType.OPTIONAL, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ProfileRequest.class, metaDataMap); } public ProfileRequest() { } public ProfileRequest( NodeInfo nodeInfo, ProfileAction action) { this(); this.nodeInfo = nodeInfo; this.action = action; } /** * Performs a deep copy on <i>other</i>. */ public ProfileRequest(ProfileRequest other) { __isset_bitfield = other.__isset_bitfield; if (other.is_set_nodeInfo()) { this.nodeInfo = new NodeInfo(other.nodeInfo); } if (other.is_set_action()) { this.action = other.action; } this.time_stamp = other.time_stamp; } public ProfileRequest deepCopy() { return new ProfileRequest(this); } @Override public void clear() { this.nodeInfo = null; this.action = null; set_time_stamp_isSet(false); this.time_stamp = 0; } public NodeInfo get_nodeInfo() { return this.nodeInfo; } public void set_nodeInfo(NodeInfo nodeInfo) { this.nodeInfo = nodeInfo; } public void unset_nodeInfo() { this.nodeInfo = null; } /** Returns true if field nodeInfo is set (has been assigned a value) and false otherwise */ public boolean is_set_nodeInfo() { return this.nodeInfo != null; } public void set_nodeInfo_isSet(boolean value) { if (!value) { this.nodeInfo = null; } } /** * * @see ProfileAction */ public ProfileAction get_action() { return this.action; } /** * * @see ProfileAction */ public void set_action(ProfileAction action) { this.action = action; } public void unset_action() { this.action = null; } /** Returns true if field action is set (has been assigned a value) and false otherwise */ public boolean is_set_action() { return this.action != null; } public void set_action_isSet(boolean value) { if (!value) { this.action = null; } } public long get_time_stamp() { return this.time_stamp; } public void set_time_stamp(long time_stamp) { this.time_stamp = time_stamp; set_time_stamp_isSet(true); } public void 
unset_time_stamp() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __TIME_STAMP_ISSET_ID); } /** Returns true if field time_stamp is set (has been assigned a value) and false otherwise */ public boolean is_set_time_stamp() { return EncodingUtils.testBit(__isset_bitfield, __TIME_STAMP_ISSET_ID); } public void set_time_stamp_isSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __TIME_STAMP_ISSET_ID, value); } public void setFieldValue(_Fields field, Object value) { switch (field) { case NODE_INFO: if (value == null) { unset_nodeInfo(); } else { set_nodeInfo((NodeInfo)value); } break; case ACTION: if (value == null) { unset_action(); } else { set_action((ProfileAction)value); } break; case TIME_STAMP: if (value == null) { unset_time_stamp(); } else { set_time_stamp((Long)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case NODE_INFO: return get_nodeInfo(); case ACTION: return get_action(); case TIME_STAMP: return get_time_stamp(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case NODE_INFO: return is_set_nodeInfo(); case ACTION: return is_set_action(); case TIME_STAMP: return is_set_time_stamp(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof ProfileRequest) return this.equals((ProfileRequest)that); return false; } public boolean equals(ProfileRequest that) { if (that == null) return false; boolean this_present_nodeInfo = true && this.is_set_nodeInfo(); boolean that_present_nodeInfo = true && that.is_set_nodeInfo(); if (this_present_nodeInfo || that_present_nodeInfo) { if (!(this_present_nodeInfo && that_present_nodeInfo)) return false; if (!this.nodeInfo.equals(that.nodeInfo)) return 
false; } boolean this_present_action = true && this.is_set_action(); boolean that_present_action = true && that.is_set_action(); if (this_present_action || that_present_action) { if (!(this_present_action && that_present_action)) return false; if (!this.action.equals(that.action)) return false; } boolean this_present_time_stamp = true && this.is_set_time_stamp(); boolean that_present_time_stamp = true && that.is_set_time_stamp(); if (this_present_time_stamp || that_present_time_stamp) { if (!(this_present_time_stamp && that_present_time_stamp)) return false; if (this.time_stamp != that.time_stamp) return false; } return true; } @Override public int hashCode() { List<Object> list = new ArrayList<Object>(); boolean present_nodeInfo = true && (is_set_nodeInfo()); list.add(present_nodeInfo); if (present_nodeInfo) list.add(nodeInfo); boolean present_action = true && (is_set_action()); list.add(present_action); if (present_action) list.add(action.getValue()); boolean present_time_stamp = true && (is_set_time_stamp()); list.add(present_time_stamp); if (present_time_stamp) list.add(time_stamp); return list.hashCode(); } @Override public int compareTo(ProfileRequest other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(is_set_nodeInfo()).compareTo(other.is_set_nodeInfo()); if (lastComparison != 0) { return lastComparison; } if (is_set_nodeInfo()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.nodeInfo, other.nodeInfo); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_action()).compareTo(other.is_set_action()); if (lastComparison != 0) { return lastComparison; } if (is_set_action()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.action, other.action); if (lastComparison != 0) { return lastComparison; } } lastComparison = 
Boolean.valueOf(is_set_time_stamp()).compareTo(other.is_set_time_stamp()); if (lastComparison != 0) { return lastComparison; } if (is_set_time_stamp()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.time_stamp, other.time_stamp); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("ProfileRequest("); boolean first = true; sb.append("nodeInfo:"); if (this.nodeInfo == null) { sb.append("null"); } else { sb.append(this.nodeInfo); } first = false; if (!first) sb.append(", "); sb.append("action:"); if (this.action == null) { sb.append("null"); } else { sb.append(this.action); } first = false; if (is_set_time_stamp()) { if (!first) sb.append(", "); sb.append("time_stamp:"); sb.append(this.time_stamp); first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!is_set_nodeInfo()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'nodeInfo' is unset! Struct:" + toString()); } if (!is_set_action()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'action' is unset! 
Struct:" + toString()); } // check for sub-struct validity if (nodeInfo != null) { nodeInfo.validate(); } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class ProfileRequestStandardSchemeFactory implements SchemeFactory { public ProfileRequestStandardScheme getScheme() { return new ProfileRequestStandardScheme(); } } private static class ProfileRequestStandardScheme extends StandardScheme<ProfileRequest> { public void read(org.apache.thrift.protocol.TProtocol iprot, ProfileRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // NODE_INFO if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.nodeInfo = new NodeInfo(); struct.nodeInfo.read(iprot); struct.set_nodeInfo_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // ACTION if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.action = org.apache.storm.generated.ProfileAction.findByValue(iprot.readI32()); struct.set_action_isSet(true); } else { 
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // TIME_STAMP if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.time_stamp = iprot.readI64(); struct.set_time_stamp_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, ProfileRequest struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.nodeInfo != null) { oprot.writeFieldBegin(NODE_INFO_FIELD_DESC); struct.nodeInfo.write(oprot); oprot.writeFieldEnd(); } if (struct.action != null) { oprot.writeFieldBegin(ACTION_FIELD_DESC); oprot.writeI32(struct.action.getValue()); oprot.writeFieldEnd(); } if (struct.is_set_time_stamp()) { oprot.writeFieldBegin(TIME_STAMP_FIELD_DESC); oprot.writeI64(struct.time_stamp); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class ProfileRequestTupleSchemeFactory implements SchemeFactory { public ProfileRequestTupleScheme getScheme() { return new ProfileRequestTupleScheme(); } } private static class ProfileRequestTupleScheme extends TupleScheme<ProfileRequest> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, ProfileRequest struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; struct.nodeInfo.write(oprot); oprot.writeI32(struct.action.getValue()); BitSet optionals = new BitSet(); if (struct.is_set_time_stamp()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.is_set_time_stamp()) { oprot.writeI64(struct.time_stamp); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, ProfileRequest struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; 
struct.nodeInfo = new NodeInfo(); struct.nodeInfo.read(iprot); struct.set_nodeInfo_isSet(true); struct.action = org.apache.storm.generated.ProfileAction.findByValue(iprot.readI32()); struct.set_action_isSet(true); BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.time_stamp = iprot.readI64(); struct.set_time_stamp_isSet(true); } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.avatica.util;

import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

/*
 * THIS FILE HAS BEEN COPIED FROM THE APACHE CALCITE PROJECT UNTIL CALCITE-1884, CALCITE-3199 IS FIXED.
 * (Modified line: 839)
 */

/**
 * Utility functions for datetime types: date, time, timestamp.
 *
 * <p>Used by the JDBC driver.
 *
 * <p>TODO: review methods for performance. Due to allocations required, it may
 * be preferable to introduce a "formatter" with the required state.
 */
public class DateTimeUtils {
  /** The julian date of the epoch, 1970-01-01. */
  public static final int EPOCH_JULIAN = 2440588;

  private DateTimeUtils() {}

  //~ Static fields/initializers ---------------------------------------------

  /** The SimpleDateFormat string for ISO dates, "yyyy-MM-dd". */
  public static final String DATE_FORMAT_STRING = "yyyy-MM-dd";

  /** The SimpleDateFormat string for ISO times, "HH:mm:ss". */
  public static final String TIME_FORMAT_STRING = "HH:mm:ss";

  /** The SimpleDateFormat string for ISO timestamps,
   * "yyyy-MM-dd HH:mm:ss". */
  public static final String TIMESTAMP_FORMAT_STRING =
      DATE_FORMAT_STRING + " " + TIME_FORMAT_STRING;

  /** The GMT time zone.
   *
   * @deprecated Use {@link #UTC_ZONE} */
  @Deprecated // to be removed before 2.0
  public static final TimeZone GMT_ZONE = TimeZone.getTimeZone("GMT");

  /** The UTC time zone. */
  public static final TimeZone UTC_ZONE = TimeZone.getTimeZone("UTC");

  /** The Java default time zone. */
  public static final TimeZone DEFAULT_ZONE = TimeZone.getDefault();

  /**
   * The number of milliseconds in a second.
   */
  public static final long MILLIS_PER_SECOND = 1000L;

  /**
   * The number of milliseconds in a minute.
   */
  public static final long MILLIS_PER_MINUTE = 60000L;

  /**
   * The number of milliseconds in an hour.
   */
  public static final long MILLIS_PER_HOUR = 3600000L; // = 60 * 60 * 1000

  /**
   * The number of milliseconds in a day.
   *
   * <p>This is the modulo 'mask' used when converting
   * TIMESTAMP values to DATE and TIME values.
   */
  public static final long MILLIS_PER_DAY = 86400000; // = 24 * 60 * 60 * 1000

  /**
   * Calendar set to the epoch (1970-01-01 00:00:00 UTC). Useful for
   * initializing other values. Calendars are not immutable, so be careful not
   * to screw up this object for everyone else.
   */
  public static final Calendar ZERO_CALENDAR;

  static {
    ZERO_CALENDAR = Calendar.getInstance(DateTimeUtils.UTC_ZONE, Locale.ROOT);
    ZERO_CALENDAR.setTimeInMillis(0);
  }

  //~ Methods ----------------------------------------------------------------

  /**
   * Parses a string using {@link SimpleDateFormat} and a given pattern. This
   * method parses a string at the specified parse position and if successful,
   * updates the parse position to the index after the last character used.
   * The parsing is strict and requires months to be less than 12, days to be
   * less than 31, etc.
   *
   * @param s string to be parsed
   * @param dateFormat Date format
   * @param tz time zone in which to interpret string. Defaults to the Java
   *   default time zone
   * @param pp position to start parsing from
   * @return a Calendar initialized with the parsed value, or null if parsing
   *   failed. If returned, the Calendar is configured to the GMT time zone.
   */
  private static Calendar parseDateFormat(String s, DateFormat dateFormat,
      TimeZone tz, ParsePosition pp) {
    if (tz == null) {
      tz = DEFAULT_ZONE;
    }
    Calendar ret = Calendar.getInstance(tz, Locale.ROOT);
    dateFormat.setCalendar(ret);
    dateFormat.setLenient(false); // strict: reject month 13, day 32, etc.

    final Date d = dateFormat.parse(s, pp);
    if (null == d) {
      return null;
    }
    ret.setTime(d);
    // Normalize to UTC so callers see a time-zone-independent value.
    ret.setTimeZone(UTC_ZONE);
    return ret;
  }

  @Deprecated // to be removed before 2.0
  public static Calendar parseDateFormat(String s, String pattern,
      TimeZone tz) {
    return parseDateFormat(s, new SimpleDateFormat(pattern, Locale.ROOT), tz);
  }

  /**
   * Parses a string using {@link SimpleDateFormat} and a given pattern. The
   * entire string must match the pattern specified.
   *
   * @param s string to be parsed
   * @param dateFormat Date format
   * @param tz time zone in which to interpret string. Defaults to the Java
   *   default time zone
   * @return a Calendar initialized with the parsed value, or null if parsing
   *   failed. If returned, the Calendar is configured to the UTC time zone.
   */
  public static Calendar parseDateFormat(String s, DateFormat dateFormat,
      TimeZone tz) {
    ParsePosition pp = new ParsePosition(0);
    Calendar ret = parseDateFormat(s, dateFormat, tz, pp);
    if (pp.getIndex() != s.length()) {
      // Didn't consume entire string - not good
      return null;
    }
    return ret;
  }

  @Deprecated // to be removed before 2.0
  public static PrecisionTime parsePrecisionDateTimeLiteral(
      String s, String pattern, TimeZone tz) {
    assert pattern != null;
    return parsePrecisionDateTimeLiteral(s,
        new SimpleDateFormat(pattern, Locale.ROOT), tz, 3);
  }

  /**
   * Parses a string using {@link SimpleDateFormat} and a given pattern, and
   * if present, parses a fractional seconds component. The fractional seconds
   * component must begin with a decimal point ('.') followed by numeric
   * digits. The precision is rounded to a maximum of 3 digits of fractional
   * seconds precision (to obtain milliseconds).
   *
   * @param s string to be parsed
   * @param dateFormat Date format
   * @param tz time zone in which to interpret string. Defaults to the
   *   local time zone
   * @return a {@link DateTimeUtils.PrecisionTime PrecisionTime} initialized
   *   with the parsed value, or null if parsing failed. The PrecisionTime
   *   contains a GMT Calendar and a precision.
   */
  public static PrecisionTime parsePrecisionDateTimeLiteral(String s,
      DateFormat dateFormat, TimeZone tz, int maxPrecision) {
    final ParsePosition pp = new ParsePosition(0);
    final Calendar cal = parseDateFormat(s, dateFormat, tz, pp);
    if (cal == null) {
      return null; // Invalid date/time format
    }

    // Note: the Java SimpleDateFormat 'S' treats any number after
    // the decimal as milliseconds. That means 12:00:00.9 has 9
    // milliseconds and 12:00:00.9999 has 9999 milliseconds.
    int p = 0;
    String secFraction = "";
    if (pp.getIndex() < s.length()) {
      // Check to see if rest is decimal portion
      if (s.charAt(pp.getIndex()) != '.') {
        return null;
      }

      // Skip decimal sign
      pp.setIndex(pp.getIndex() + 1);

      // Parse decimal portion
      if (pp.getIndex() < s.length()) {
        secFraction = s.substring(pp.getIndex());
        // CALCITE-1884: reject anything after the point that is not all digits.
        if (!secFraction.matches("\\d+")) {
          return null;
        }
        NumberFormat nf = NumberFormat.getIntegerInstance(Locale.ROOT);
        Number num = nf.parse(s, pp);
        if ((num == null) || (pp.getIndex() != s.length())) {
          // Invalid decimal portion
          return null;
        }

        // Determine precision - only support prec 3 or lower
        // (milliseconds) Higher precisions are quietly rounded away
        p = secFraction.length();
        if (maxPrecision >= 0) {
          // If there is a maximum precision, ignore subsequent digits
          p = Math.min(maxPrecision, p);
          secFraction = secFraction.substring(0, p);
        }

        // Calculate milliseconds
        String millis = secFraction;
        if (millis.length() > 3) {
          millis = secFraction.substring(0, 3);
        }
        while (millis.length() < 3) {
          millis = millis + "0";
        }

        // parseInt avoids the needless boxing of Integer.valueOf.
        int ms = Integer.parseInt(millis);
        cal.add(Calendar.MILLISECOND, ms);
      }
    }

    assert pp.getIndex() == s.length();
    return new PrecisionTime(cal, secFraction, p);
  }

  /**
   * Gets the active time zone based on a Calendar argument
   */
  public static TimeZone getTimeZone(Calendar cal) {
    if (cal == null) {
      return DEFAULT_ZONE;
    }
    return cal.getTimeZone();
  }

  /**
   * Checks if the date/time format is valid
   *
   * @param pattern {@link SimpleDateFormat} pattern
   * @throws IllegalArgumentException if the given pattern is invalid
   */
  public static void checkDateFormat(String pattern) {
    // Constructor throws IllegalArgumentException for an invalid pattern.
    new SimpleDateFormat(pattern, Locale.ROOT);
  }

  /**
   * Creates a new date formatter with Farrago specific options. Farrago
   * parsing is strict and does not allow values such as day 0, month 13, etc.
   *
   * @param format {@link SimpleDateFormat} pattern
   */
  public static SimpleDateFormat newDateFormat(String format) {
    SimpleDateFormat sdf = new SimpleDateFormat(format, Locale.ROOT);
    sdf.setLenient(false);
    return sdf;
  }

  /** Helper for CAST({timestamp} AS VARCHAR(n)). */
  public static String unixTimestampToString(long timestamp) {
    return unixTimestampToString(timestamp, 0);
  }

  public static String unixTimestampToString(long timestamp, int precision) {
    final StringBuilder buf = new StringBuilder(17);
    int date = (int) (timestamp / MILLIS_PER_DAY);
    int time = (int) (timestamp % MILLIS_PER_DAY);
    if (time < 0) {
      // Timestamp before the epoch: borrow a day so 'time' is non-negative.
      --date;
      time += MILLIS_PER_DAY;
    }
    unixDateToString(buf, date);
    buf.append(' ');
    unixTimeToString(buf, time, precision);
    return buf.toString();
  }

  /** Helper for CAST({timestamp} AS VARCHAR(n)). */
  public static String unixTimeToString(int time) {
    return unixTimeToString(time, 0);
  }

  public static String unixTimeToString(int time, int precision) {
    final StringBuilder buf = new StringBuilder(8);
    unixTimeToString(buf, time, precision);
    return buf.toString();
  }

  private static void unixTimeToString(StringBuilder buf, int time,
      int precision) {
    int h = time / 3600000;
    int time2 = time % 3600000;
    int m = time2 / 60000;
    int time3 = time2 % 60000;
    int s = time3 / 1000;
    int ms = time3 % 1000;
    int2(buf, h);
    buf.append(':');
    int2(buf, m);
    buf.append(':');
    int2(buf, s);
    if (precision > 0) {
      buf.append('.');
      // Emit one fractional digit per precision unit; digits beyond
      // milliseconds come out as '0'.
      while (precision > 0) {
        buf.append((char) ('0' + (ms / 100)));
        ms = ms % 100;
        ms = ms * 10;
        --precision;
      }
    }
  }

  /** Appends {@code i} as exactly two decimal digits (modulo 100). */
  private static void int2(StringBuilder buf, int i) {
    buf.append((char) ('0' + (i / 10) % 10));
    buf.append((char) ('0' + i % 10));
  }

  /** Appends {@code i} as exactly four decimal digits (modulo 10000). */
  private static void int4(StringBuilder buf, int i) {
    buf.append((char) ('0' + (i / 1000) % 10));
    buf.append((char) ('0' + (i / 100) % 10));
    buf.append((char) ('0' + (i / 10) % 10));
    buf.append((char) ('0' + i % 10));
  }

  /** Helper for CAST({date} AS VARCHAR(n)). */
  public static String unixDateToString(int date) {
    final StringBuilder buf = new StringBuilder(10);
    unixDateToString(buf, date);
    return buf.toString();
  }

  private static void unixDateToString(StringBuilder buf, int date) {
    julianToString(buf, date + EPOCH_JULIAN);
  }

  private static void julianToString(StringBuilder buf, int julian) {
    final int[] ymd = julianToYmd(julian);
    int4(buf, ymd[0]);
    buf.append('-');
    int2(buf, ymd[1]);
    buf.append('-');
    int2(buf, ymd[2]);
  }

  /** Converts a Julian day number into {year, month, day}.
   *
   * <p>Algorithm from the book "Astronomical Algorithms" by Jean Meeus, 1998.
   * Uses the Gregorian calendar after the 1582 cutover (julian &gt; 2299160)
   * and the Julian calendar before it. Shared by {@link #julianToString},
   * {@link #julianExtract} and {@link #julianDateFloor}. */
  private static int[] julianToYmd(int julian) {
    int b, c;
    if (julian > 2299160) {
      int a = julian + 32044;
      b = (4 * a + 3) / 146097;
      c = a - b * 146097 / 4;
    } else {
      b = 0;
      c = julian + 32082;
    }
    int d = (4 * c + 3) / 1461;
    int e = c - (1461 * d) / 4;
    int m = (5 * e + 2) / 153;
    int day = e - (153 * m + 2) / 5 + 1;
    int month = m + 3 - 12 * (m / 10);
    int year = b * 100 + d - 4800 + (m / 10);
    return new int[] {year, month, day};
  }

  public static String intervalYearMonthToString(int v, TimeUnitRange range) {
    final StringBuilder buf = new StringBuilder();
    if (v >= 0) {
      buf.append('+');
    } else {
      buf.append('-');
      v = -v;
    }
    final int y;
    final int m;
    switch (range) {
    case YEAR:
      v = roundUp(v, 12);
      y = v / 12;
      buf.append(y);
      break;
    case YEAR_TO_MONTH:
      y = v / 12;
      buf.append(y);
      buf.append('-');
      m = v % 12;
      number(buf, m, 2);
      break;
    case MONTH:
      m = v;
      buf.append(m);
      break;
    default:
      throw new AssertionError(range);
    }
    return buf.toString();
  }

  /** Appends {@code v} to {@code buf}, zero-padded on the left to at least
   * {@code n} digits. */
  public static StringBuilder number(StringBuilder buf, int v, int n) {
    for (int k = digitCount(v); k < n; k++) {
      buf.append('0');
    }
    return buf.append(v);
  }

  /** Returns the number of decimal digits in {@code v} (at least 1). */
  public static int digitCount(int v) {
    for (int n = 1; true; n++) {
      v /= 10;
      if (v == 0) {
        return n;
      }
    }
  }

  private static int roundUp(int dividend, int divisor) {
    int remainder = dividend % divisor;
    dividend -= remainder;
    if (remainder * 2 > divisor) {
      dividend += divisor;
    }
    return dividend;
  }

  /** Cheap, unsafe, long power. power(2, 3) returns 8. */
  public static long powerX(long a, long b) {
    long x = 1;
    while (b > 0) {
      x *= a;
      --b;
    }
    return x;
  }

  public static String intervalDayTimeToString(long v, TimeUnitRange range,
      int scale) {
    final StringBuilder buf = new StringBuilder();
    if (v >= 0) {
      buf.append('+');
    } else {
      buf.append('-');
      v = -v;
    }
    final long ms;
    final long s;
    final long m;
    final long h;
    final long d;
    switch (range) {
    case DAY_TO_SECOND:
      v = roundUp(v, powerX(10, 3 - scale));
      ms = v % 1000;
      v /= 1000;
      s = v % 60;
      v /= 60;
      m = v % 60;
      v /= 60;
      h = v % 24;
      v /= 24;
      d = v;
      buf.append((int) d);
      buf.append(' ');
      number(buf, (int) h, 2);
      buf.append(':');
      number(buf, (int) m, 2);
      buf.append(':');
      number(buf, (int) s, 2);
      fraction(buf, scale, ms);
      break;
    case DAY_TO_MINUTE:
      v = roundUp(v, 1000 * 60);
      v /= 1000;
      v /= 60;
      m = v % 60;
      v /= 60;
      h = v % 24;
      v /= 24;
      d = v;
      buf.append((int) d);
      buf.append(' ');
      number(buf, (int) h, 2);
      buf.append(':');
      number(buf, (int) m, 2);
      break;
    case DAY_TO_HOUR:
      v = roundUp(v, 1000 * 60 * 60);
      v /= 1000;
      v /= 60;
      v /= 60;
      h = v % 24;
      v /= 24;
      d = v;
      buf.append((int) d);
      buf.append(' ');
      number(buf, (int) h, 2);
      break;
    case DAY:
      v = roundUp(v, 1000 * 60 * 60 * 24);
      d = v / (1000 * 60 * 60 * 24);
      buf.append((int) d);
      break;
    case HOUR:
      v = roundUp(v, 1000 * 60 * 60);
      v /= 1000;
      v /= 60;
      v /= 60;
      h = v;
      buf.append((int) h);
      break;
    case HOUR_TO_MINUTE:
      v = roundUp(v, 1000 * 60);
      v /= 1000;
      v /= 60;
      m = v % 60;
      v /= 60;
      h = v;
      buf.append((int) h);
      buf.append(':');
      number(buf, (int) m, 2);
      break;
    case HOUR_TO_SECOND:
      v = roundUp(v, powerX(10, 3 - scale));
      ms = v % 1000;
      v /= 1000;
      s = v % 60;
      v /= 60;
      m = v % 60;
      v /= 60;
      h = v;
      buf.append((int) h);
      buf.append(':');
      number(buf, (int) m, 2);
      buf.append(':');
      number(buf, (int) s, 2);
      fraction(buf, scale, ms);
      break;
    case MINUTE_TO_SECOND:
      v = roundUp(v, powerX(10, 3 - scale));
      ms = v % 1000;
      v /= 1000;
      s = v % 60;
      v /= 60;
      m = v;
      buf.append((int) m);
      buf.append(':');
      number(buf, (int) s, 2);
      fraction(buf, scale, ms);
      break;
    case MINUTE:
      v = roundUp(v, 1000 * 60);
      v /= 1000;
      v /= 60;
      m = v;
      buf.append((int) m);
      break;
    case SECOND:
      v = roundUp(v, powerX(10, 3 - scale));
      ms = v % 1000;
      v /= 1000;
      s = v;
      buf.append((int) s);
      fraction(buf, scale, ms);
      break;
    default:
      throw new AssertionError(range);
    }
    return buf.toString();
  }

  /**
   * Rounds a dividend to the nearest divisor.
   * For example roundUp(31, 10) yields 30; roundUp(37, 10) yields 40.
   * @param dividend Number to be divided
   * @param divisor Number to divide by
   * @return Rounded dividend
   */
  private static long roundUp(long dividend, long divisor) {
    long remainder = dividend % divisor;
    dividend -= remainder;
    if (remainder * 2 > divisor) {
      dividend += divisor;
    }
    return dividend;
  }

  private static void fraction(StringBuilder buf, int scale, long ms) {
    if (scale > 0) {
      buf.append('.');
      long v1 = scale == 3 ? ms
          : scale == 2 ? ms / 10
          : scale == 1 ? ms / 100
            : 0;
      number(buf, (int) v1, scale);
    }
  }

  public static int dateStringToUnixDate(String s) {
    int hyphen1 = s.indexOf('-');
    int y;
    int m;
    int d;
    if (hyphen1 < 0) {
      // Bare year: default month and day to 1.
      y = Integer.parseInt(s.trim());
      m = 1;
      d = 1;
    } else {
      y = Integer.parseInt(s.substring(0, hyphen1).trim());
      final int hyphen2 = s.indexOf('-', hyphen1 + 1);
      if (hyphen2 < 0) {
        m = Integer.parseInt(s.substring(hyphen1 + 1).trim());
        d = 1;
      } else {
        m = Integer.parseInt(s.substring(hyphen1 + 1, hyphen2).trim());
        d = Integer.parseInt(s.substring(hyphen2 + 1).trim());
      }
    }
    return ymdToUnixDate(y, m, d);
  }

  public static int timeStringToUnixDate(String v) {
    return timeStringToUnixDate(v, 0);
  }

  public static int timeStringToUnixDate(String v, int start) {
    final int colon1 = v.indexOf(':', start);
    int hour;
    int minute;
    int second;
    int milli;
    if (colon1 < 0) {
      hour = Integer.parseInt(v.trim());
      // NOTE(review): defaults of 1 (not 0) for missing minute/second look
      // suspicious but match the upstream Calcite behavior — preserved as-is.
      minute = 1;
      second = 1;
      milli = 0;
    } else {
      hour = Integer.parseInt(v.substring(start, colon1).trim());
      final int colon2 = v.indexOf(':', colon1 + 1);
      if (colon2 < 0) {
        minute = Integer.parseInt(v.substring(colon1 + 1).trim());
        second = 1;
        milli = 0;
      } else {
        minute = Integer.parseInt(v.substring(colon1 + 1, colon2).trim());
        int dot = v.indexOf('.', colon2);
        if (dot < 0) {
          second = Integer.parseInt(v.substring(colon2 + 1).trim());
          milli = 0;
        } else {
          second = Integer.parseInt(v.substring(colon2 + 1, dot).trim());
          milli = parseFraction(v.substring(dot + 1).trim(), 100);
        }
      }
    }
    return hour * (int) MILLIS_PER_HOUR
        + minute * (int) MILLIS_PER_MINUTE
        + second * (int) MILLIS_PER_SECOND
        + milli;
  }

  /** Parses a fraction, multiplying the first character by {@code multiplier},
   * the second character by {@code multiplier / 10},
   * the third character by {@code multiplier / 100}, and so forth.
   *
   * <p>For example, {@code parseFraction("1234", 100)} yields {@code 123}. */
  private static int parseFraction(String v, int multiplier) {
    int r = 0;
    for (int i = 0; i < v.length(); i++) {
      char c = v.charAt(i);
      int x = c < '0' || c > '9' ? 0 : (c - '0');
      r += multiplier * x;
      if (multiplier < 10) {
        // We're at the last digit. Check for rounding.
        if (i + 1 < v.length()
            && v.charAt(i + 1) >= '5') {
          ++r;
        }
        break;
      }
      multiplier /= 10;
    }
    return r;
  }

  public static long timestampStringToUnixDate(String s) {
    final long d;
    final long t;
    s = s.trim();
    int space = s.indexOf(' ');
    if (space >= 0) {
      d = dateStringToUnixDate(s.substring(0, space));
      t = timeStringToUnixDate(s, space + 1);
    } else {
      d = dateStringToUnixDate(s);
      t = 0;
    }
    return d * MILLIS_PER_DAY + t;
  }

  public static long unixDateExtract(TimeUnitRange range, long date) {
    return julianExtract(range, (int) date + EPOCH_JULIAN);
  }

  private static int julianExtract(TimeUnitRange range, int julian) {
    final int[] ymd = julianToYmd(julian);
    final int year = ymd[0];
    final int month = ymd[1];
    final int day = ymd[2];
    switch (range) {
    case YEAR:
      return year;
    case QUARTER:
      return (month + 2) / 3;
    case MONTH:
      return month;
    case DAY:
      return day;
    case DOW:
      return (int) floorMod(julian + 1, 7) + 1; // sun=1, sat=7
    case WEEK:
      long fmofw = firstMondayOfFirstWeek(year);
      if (julian < fmofw) {
        fmofw = firstMondayOfFirstWeek(year - 1);
      }
      return (int) (julian - fmofw) / 7 + 1;
    case DOY:
      final long janFirst = ymdToJulian(year, 1, 1);
      return (int) (julian - janFirst) + 1;
    case CENTURY:
      return year > 0
          ? (year + 99) / 100
          : (year - 99) / 100;
    case MILLENNIUM:
      return year > 0
          ? (year + 999) / 1000
          : (year - 999) / 1000;
    default:
      throw new AssertionError(range);
    }
  }

  /** Returns the first day of the first week of a year.
   * Per ISO-8601 it is the Monday of the week that contains Jan 4,
   * or equivalently, it is a Monday between Dec 29 and Jan 4.
   * Sometimes it is in the year before the given year. */
  private static long firstMondayOfFirstWeek(int year) {
    final long janFirst = ymdToJulian(year, 1, 1);
    final long janFirstDow = floorMod(janFirst + 1, 7); // sun=0, sat=6
    return janFirst + (11 - janFirstDow) % 7 - 3;
  }

  /** Extracts a time unit from a UNIX date (milliseconds since epoch). */
  public static int unixTimestampExtract(TimeUnitRange range,
      long timestamp) {
    return unixTimeExtract(range, (int) floorMod(timestamp, MILLIS_PER_DAY));
  }

  /** Extracts a time unit from a time value (milliseconds since midnight). */
  public static int unixTimeExtract(TimeUnitRange range, int time) {
    assert time >= 0;
    assert time < MILLIS_PER_DAY;
    switch (range) {
    case HOUR:
      return time / (int) MILLIS_PER_HOUR;
    case MINUTE:
      final int minutes = time / (int) MILLIS_PER_MINUTE;
      return minutes % 60;
    case SECOND:
      final int seconds = time / (int) MILLIS_PER_SECOND;
      return seconds % 60;
    default:
      throw new AssertionError(range);
    }
  }

  /** Resets to zero the "time" part of a timestamp. */
  public static long resetTime(long timestamp) {
    int date = (int) (timestamp / MILLIS_PER_DAY);
    return (long) date * MILLIS_PER_DAY;
  }

  /** Resets to epoch (1970-01-01) the "date" part of a timestamp. */
  public static long resetDate(long timestamp) {
    return floorMod(timestamp, MILLIS_PER_DAY);
  }

  public static long unixTimestampFloor(TimeUnitRange range,
      long timestamp) {
    int date = (int) (timestamp / MILLIS_PER_DAY);
    final int f = julianDateFloor(range, date + EPOCH_JULIAN, true);
    return (long) f * MILLIS_PER_DAY;
  }

  public static long unixDateFloor(TimeUnitRange range, long date) {
    return julianDateFloor(range, (int) date + EPOCH_JULIAN, true);
  }

  public static long unixTimestampCeil(TimeUnitRange range,
      long timestamp) {
    int date = (int) (timestamp / MILLIS_PER_DAY);
    final int f = julianDateFloor(range, date + EPOCH_JULIAN, false);
    return (long) f * MILLIS_PER_DAY;
  }

  public static long unixDateCeil(TimeUnitRange range, long date) {
    return julianDateFloor(range, (int) date + EPOCH_JULIAN, false);
  }

  private static int julianDateFloor(TimeUnitRange range, int julian,
      boolean floor) {
    final int[] ymd = julianToYmd(julian);
    int year = ymd[0];
    int month = ymd[1];
    final int day = ymd[2];
    switch (range) {
    case YEAR:
      if (!floor && (month > 1 || day > 1)) {
        ++year;
      }
      return ymdToUnixDate(year, 1, 1);
    case MONTH:
      if (!floor && day > 1) {
        ++month;
      }
      return ymdToUnixDate(year, month, 1);
    default:
      throw new AssertionError(range);
    }
  }

  public static int ymdToUnixDate(int year, int month, int day) {
    final int julian = ymdToJulian(year, month, day);
    return julian - EPOCH_JULIAN;
  }

  public static int ymdToJulian(int year, int month, int day) {
    int a = (14 - month) / 12;
    int y = year + 4800 - a;
    int m = month + 12 * a - 3;
    int j = day + (153 * m + 2) / 5
        + 365 * y
        + y / 4
        - y / 100
        + y / 400
        - 32045;
    if (j < 2299161) {
      // Before the Gregorian cutover: use the Julian-calendar formula.
      j = day + (153 * m + 2) / 5
          + 365 * y
          + y / 4
          - 32083;
    }
    return j;
  }

  public static long unixTimestamp(int year, int month, int day, int hour,
      int minute, int second) {
    final int date = ymdToUnixDate(year, month, day);
    return (long) date * MILLIS_PER_DAY
        + (long) hour * MILLIS_PER_HOUR
        + (long) minute * MILLIS_PER_MINUTE
        + (long) second * MILLIS_PER_SECOND;
  }

  /** Adds a given number of months to a timestamp, represented as the number
   * of milliseconds since the epoch. */
  public static long addMonths(long timestamp, int m) {
    final long millis =
        DateTimeUtils.floorMod(timestamp, DateTimeUtils.MILLIS_PER_DAY);
    timestamp -= millis;
    final long x =
        addMonths((int) (timestamp / DateTimeUtils.MILLIS_PER_DAY), m);
    return x * DateTimeUtils.MILLIS_PER_DAY + millis;
  }

  /** Adds a given number of months to a date, represented as the number of
   * days since the epoch. */
  public static int addMonths(int date, int m) {
    int y0 = (int) DateTimeUtils.unixDateExtract(TimeUnitRange.YEAR, date);
    int m0 = (int) DateTimeUtils.unixDateExtract(TimeUnitRange.MONTH, date);
    int d0 = (int) DateTimeUtils.unixDateExtract(TimeUnitRange.DAY, date);
    int y = m / 12;
    y0 += y;
    m0 += m - y * 12;
    int last = lastDay(y0, m0);
    if (d0 > last) {
      // Day overflows the target month: roll to day 1 of the next month.
      d0 = 1;
      if (++m0 > 12) {
        m0 = 1;
        ++y0;
      }
    }
    return DateTimeUtils.ymdToUnixDate(y0, m0, d0);
  }

  private static int lastDay(int y, int m) {
    switch (m) {
    case 2:
      return y % 4 == 0
          && (y % 100 != 0
          || y % 400 == 0)
          ? 29 : 28;
    case 4:
    case 6:
    case 9:
    case 11:
      return 30;
    default:
      return 31;
    }
  }

  /** Finds the number of months between two dates, each represented as the
   * number of days since the epoch. */
  public static int subtractMonths(int date0, int date1) {
    if (date0 < date1) {
      return -subtractMonths(date1, date0);
    }
    // Start with an estimate.
    // Since no month has more than 31 days, the estimate is <= the true value.
    int m = (date0 - date1) / 31;
    while (true) {
      int date2 = addMonths(date1, m);
      if (date2 >= date0) {
        return m;
      }
      int date3 = addMonths(date1, m + 1);
      if (date3 > date0) {
        return m;
      }
      ++m;
    }
  }

  public static int subtractMonths(long t0, long t1) {
    final long millis0 =
        DateTimeUtils.floorMod(t0, DateTimeUtils.MILLIS_PER_DAY);
    final int d0 = (int) DateTimeUtils.floorDiv(t0 - millis0,
        DateTimeUtils.MILLIS_PER_DAY);
    final long millis1 =
        DateTimeUtils.floorMod(t1, DateTimeUtils.MILLIS_PER_DAY);
    final int d1 = (int) DateTimeUtils.floorDiv(t1 - millis1,
        DateTimeUtils.MILLIS_PER_DAY);
    int x = subtractMonths(d0, d1);
    final long d2 = addMonths(d1, x);
    if (d2 == d0 && millis0 < millis1) {
      // Same day-of-month but a smaller time-of-day: not a full month yet.
      --x;
    }
    return x;
  }

  /** Divide, rounding towards negative infinity. */
  public static long floorDiv(long x, long y) {
    long r = x / y;
    // if the signs are different and modulo not zero, round down
    if ((x ^ y) < 0 && (r * y != x)) {
      r--;
    }
    return r;
  }

  /** Modulo, always returning a non-negative result. */
  public static long floorMod(long x, long y) {
    return x - floorDiv(x, y) * y;
  }

  /** Creates an instance of {@link Calendar} in the root locale and UTC time
   * zone. */
  public static Calendar calendar() {
    return Calendar.getInstance(UTC_ZONE, Locale.ROOT);
  }

  //~ Inner Classes ----------------------------------------------------------

  /**
   * Helper class for {@link DateTimeUtils#parsePrecisionDateTimeLiteral}
   */
  public static class PrecisionTime {
    private final Calendar cal;
    private final String fraction;
    private final int precision;

    public PrecisionTime(Calendar cal, String fraction, int precision) {
      this.cal = cal;
      this.fraction = fraction;
      this.precision = precision;
    }

    public Calendar getCalendar() {
      return cal;
    }

    public int getPrecision() {
      return precision;
    }

    public String getFraction() {
      return fraction;
    }
  }
}

// End DateTimeUtils.java
/* * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.impl; import com.hazelcast.cache.impl.record.CacheRecord; import com.hazelcast.config.CacheConfig; import com.hazelcast.internal.eviction.ExpiredKey; import com.hazelcast.internal.iteration.IterationPointer; import com.hazelcast.internal.nearcache.impl.invalidation.InvalidationQueue; import com.hazelcast.map.impl.MapEntries; import com.hazelcast.internal.serialization.Data; import com.hazelcast.internal.services.ObjectNamespace; import com.hazelcast.spi.impl.operationservice.Operation; import com.hazelcast.spi.merge.SplitBrainMergePolicy; import com.hazelcast.spi.merge.SplitBrainMergeTypes.CacheMergeTypes; import com.hazelcast.wan.impl.CallerProvenance; import javax.cache.expiry.ExpiryPolicy; import javax.cache.processor.EntryProcessor; import java.util.Collection; import java.util.Map; import java.util.Set; import java.util.UUID; /** * {@link ICacheRecordStore} is the core contract providing internal functionality to * {@link com.hazelcast.cache.ICache} implementations on partition scope. All of the ICache methods actually * map to a method on this interface through Hazelcast's RPC mechanism. Hazelcast * {@link Operation} is sent to the relevant partition to be executed and the final * results are returned to the callers. * <p> * For each partition, there is only one {@link ICacheRecordStore} in the cluster. 
* <p>Implementations of this interface may provide different internal data persistence like on-heap storage.</p>
* Each expirable cache entry is actually a {@link Data}, {@link CacheRecord} pair.
* <p>Key type: always serialized form of {@link Data}.</p>
* <p>Value types: depend on the configuration.</p>
*
* @see com.hazelcast.cache.impl.CacheRecordStore
*/
@SuppressWarnings("checkstyle:methodcount")
public interface ICacheRecordStore {

    /**
     * Gets the value to which the specified key is mapped,
     * or {@code null} if this cache contains no mapping for the key.
     * <p>
     * If the cache is configured to use read-through, and get would return null
     * because the entry is missing from the cache, the Cache's
     * {@link javax.cache.integration.CacheLoader} is called in an attempt to load the entry.
     * </p>
     *
     * @param key          the key whose associated value is to be returned.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @return the element, or null, if it does not exist.
     */
    Object get(Data key, ExpiryPolicy expiryPolicy);

    /**
     * Associates the specified value with the specified key in this cache.
     * <p>
     * If the cache previously contained a mapping for the key, the old value
     * is replaced by the specified value. (A cache <tt>c</tt> is said to contain a
     * mapping for a key <tt>k</tt> if and only if {@link #contains(Data) c.contains(k)}
     * would return <tt>true</tt>.)
     *
     * @param key          key with which the specified value is to be associated.
     * @param value        value to be associated with the specified key.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @param caller       UUID of the calling node or client.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     * @return the stored {@link CacheRecord} (added as new record or updated);
     *         <code>null</code> if the record has expired.
     */
    CacheRecord put(Data key, Object value, ExpiryPolicy expiryPolicy, UUID caller, int completionId);

    /**
     * Associates the specified value with the specified key in this cache,
     * returning an existing value if one existed.
     * <p>
     * If the cache previously contained a mapping for the key, the old value
     * is replaced by the specified value and returned; otherwise null is returned.
     *
     * @param key          key with which the specified value is to be associated.
     * @param value        value to be associated with the specified key.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @param caller       UUID of the calling node or client.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     * @return the value associated with the key at the start of the operation or
     *         null if none was associated.
     */
    Object getAndPut(Data key, Object value, ExpiryPolicy expiryPolicy, UUID caller, int completionId);

    /**
     * Atomically associates the specified key with the given value if and only if
     * this cache contains no mapping for the key.
     * <p>
     * This is equivalent to:
     * <pre><code>
     * if (!cache.containsKey(key)) {
     *   cache.put(key, value);
     *   return true;
     * } else {
     *   return false;
     * }
     * </code></pre>
     * except that the action is performed atomically.
     *
     * @param key          key with which the specified value is to be associated.
     * @param value        value to be associated with the specified key.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @param caller       UUID of the calling node or client.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     * @return true if a value was set.
     */
    boolean putIfAbsent(Data key, Object value, ExpiryPolicy expiryPolicy, UUID caller, int completionId);

    /**
     * Atomically removes the entry for a key only if it is currently mapped to some
     * value.
     * <p>
     * This is equivalent to:
     * <pre><code>
     * if (cache.containsKey(key)) {
     *   V oldValue = cache.get(key);
     *   cache.remove(key);
     *   return oldValue;
     * } else {
     *   return null;
     * }
     * </code></pre>
     * except that the action is performed atomically.
     *
     * @param key          key with which the specified value is associated.
     * @param caller       UUID of the calling node or client.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     * @return the value if one existed or null if no mapping existed for this key.
     */
    Object getAndRemove(Data key, UUID caller, int completionId);

    /**
     * Removes the mapping for a key from this cache if it is present.
     * <p>
     * More formally, if this cache contains a mapping from key <tt>k</tt> to
     * value <tt>v</tt> such that <code>(key==null ? k==null : key.equals(k))</code>,
     * that mapping is removed. (The cache can contain at most one such mapping.)
     * <p>
     * The cache will not contain a mapping for the specified key once the
     * call returns.
     *
     * @param key          key whose mapping is to be removed from the cache.
     * @param caller       UUID of the calling node or client.
     * @param origin       Source of the call
     * @param completionId User generated id which shall be received as a field of the cache event upon completion of
     *                     the request in the cluster.
     * @param provenance   caller operation provenance
     * @return returns false if there was no matching key.
     */
    boolean remove(Data key, UUID caller, UUID origin, int completionId, CallerProvenance provenance);

    /**
     * Variant of {@link #remove(Data, UUID, UUID, int, CallerProvenance)}
     * without an explicit caller provenance.
     */
    boolean remove(Data key, UUID caller, UUID origin, int completionId);

    /**
     * Atomically removes the mapping for a key only if currently mapped to the
     * given value.
     * <p>
     * This is equivalent to:
     * <pre><code>
     * if (cache.containsKey(key) &amp;&amp; equals(cache.get(key), oldValue) {
     *   cache.remove(key);
     *   return true;
     * } else {
     *   return false;
     * }
     * </code></pre>
     * except that the action is performed atomically.
     *
     * @param key          key whose mapping is to be removed from the cache.
     * @param value        value expected to be associated with the specified key.
     * @param caller       UUID of the calling node or client.
     * @param origin       Source of the call
     * @param completionId User generated id which shall be received as a field of the cache event upon completion of
     *                     the request in the cluster.
     * @return returns false if there was no matching key.
     */
    boolean remove(Data key, Object value, UUID caller, UUID origin, int completionId);

    /**
     * Atomically replaces the entry for a key only if currently mapped to some
     * value.
     * <p>
     * This is equivalent to
     * <pre><code>
     * if (cache.containsKey(key)) {
     *   cache.put(key, value);
     *   return true;
     * } else {
     *   return false;
     * }</code></pre>
     * except that the action is performed atomically.
     *
     * @param key          the key with which the specified value is associated.
     * @param value        the value to be associated with the specified key.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @param caller       UUID of the calling node or client.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     * @return <tt>true</tt> if the value was replaced.
     */
    boolean replace(Data key, Object value, ExpiryPolicy expiryPolicy, UUID caller, int completionId);

    /**
     * Atomically replaces the entry for a key only if currently mapped to a
     * given value.
     * <p>
     * This is equivalent to:
     * <pre><code>
     * if (cache.containsKey(key) &amp;&amp; equals(cache.get(key), oldValue)) {
     *   cache.put(key, newValue);
     *   return true;
     * } else {
     *   return false;
     * }
     * </code></pre>
     * except that the action is performed atomically.
     *
     * @param key          key with which the specified value is associated.
     * @param oldValue     value expected to be associated with the specified key.
     * @param newValue     value to be associated with the specified key.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @param caller       UUID of the calling node or client.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     * @return <tt>true</tt> if the value was replaced.
     */
    boolean replace(Data key, Object oldValue, Object newValue, ExpiryPolicy expiryPolicy, UUID caller, int completionId);

    /**
     * Atomically replaces the value for a given key if and only if there is a
     * value currently mapped by the key.
     * <p>
     * This is equivalent to
     * <pre><code>
     * if (cache.containsKey(key)) {
     *   V oldValue = cache.get(key);
     *   cache.put(key, value);
     *   return oldValue;
     * } else {
     *   return null;
     * }
     * </code></pre>
     * except that the action is performed atomically.
     *
     * @param key          key with which the specified value is associated.
     * @param value        value to be associated with the specified key.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @param caller       uuid of the calling node or client.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     * @return the previous value associated with the specified key, or
     *         <tt>null</tt> if there was no mapping for the key.
     */
    Object getAndReplace(Data key, Object value, ExpiryPolicy expiryPolicy, UUID caller, int completionId);

    /**
     * Sets expiry policy for the records with given keys if and only if there is a
     * value currently mapped by the key.
     *
     * @param keys         keys for the entries
     * @param expiryPolicy custom expiry policy or null to use configured default value
     * @param source       UUID of the calling node or client
     * @return true if the expiry policy was applied to at least one entry.
     */
    boolean setExpiryPolicy(Collection<Data> keys, Object expiryPolicy, UUID source);

    /** Returns the expiry policy associated with the given key, if any. */
    Object getExpiryPolicy(Data key);

    /**
     * Determines if this store contains an entry for the specified key.
     * <p>
     * More formally, returns <tt>true</tt> if and only if this store contains a
     * mapping for a key <tt>k</tt> such that <tt>key.equals(k)</tt>.
     * (There can be at most one such mapping.)
     *
     * @param key key whose presence in this store is to be tested.
     * @return <tt>true</tt> if this map contains a mapping for the specified key.
     */
    boolean contains(Data key);

    /**
     * Gets a collection of entries from the store, returning them as
     * {@link Map} of the values associated with the set of keys requested.
     * <p>
     * If the cache is configured read-through, and a get for a key would
     * return null because an entry is missing from the cache, the Cache's
     * {@link javax.cache.integration.CacheLoader} is called in an attempt to load the entry. If an
     * entry cannot be loaded for a given key, the key will not be present in
     * the returned Map.
     *
     * @param keySet       keys whose associated values are to be returned.
     * @param expiryPolicy custom expiry policy or null to use configured default value.
     * @return A simple wrapper for map of entries that were found for the given keys. Keys not found
     *         in the cache are not in the result.
     */
    MapEntries getAll(Set<Data> keySet, ExpiryPolicy expiryPolicy);

    /**
     * Calculates the entry size of this store which reflects the partition size of the cache.
     *
     * @return partition size of the cache.
     */
    int size();

    /**
     * Clears all internal data without publishing any events.
     */
    void clear();

    /**
     * Resets the record store to its initial state.
     * Used in replication operations.
     */
    void reset();

    /**
     * Records of keys will be deleted one by one and a REMOVE event is
     * published for each key.
     *
     * @param keys         set of keys to be cleaned.
     * @param completionId user generated id which shall be received as a field of the cache event
     *                     upon completion of the request in the cluster.
     */
    void removeAll(Set<Data> keys, int completionId);

    /**
     * Initializes record store.
     */
    void init();

    /**
     * Close is equivalent to below operations in the given order:
     * <ul>
     * <li>close resources.</li>
     * <li>unregister all listeners.</li>
     * </ul>
     *
     * @param onShutdown true if {@code close} is called during CacheService shutdown,
     *                   false otherwise.
     * @see #clear()
     * @see #destroy()
     */
    void close(boolean onShutdown);

    /**
     * Destroy is equivalent to below operations in the given order:
     * <ul>
     * <li>clear all.</li>
     * <li>close resources.</li>
     * <li>unregister all listeners.</li>
     * </ul>
     *
     * @see #clear()
     * @see #close(boolean)
     */
    void destroy();

    /**
     * Like {@link #destroy()} but does not touch state on other services
     * like event journal service.
     */
    void destroyInternals();

    /**
     * Gets the configuration of the cache that this store belongs to.
     *
     * @return {@link CacheConfig}
     */
    CacheConfig getConfig();

    /**
     * Gets the name of the distributed object name of the cache.
     *
     * @return name.
     */
    String getName();

    /**
     * Returns a readonly map of the internal key value store.
     *
     * @return readonly map of the internal key value store.
     */
    Map<Data, CacheRecord> getReadOnlyRecords();

    /** Returns true if entries of this store can expire. */
    boolean isExpirable();

    /**
     * Gets internal record of the store by key.
     *
     * @param key the key to the entry.
     * @return {@link CacheRecord} instance mapped.
     */
    CacheRecord getRecord(Data key);

    /**
     * Associates the specified record with the specified key.
     * This is simply a put operation on the internal map data
     * without any CacheLoad. It also <b>DOES</b> trigger eviction!
     *
     * @param key           the key to the entry.
     * @param record        the value to be associated with the specified key.
     * @param updateJournal when true an event is appended to related event-journal
     */
    void putRecord(Data key, CacheRecord record, boolean updateJournal);

    /**
     * Removes the record for a key.
     *
     * @param key the key to the entry.
     * @return the removed record if one exists.
     */
    CacheRecord removeRecord(Data key);

    /**
     * Fetch minimally {@code size} keys from the {@code pointers} position.
     * The key is fetched on-heap.
     * The method may return less keys if iteration has completed.
     * <p>
     * NOTE: The implementation is free to return more than {@code size} items.
     * This can happen if we cannot easily resume from the last returned item
     * by receiving the {@code tableIndex} of the last item. The index can
     * represent a bucket with multiple items and in this case the returned
     * object will contain all items in that bucket, regardless if we exceed
     * the requested {@code size}.
     *
     * @param pointers the pointers defining the state of iteration
     * @param size     the minimal count of returned items, unless iteration has completed
     * @return fetched keys and the new iteration state
     */
    CacheKeysWithCursor fetchKeys(IterationPointer[] pointers, int size);

    /**
     * Fetch minimally {@code size} items from the {@code pointers} position.
     * Both the key and value are fetched on-heap.
     * <p>
     * NOTE: The implementation is free to return more than {@code size} items.
     * This can happen if we cannot easily resume from the last returned item
     * by receiving the {@code tableIndex} of the last item. The index can
     * represent a bucket with multiple items and in this case the returned
     * object will contain all items in that bucket, regardless if we exceed
     * the requested {@code size}.
     *
     * @param pointers the pointers defining the state of iteration
     * @param size     the minimal count of returned items
     * @return fetched entries and the new iteration state
     */
    CacheEntriesWithCursor fetchEntries(IterationPointer[] pointers, int size);

    /**
     * Invokes an {@link EntryProcessor} against the {@link javax.cache.Cache.Entry} specified by
     * the provided key. If an {@link javax.cache.Cache.Entry} does not exist for the specified key,
     * an attempt is made to load it (if a loader is configured) or a surrogate
     * {@link javax.cache.Cache.Entry}, consisting of the key with a null value, is used instead.
     *
     * @param key            the key of the entry.
     * @param entryProcessor the {@link EntryProcessor} to be invoked.
     * @param arguments      additional arguments to be passed to the
     *                       {@link EntryProcessor}.
     * @param completionId   user generated id which shall be received as a field of the cache event
     *                       upon completion of the request in the cluster.
     * @return the result of the processing, if any, defined by the
     *         {@link EntryProcessor} implementation.
     */
    Object invoke(Data key, EntryProcessor entryProcessor, Object[] arguments, int completionId);

    /**
     * Synchronously loads the specified entries into the cache using the
     * configured {@link javax.cache.integration.CacheLoader} for the given keys.
     * <p>
     * If an entry for a key already exists in the cache, a value will be loaded
     * if and only if <code>replaceExistingValues</code> is true. If no loader
     * is configured for the cache, no objects will be loaded.
     *
     * @param keys                  the keys to be loaded.
     * @param replaceExistingValues when true, existing values in the cache will
     *                              be replaced by those loaded from a CacheLoader.
     * @return Set of keys which are successfully loaded.
     */
    Set<Data> loadAll(Set<Data> keys, boolean replaceExistingValues);

    /**
     * Gets the Cache statistics associated with this {@link com.hazelcast.cache.impl.CacheService}.
     *
     * @return {@link CacheStatisticsImpl} cache statistics.
     */
    CacheStatisticsImpl getCacheStats();

    /**
     * Evict cache record store if eviction is required.
     * <p>Eviction logic is handled as specified {@link com.hazelcast.config.EvictionPolicy}
     * in {@link com.hazelcast.config.CacheConfig} for this record store</p>
     *
     * @return true if an entry was evicted, otherwise false
     */
    boolean evictIfRequired();

    /** Samples the store and force-removes up to {@code count} entries. */
    void sampleAndForceRemoveEntries(int count);

    /**
     * Determines whether wan replication is enabled or not for this record store.
     *
     * @return <tt>true</tt> if wan replication is enabled for this record store, <tt>false</tt> otherwise
     */
    boolean isWanReplicationEnabled();

    /**
     * Returns {@link ObjectNamespace} associated with this record store.
     *
     * @return ObjectNamespace associated with this record store.
     */
    ObjectNamespace getObjectNamespace();

    /**
     * Merges the given {@link CacheMergeTypes} via the given {@link SplitBrainMergePolicy}.
     *
     * @param mergingEntry     the {@link CacheMergeTypes} instance to merge
     * @param mergePolicy      the {@link SplitBrainMergePolicy} instance to apply
     * @param callerProvenance provenance (WAN or not) of the merging call
     * @return the used {@link CacheRecord} if merge is applied, otherwise {@code null}
     */
    CacheRecord merge(CacheMergeTypes<Object, Object> mergingEntry,
                      SplitBrainMergePolicy<Object, CacheMergeTypes<Object, Object>, Object> mergePolicy,
                      CallerProvenance callerProvenance);

    /**
     * @return partition ID of this store
     */
    int getPartitionId();

    /**
     * Do expiration operations.
     *
     * @param percentage of max expirables according to the record store size.
     */
    void evictExpiredEntries(int percentage);

    /** Returns the queue of expired keys pending near-cache invalidation. */
    InvalidationQueue<ExpiredKey> getExpiredKeysQueue();

    /** Disposes memory blocks whose release was deferred. */
    void disposeDeferredBlocks();
}
package org.sagebionetworks.repo.manager.asynch; import java.io.IOException; import java.util.Arrays; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sagebionetworks.audit.dao.ObjectRecordDAO; import org.sagebionetworks.audit.utils.ObjectRecordBuilderUtils; import org.sagebionetworks.repo.manager.AuthorizationManager; import org.sagebionetworks.repo.model.DatastoreException; import org.sagebionetworks.repo.model.Snapshotable; import org.sagebionetworks.repo.model.StackStatusDao; import org.sagebionetworks.repo.model.UnauthorizedException; import org.sagebionetworks.repo.model.UserInfo; import org.sagebionetworks.repo.model.asynch.AsynchJobState; import org.sagebionetworks.repo.model.asynch.AsynchronousJobStatus; import org.sagebionetworks.repo.model.asynch.AsynchronousRequestBody; import org.sagebionetworks.repo.model.asynch.AsynchronousResponseBody; import org.sagebionetworks.repo.model.asynch.CacheableRequestBody; import org.sagebionetworks.repo.model.asynch.ReadOnlyRequestBody; import org.sagebionetworks.repo.model.audit.ObjectRecord; import org.sagebionetworks.repo.model.dao.asynch.AsynchronousJobStatusDAO; import org.sagebionetworks.repo.model.status.StatusEnum; import org.sagebionetworks.repo.transactions.RequiresNewReadCommitted; import org.sagebionetworks.repo.transactions.WriteTransaction; import org.sagebionetworks.repo.web.NotFoundException; import org.springframework.beans.factory.annotation.Autowired; public class AsynchJobStatusManagerImpl implements AsynchJobStatusManager { private static final String CACHED_MESSAGE_TEMPLATE = "Returning a cached job for user: %d, requestHash: %s, and jobId: %s"; static private Log log = LogFactory.getLog(AsynchJobStatusManagerImpl.class); private static final String JOB_ABORTED_MESSAGE = "Job aborted because the stack was not in: "+StatusEnum.READ_WRITE; @Autowired AsynchronousJobStatusDAO asynchJobStatusDao; @Autowired AuthorizationManager 
authorizationManager; @Autowired StackStatusDao stackStatusDao; @Autowired AsynchJobQueuePublisher asynchJobQueuePublisher; @Autowired JobHashProvider jobHashProvider; @Autowired ObjectRecordDAO objectRecordDAO; /* * (non-Javadoc) * @see org.sagebionetworks.repo.manager.asynch.AsynchJobStatusManager#lookupJobStatus(java.lang.String) */ @Override public AsynchronousJobStatus lookupJobStatus(String jobId) throws DatastoreException, NotFoundException { // Get the status AsynchronousJobStatus status = asynchJobStatusDao.getJobStatus(jobId); // If a job is running and the stack is not in READ-WRITE mode then the job is failed. if(AsynchJobState.PROCESSING.equals(status.getJobState())){ if (! (status.getRequestBody() instanceof ReadOnlyRequestBody)) { // Since the job is processing check the state of the stack. checkStackReadWrite(); } } return status; } @Override public AsynchronousJobStatus getJobStatus(UserInfo userInfo, String jobId) throws DatastoreException, NotFoundException { if(userInfo == null) throw new IllegalArgumentException("UserInfo cannot be null"); // Get the status AsynchronousJobStatus status = lookupJobStatus(jobId); // Only the user that started a job can read it if(!authorizationManager.isUserCreatorOrAdmin(userInfo, status.getStartedByUserId().toString())){ throw new UnauthorizedException("Only the user that created a job can access the job's status."); } return status; } @Override public void cancelJob(UserInfo userInfo, String jobId) throws DatastoreException, NotFoundException { if (userInfo == null) throw new IllegalArgumentException("UserInfo cannot be null"); // Get the status AsynchronousJobStatus status = asynchJobStatusDao.getJobStatus(jobId); // Only the user that started a job can read it if (!authorizationManager.isUserCreatorOrAdmin(userInfo, status.getStartedByUserId().toString())) { throw new UnauthorizedException("Only the user that created a job can stop the job."); } asynchJobStatusDao.setJobCanceling(jobId); } @Override public 
AsynchronousJobStatus startJob(UserInfo user, AsynchronousRequestBody body) throws DatastoreException, NotFoundException { if(user == null) throw new IllegalArgumentException("UserInfo cannot be null"); if(body == null) throw new IllegalArgumentException("Body cannot be null"); if(body instanceof CacheableRequestBody){ /* * Before we start a CacheableRequestBody job, we need to determine if a job already exists * for this request and user. */ String requestHash = jobHashProvider.getJobHash((CacheableRequestBody) body); // if the requestHash is null the job cannot be cached. if(requestHash != null){ // Does this job already exist AsynchronousJobStatus status = findJobsMatching(requestHash, body, user.getId()); if(status != null){ /* * If here then the caller has already made this exact request * and the object has not changed since the last request. * Therefore, we return the same job status as before without * starting a new job. */ log.info(String.format(CACHED_MESSAGE_TEMPLATE, user.getId(), requestHash, status.getJobId())); return status; } } } // Start the job. AsynchronousJobStatus status = asynchJobStatusDao.startJob(user.getId(), body); // publish a message to get the work started asynchJobQueuePublisher.publishMessage(status); return status; } /** * Find a job that matches the given requestHash, objectEtag, body and userId. * * @param requestHash * @param objectEtag * @param body * @param userId * @return */ private AsynchronousJobStatus findJobsMatching(String requestHash, AsynchronousRequestBody body, Long userId){ // Find all jobs that match this request. 
List<AsynchronousJobStatus> matches = asynchJobStatusDao.findCompletedJobStatus(requestHash, userId); if (matches != null) { for(AsynchronousJobStatus match: matches){ if(body.equals(match.getRequestBody())){ return match; } } } // no match found return null; } @RequiresNewReadCommitted @Override public void updateJobProgress(String jobId, Long progressCurrent, Long progressTotal, String progressMessage) { // Progress can only be updated if the stack is in read-write mode. checkStackReadWrite(); asynchJobStatusDao.updateJobProgress(jobId, progressCurrent, progressTotal, progressMessage); } /** * If the stack is not in read-write mode an IllegalStateException will be thrown. */ private void checkStackReadWrite() { if(!StatusEnum.READ_WRITE.equals(stackStatusDao.getCurrentStatus())){ throw new IllegalStateException(JOB_ABORTED_MESSAGE); } } @WriteTransaction @Override public String setJobFailed(String jobId, Throwable error) { // We allow a job to fail even if the stack is not in read-write mode. return asynchJobStatusDao.setJobFailed(jobId, error); } @WriteTransaction @Override public void setJobCanceling(String jobId) { // We allow a job to cancel even if the stack is not in read-write mode. asynchJobStatusDao.setJobCanceling(jobId); } @WriteTransaction @Override public String setComplete(String jobId, AsynchronousResponseBody body) throws DatastoreException, NotFoundException, IOException { /* * For a cacheable requests we need to calculate a request hash. * This hash can be used to find jobs that already match an existing request. 
*/ AsynchronousJobStatus status = lookupJobStatus(jobId); String requestHash = null; if(status.getRequestBody() instanceof CacheableRequestBody){ CacheableRequestBody request = (CacheableRequestBody) status.getRequestBody(); requestHash = jobHashProvider.getJobHash(request); } // capture the body of the response if (body instanceof Snapshotable) { ObjectRecord record = ObjectRecordBuilderUtils.buildObjectRecord(body, System.currentTimeMillis()); objectRecordDAO.saveBatch(Arrays.asList(record), record.getJsonClassName()); } return asynchJobStatusDao.setComplete(jobId, body, requestHash); } @Override public void emptyAllQueues() { asynchJobQueuePublisher.emptyAllQueues(); } }
/* * Copyright 2008 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.rhino.Node; import junit.framework.TestCase; /** * Tests for {@link MustBeReachingVariableDef}. * */ public final class MustBeReachingVariableDefTest extends TestCase { private MustBeReachingVariableDef defUse = null; private Node def = null; private Node use = null; public static final String EXTERNS = "var goog = {}"; public void testStraightLine() { assertMatch("D:var x=1; U: x"); assertMatch("var x; D:x=1; U: x"); assertNotMatch("D:var x=1; x = 2; U: x"); assertMatch("var x=1; D:x=2; U: x"); assertNotMatch("U:x; D:var x = 1"); assertNotMatch("D:var x; U:x; x=1"); assertNotMatch("D:var x; U:x; x=1; x"); assertMatch("D: var x = 1; var y = 2; y; U:x"); } public void testIf() { assertNotMatch("var x; if(a){ D:x=1 } else { x=2 }; U:x"); assertNotMatch("var x; if(a){ x=1 } else { D:x=2 }; U:x"); assertMatch("D:var x=1; if(a){ U:x } else { x };"); assertMatch("D:var x=1; if(a){ x } else { U:x };"); assertNotMatch("var x; if(a) { D: x = 1 }; U:x;"); } public void testLoops() { assertNotMatch("var x=0; while(a){ D:x=1 }; U:x"); assertNotMatch("var x=0; for(;;) { D:x=1 }; U:x"); assertMatch("D:var x=1; while(a) { U:x }"); assertMatch("D:var x=1; for(;;) { U:x }"); } public void testConditional() { assertMatch("var x=0,y; D:(x=1)&&y; U:x"); 
assertNotMatch("var x=0,y; D:y&&(x=1); U:x"); } public void testUseAndDefInSameInstruction() { assertMatch("D:var x=0; U:x=1,x"); assertMatch("D:var x=0; U:x,x=1"); } public void testAssignmentInExpressions() { assertMatch("var x=0; D:foo(bar(x=1)); U:x"); assertMatch("var x=0; D:foo(bar + (x = 1)); U:x"); } public void testHook() { assertNotMatch("var x=0; D:foo() ? x=1 : bar(); U:x"); assertNotMatch("var x=0; D:foo() ? x=1 : x=2; U:x"); } public void testExpressionVariableReassignment() { assertMatch("var a,b; D: var x = a + b; U:x"); assertNotMatch("var a,b,c; D: var x = a + b; a = 1; U:x"); assertNotMatch("var a,b,c; D: var x = a + b; f(b = 1); U:x"); assertMatch("var a,b,c; D: var x = a + b; c = 1; U:x"); // Even if the sub-expression is change conditionally assertNotMatch("var a,b,c; D: var x = a + b; c ? a = 1 : 0; U:x"); } public void testMergeDefinitions() { assertNotMatch("var x,y; D: y = x + x; if(x) { x = 1 }; U:y"); } public void testMergesWithOneDefinition() { assertNotMatch( "var x,y; while(y) { if (y) { print(x) } else { D: x = 1 } } U:x"); } public void testRedefinitionUsingItself() { assertMatch("var x = 1; D: x = x + 1; U:x;"); assertNotMatch("var x = 1; D: x = x + 1; x = 1; U:x;"); } public void testMultipleDefinitionsWithDependence() { assertMatch("var x, a, b; D: x = a, x = b; U: x"); assertMatch("var x, a, b; D: x = a, x = b; a = 1; U: x"); assertNotMatch("var x, a, b; D: x = a, x = b; b = 1; U: x"); } public void testExterns() { assertNotMatch("D: goog = {}; U: goog"); } public void testAssignmentOp() { assertMatch("var x = 0; D: x += 1; U: x"); assertMatch("var x = 0; D: x *= 1; U: x"); assertNotMatch("D: var x = 0; x += 1; U: x"); } public void testIncAndDec() { assertMatch("var x; D: x++; U: x"); assertMatch("var x; D: x--; U: x"); } public void testFunctionParams1() { computeDefUse("if (param2) { D: param1 = 1; U: param1 }"); assertSame(def, defUse.getDefNode("param1", use)); } public void testFunctionParams2() { computeDefUse("if 
(param2) { D: param1 = 1} U: param1"); assertNotSame(def, defUse.getDefNode("param1", use)); } public void testArgumentsObjectModifications() { computeDefUse("D: param1 = 1; arguments[0] = 2; U: param1"); assertNotSame(def, defUse.getDefNode("param1", use)); } public void testArgumentsObjectEscaped() { computeDefUse("D: param1 = 1; var x = arguments; x[0] = 2; U: param1"); assertNotSame(def, defUse.getDefNode("param1", use)); } public void testArgumentsObjectEscapedDependents() { assertNotMatch("param1=1; var x; D:x=param1; var y=arguments; U:x"); } /** * The use of x at U: is the definition of x at D:. */ private void assertMatch(String src) { computeDefUse(src); assertSame(def, defUse.getDefNode("x", use)); } /** * The use of x at U: is not the definition of x at D:. */ private void assertNotMatch(String src) { computeDefUse(src); assertNotSame(def, defUse.getDefNode("x", use)); } /** * Computes reaching definition on given source. */ private void computeDefUse(String src) { Compiler compiler = new Compiler(); src = "function _FUNCTION(param1, param2){" + src + "}"; Node root = compiler.parseTestCode(src).getFirstChild(); assertEquals(0, compiler.getErrorCount()); Scope scope = SyntacticScopeCreator.makeUntyped(compiler).createScope(root, null); ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false, true); cfa.process(null, root); ControlFlowGraph<Node> cfg = cfa.getCfg(); defUse = new MustBeReachingVariableDef(cfg, scope, compiler); defUse.analyze(); def = null; use = null; new NodeTraversal(compiler,new LabelFinder()).traverse(root); assertNotNull("Code should have an instruction labeled D", def); assertNotNull("Code should have an instruction labeled U", use); } /** * Finds the D: and U: label and store which node they point to. 
*/ private class LabelFinder extends AbstractPostOrderCallback { @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isLabel()) { if (n.getFirstChild().getString().equals("D")) { def = n.getLastChild(); } else if (n.getFirstChild().getString().equals("U")) { use = n.getLastChild(); } } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.net; import java.io.IOException; import org.apache.harmony.luni.util.Msg; /** * This abstract subclass of <code>URLConnection</code> defines method for * managing HTTP connection according to the description given by RFC 2068 * * @see ContentHandler * @see URL * @see URLConnection * @see URLStreamHandler */ public abstract class HttpURLConnection extends URLConnection { @SuppressWarnings("nls") private String methodTokens[] = { "GET", "DELETE", "HEAD", "OPTIONS", "POST", "PUT", "TRACE" }; // Request method, DEFAULT: "GET" protected String method = "GET"; //$NON-NLS-1$ // Response code obtained from the request protected int responseCode = -1; // Response message, corresponds to the response code protected String responseMessage; protected boolean instanceFollowRedirects = followRedirects; private static boolean followRedirects = true; protected int chunkLength = -1; protected int fixedContentLength = -1; private final static int DEFAULT_CHUNK_LENGTH = 1024; // 2XX: generally "OK" // 3XX: relocation/redirect // 4XX: client error // 5XX: server error /** * Numeric status code, 202: Accepted */ public final static int HTTP_ACCEPTED = 202; /** * Numeric status code, 502: Bad Gateway */ 
public final static int HTTP_BAD_GATEWAY = 502; /** * Numeric status code, 405: Bad Method */ public final static int HTTP_BAD_METHOD = 405; /** * Numeric status code, 400: Bad Request */ public final static int HTTP_BAD_REQUEST = 400; /** * Numeric status code, 408: Client Timeout */ public final static int HTTP_CLIENT_TIMEOUT = 408; /** * Numeric status code, 409: Conflict */ public final static int HTTP_CONFLICT = 409; /** * Numeric status code, 201: Created */ public final static int HTTP_CREATED = 201; /** * Numeric status code, 413: Entity too large */ public final static int HTTP_ENTITY_TOO_LARGE = 413; /** * Numeric status code, 403: Forbidden */ public final static int HTTP_FORBIDDEN = 403; /** * Numeric status code, 504: Gateway timeout */ public final static int HTTP_GATEWAY_TIMEOUT = 504; /** * Numeric status code, 410: Gone */ public final static int HTTP_GONE = 410; /** * Numeric status code, 500: Internal error */ public final static int HTTP_INTERNAL_ERROR = 500; /** * Numeric status code, 411: Length required */ public final static int HTTP_LENGTH_REQUIRED = 411; /** * Numeric status code, 301 Moved permanently */ public final static int HTTP_MOVED_PERM = 301; /** * Numeric status code, 302: Moved temporarily */ public final static int HTTP_MOVED_TEMP = 302; /** * Numeric status code, 300: Multiple choices */ public final static int HTTP_MULT_CHOICE = 300; /** * Numeric status code, 204: No content */ public final static int HTTP_NO_CONTENT = 204; /** * Numeric status code, 406: Not acceptable */ public final static int HTTP_NOT_ACCEPTABLE = 406; /** * Numeric status code, 203: Not authoritative */ public final static int HTTP_NOT_AUTHORITATIVE = 203; /** * Numeric status code, 404: Not found */ public final static int HTTP_NOT_FOUND = 404; /** * Numeric status code, 501: Not implemented */ public final static int HTTP_NOT_IMPLEMENTED = 501; /** * Numeric status code, 304: Not modified */ public final static int HTTP_NOT_MODIFIED = 304; /** * 
Numeric status code, 200: OK */ public final static int HTTP_OK = 200; /** * Numeric status code, 206: Partial */ public final static int HTTP_PARTIAL = 206; /** * Numeric status code, 402: Payment required */ public final static int HTTP_PAYMENT_REQUIRED = 402; /** * Numeric status code, 412: Precondition failed */ public final static int HTTP_PRECON_FAILED = 412; /** * Numeric status code, 407: Proxy authentication required */ public final static int HTTP_PROXY_AUTH = 407; /** * Numeric status code, 414: Request too long */ public final static int HTTP_REQ_TOO_LONG = 414; /** * Numeric status code, 205: Reset */ public final static int HTTP_RESET = 205; /** * Numeric status code, 303: See other */ public final static int HTTP_SEE_OTHER = 303; /** * @deprecated Use HTTP_INTERNAL_ERROR */ @Deprecated public final static int HTTP_SERVER_ERROR = 500; /** * Numeric status code, 305: Use proxy */ public final static int HTTP_USE_PROXY = 305; /** * Numeric status code, 401: Unauthorized */ public final static int HTTP_UNAUTHORIZED = 401; /** * Numeric status code, 415: Unsupported type */ public final static int HTTP_UNSUPPORTED_TYPE = 415; /** * Numeric status code, 503: Unavailable */ public final static int HTTP_UNAVAILABLE = 503; /** * Numeric status code, 505: Version not supported */ public final static int HTTP_VERSION = 505; /** * Constructs a <code>HttpURLConnection</code> pointing to the resource * specified by the <code>URL</code>. * * @param url * the URL of this connection * * @see URL * @see URLConnection */ protected HttpURLConnection(URL url) { super(url); } /** * Closes the connection with the HTTP server * * @see URLConnection#connect() * @see URLConnection#connected */ public abstract void disconnect(); /** * Answers a input stream from the server in the case of error such as the * requested file (txt, htm, html) is not found on the remote server. * <p> * If the content type is not what stated above, * <code>FileNotFoundException</code> is thrown. 
* * @return the error input stream returned by the server. */ public java.io.InputStream getErrorStream() { return null; } /** * Answers the value of <code>followRedirects</code> which indicates if * this connection will follows a different URL redirected by the server. It * is enabled by default. * * @return The value of the flag * * @see #setFollowRedirects */ public static boolean getFollowRedirects() { return followRedirects; } /** * Answers the permission object (in this case, SocketPermission) with the * host and the port number as the target name and "resolve, connect" as the * action list. * * @return the permission object required for this connection * * @throws IOException * if an IO exception occurs during the creation of the * permission object. */ @Override public java.security.Permission getPermission() throws IOException { int port = url.getPort(); if (port < 0) { port = 80; } return new SocketPermission(url.getHost() + ":" + port, //$NON-NLS-1$ "connect, resolve"); //$NON-NLS-1$ } /** * Answers the request method which will be used to make the request to the * remote HTTP server. All possible methods of this HTTP implementation is * listed in the class definition. * * @return the request method string * * @see #method * @see #setRequestMethod */ public String getRequestMethod() { return method; } /** * Answers the response code returned by the remote HTTP server * * @return the response code, -1 if no valid response code * * @throws IOException * if there is an IO error during the retrieval. 
* * @see #getResponseMessage */ public int getResponseCode() throws IOException { // Call getInputStream() first since getHeaderField() doesn't return // exceptions getInputStream(); String response = getHeaderField(0); if (response == null) { return -1; } response = response.trim(); int mark = response.indexOf(" ") + 1; //$NON-NLS-1$ if (mark == 0) { return -1; } int last = mark + 3; if (last > response.length()) { last = response.length(); } responseCode = Integer.parseInt(response.substring(mark, last)); if (last + 1 <= response.length()) { responseMessage = response.substring(last + 1); } return responseCode; } /** * Answers the response message returned the remote HTTP server * * @return the response message. <code>null</code> if such response exists * * @throws IOException * if there is an IO error during the retrieval. * * @see #getResponseCode() * @see IOException */ public String getResponseMessage() throws IOException { if (responseMessage != null) { return responseMessage; } getResponseCode(); return responseMessage; } /** * Sets the flag of whether this connection will follow redirects returned * by the remote server. This method can only be called with the permission * from the security manager * * @param auto * The value to set * * @see java.lang.SecurityManager#checkSetFactory() */ public static void setFollowRedirects(boolean auto) { SecurityManager security = System.getSecurityManager(); if (security != null) { security.checkSetFactory(); } followRedirects = auto; } /** * Sets the request command which will be sent to the remote HTTP server. * This method can only be called before the connection is made. * * @param method * The <code>non-null</code> string representing the method * * @throws ProtocolException * Thrown when this is called after connected, or the method is * not supported by this HTTP implementation. 
* * @see #getRequestMethod() * @see #method */ public void setRequestMethod(String method) throws ProtocolException { if (connected) { throw new ProtocolException(Msg.getString("K0037")); //$NON-NLS-1$ } for (int i = 0; i < methodTokens.length; i++) { if (methodTokens[i].equals(method)) { // if there is a supported method that matches the desired // method, then set the current method and return this.method = methodTokens[i]; return; } } // if none matches, then throw ProtocolException throw new ProtocolException(); } /** * Answers if this connection uses proxy. * * @return true if this connection supports proxy, false otherwise. */ public abstract boolean usingProxy(); /** * Answers if this connection follows redirects. * * @return true if this connection follows redirects, false otherwise. */ public boolean getInstanceFollowRedirects() { return instanceFollowRedirects; } /** * Sets if this connection follows redirects. * * @param followRedirects * true if this connection should follows redirects, false * otherwise. */ public void setInstanceFollowRedirects(boolean followRedirects) { instanceFollowRedirects = followRedirects; } /** * Answers the date value in the form of milliseconds since epoch * corresponding to the field <code>field</code>. Answers * <code>defaultValue</code> if no such field can be found in the response * header. * * @param field * the field in question * @param defaultValue * the default value if no field is found * @return milliseconds since epoch */ @Override public long getHeaderFieldDate(String field, long defaultValue) { return super.getHeaderFieldDate(field, defaultValue); } /** * If length of a HTTP request body is known ahead, sets fixed length to * enable streaming without buffering. Sets after connection will cause an * exception. 
* * @see <code>setChunkedStreamingMode</code> * @param contentLength * the fixed length of the HTTP request body * @throws IllegalStateException * if already connected or other mode already set * @throws IllegalArgumentException * if contentLength is less than zero */ public void setFixedLengthStreamingMode(int contentLength) { if (super.connected) { throw new IllegalStateException(Msg.getString("K0079")); //$NON-NLS-1$ } if (0 < chunkLength) { throw new IllegalStateException(Msg.getString("KA003")); //$NON-NLS-1$ } if (0 > contentLength) { throw new IllegalArgumentException(Msg.getString("K0051")); //$NON-NLS-1$ } this.fixedContentLength = contentLength; } /** * If length of a HTTP request body is NOT known ahead, enable chunked * transfer encoding to enable streaming without buffering. Notice that not * all http servers support this mode. Sets after connection will cause an * exception. * * @see <code>setFixedLengthStreamingMode</code> * @param chunklen * the length of a chunk * @throws IllegalStateException * if already connected or other mode already set */ public void setChunkedStreamingMode(int chunklen) { if (super.connected) { throw new IllegalStateException(Msg.getString("K0079")); //$NON-NLS-1$ } if (0 <= fixedContentLength) { throw new IllegalStateException(Msg.getString("KA003")); //$NON-NLS-1$ } if (0 >= chunklen) { chunkLength = DEFAULT_CHUNK_LENGTH; } else { chunkLength = chunklen; } } }
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.xdebugger.impl.ui.tree;

import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.execution.Executor;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.execution.ui.RunContentManager;
import com.intellij.execution.ui.RunContentWithExecutorListener;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.wm.IdeFocusManager;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.plaf.basic.ComboPopup;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Base class for an inplace editor overlaid on a node of a debugger tree.
 *
 * <p>Lifecycle: {@link #show()} creates the editor component, positions it on
 * the tree's layered pane over the node returned by {@link #getNodePath()},
 * and wires up listeners that keep it positioned and dismiss it when the
 * surrounding UI changes. {@link #doOKAction()} / {@link #cancelEditing()}
 * both route through the private {@code hide()}, which runs every teardown
 * action accumulated in {@code myRemoveActions} during {@code show()}.
 *
 * <p>Registered as an {@link AWTEventListener} while shown so that mouse
 * activity anywhere in the application can commit or cancel the edit.
 *
 * @author nik
 */
public abstract class TreeInplaceEditor implements AWTEventListener {
  // NOTE(review): logger category names the old debugger-impl class, not this
  // one — presumably kept for log-category continuity after a move.
  private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.ui.impl.watch.DebuggerTreeInplaceEditor");

  // Non-null exactly while the editor is shown; isShown() tests this.
  private JComponent myInplaceEditorComponent;

  // Teardown actions registered during show(); hide() runs then clears them.
  private final List<Runnable> myRemoveActions = new ArrayList<>();

  // Disposed on hide(); used as the parent for subscriptions made in show().
  protected final Disposable myDisposable = Disposer.newDisposable();

  /** Creates the component that is overlaid on the tree node being edited. */
  protected abstract JComponent createInplaceEditorComponent();

  /** Component that should receive focus once the editor is shown. */
  protected abstract JComponent getPreferredFocusedComponent();

  public abstract Editor getEditor();

  public abstract JComponent getEditorComponent();

  /** Path of the tree node the editor is attached to. */
  protected abstract TreePath getNodePath();

  protected abstract JTree getTree();

  /** Invoked on a click inside a combo popup; defaults to committing. */
  protected void doPopupOKAction() {
    doOKAction();
  }

  /** Commits the edit. Subclasses typically apply the value, then call super. */
  public void doOKAction() {
    hide();
  }

  /** Cancels the edit without applying the value. */
  public void cancelEditing() {
    hide();
  }

  /**
   * Removes the editor from the layered pane and undoes everything show()
   * set up, then returns focus to the tree.
   */
  private void hide() {
    if (!isShown()) {
      return;
    }
    // Null the component FIRST so isShown() is already false for any
    // callbacks (e.g. the deferred AWT-listener registration) that run
    // during teardown.
    myInplaceEditorComponent = null;
    onHidden();
    myRemoveActions.forEach(Runnable::run);
    myRemoveActions.clear();

    Disposer.dispose(myDisposable);

    final JTree tree = getTree();
    tree.repaint();
    IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
      IdeFocusManager.getGlobalInstance().requestFocus(tree, true);
    });
  }

  /** Hook for subclasses; called after the editor is torn down. */
  protected void onHidden() {
  }

  protected abstract Project getProject();

  /**
   * Applies the given bounds, growing (and vertically centering) the
   * component if its preferred height exceeds the tree row height.
   */
  private static void setInplaceEditorBounds(JComponent component, int x, int y, int width, int height) {
    int h = Math.max(height, component.getPreferredSize().height);
    component.setBounds(x, y - (h - height) / 2, width, h);
  }

  /**
   * Shows the inplace editor over the current node and installs all the
   * listeners needed to keep it positioned and to commit/cancel it. Must not
   * be called while the editor is already shown.
   */
  public final void show() {
    LOG.assertTrue(myInplaceEditorComponent == null, "editor is not released");
    final JTree tree = getTree();
    tree.scrollPathToVisible(getNodePath());
    final JRootPane rootPane = tree.getRootPane();
    if (rootPane == null) {
      // Tree is not in a displayable hierarchy; nothing to attach to.
      return;
    }
    final JLayeredPane layeredPane = rootPane.getLayeredPane();

    Rectangle bounds = getEditorBounds();

    if (bounds == null) {
      // Node is not currently visible in the tree.
      return;
    }

    Point layeredPanePoint = SwingUtilities.convertPoint(tree, bounds.x, bounds.y, layeredPane);

    final JComponent inplaceEditorComponent = createInplaceEditorComponent();
    myInplaceEditorComponent = inplaceEditorComponent;
    LOG.assertTrue(inplaceEditorComponent != null);

    setInplaceEditorBounds(inplaceEditorComponent, layeredPanePoint.x, layeredPanePoint.y, bounds.width, bounds.height);

    // Layer 250 keeps the editor above ordinary content; per JLayeredPane's
    // standard layers that is above MODAL_LAYER (200) and below
    // POPUP_LAYER (300), so popups still paint over it.
    layeredPane.add(inplaceEditorComponent, new Integer(250));
    myRemoveActions.add(() -> layeredPane.remove(inplaceEditorComponent));

    inplaceEditorComponent.validate();
    // Paint synchronously so the editor appears without waiting for the
    // next repaint cycle.
    inplaceEditorComponent.paintImmediately(0, 0, inplaceEditorComponent.getWidth(), inplaceEditorComponent.getHeight());
    IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
      IdeFocusManager.getGlobalInstance().requestFocus(getPreferredFocusedComponent(), true);
    });

    // Reposition the editor when the tree or root pane is resized; cancel
    // when either is hidden.
    final ComponentAdapter componentListener = new ComponentAdapter() {
      @Override
      public void componentResized(ComponentEvent e) {
        final Project project = getProject();
        ApplicationManager.getApplication().invokeLater(() -> {
          // Re-check state inside invokeLater: the editor or project may be
          // gone by the time this runs.
          if (!isShown() || project == null || project.isDisposed()) {
            return;
          }
          JTree tree1 = getTree();
          JLayeredPane layeredPane1 = tree1.getRootPane().getLayeredPane();
          Rectangle bounds1 = getEditorBounds();
          if (bounds1 == null) {
            return;
          }
          Point layeredPanePoint1 = SwingUtilities.convertPoint(tree1, bounds1.x, bounds1.y, layeredPane1);
          setInplaceEditorBounds(inplaceEditorComponent, layeredPanePoint1.x, layeredPanePoint1.y, bounds1.width, bounds1.height);
          inplaceEditorComponent.revalidate();
        });
      }

      @Override
      public void componentHidden(ComponentEvent e) {
        cancelEditing();
      }
    };

    final HierarchyListener hierarchyListener = e -> {
      if (!tree.isShowing()) {
        cancelEditing();
      }
    };

    tree.addHierarchyListener(hierarchyListener);
    tree.addComponentListener(componentListener);
    rootPane.addComponentListener(componentListener);
    // Teardown mirrors the registrations above, on the same receivers.
    myRemoveActions.add(() -> {
      tree.removeHierarchyListener(hierarchyListener);
      tree.removeComponentListener(componentListener);
      rootPane.removeComponentListener(componentListener);
    });

    // Cancel the edit if the selected run content changes or is removed;
    // the subscription dies with myDisposable when hide() runs.
    getProject().getMessageBus().connect(myDisposable).subscribe(RunContentManager.TOPIC, new RunContentWithExecutorListener() {
      @Override
      public void contentSelected(@Nullable RunContentDescriptor descriptor, @NotNull Executor executor) {
        cancelEditing();
      }

      @Override
      public void contentRemoved(@Nullable RunContentDescriptor descriptor, @NotNull Executor executor) {
        cancelEditing();
      }
    });

    // ENTER commits, ESCAPE cancels, while focus is anywhere in the editor.
    final JComponent editorComponent = getEditorComponent();
    editorComponent.getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT).put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), "enterStroke");
    editorComponent.getActionMap().put("enterStroke", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        doOKAction();
      }
    });
    editorComponent.getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), "escapeStroke");
    editorComponent.getActionMap().put("escapeStroke", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        cancelEditing();
      }
    });
    final Toolkit defaultToolkit = Toolkit.getDefaultToolkit();
    // Register the global mouse listener via invokeLater, and only if the
    // editor is still shown — hide() may already have run by then.
    SwingUtilities.invokeLater(() -> {
      if (!isShown()) return;
      defaultToolkit.addAWTEventListener(this, AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_WHEEL_EVENT_MASK);
    });
    // Removing an unregistered listener is harmless, so this is always safe.
    myRemoveActions.add(() -> defaultToolkit.removeAWTEventListener(this));
    onShown();
  }

  /** Hook for subclasses; called after the editor is fully shown. */
  protected void onShown() {
  }

  /**
   * Global mouse handler: commits or cancels the edit on clicks outside the
   * editor, ignoring clicks inside it, inside its child popups, or inside an
   * active code-completion lookup.
   */
  @Override
  public void eventDispatched(AWTEvent event) {
    if (!isShown()) {
      return;
    }
    // Cast is safe: this listener is registered with MOUSE_EVENT_MASK |
    // MOUSE_WHEEL_EVENT_MASK only, so every event here is a MouseEvent.
    MouseEvent mouseEvent = (MouseEvent)event;
    if (mouseEvent.getClickCount() == 0 && !(event instanceof MouseWheelEvent)) {
      return;
    }

    final int id = mouseEvent.getID();
    if (id != MouseEvent.MOUSE_PRESSED && id != MouseEvent.MOUSE_RELEASED && id != MouseEvent.MOUSE_CLICKED && id != MouseEvent.MOUSE_WHEEL) {
      return;
    }

    final Component sourceComponent = mouseEvent.getComponent();
    final Point originalPoint = mouseEvent.getPoint();

    final Editor editor = getEditor();
    if (editor == null) return;

    Project project = editor.getProject();
    LookupImpl activeLookup = project != null ? (LookupImpl)LookupManager.getInstance(project).getActiveLookup() : null;
    if (activeLookup != null) {
      final Point lookupPoint = SwingUtilities.convertPoint(sourceComponent, originalPoint, activeLookup.getComponent());
      if (activeLookup.getComponent().getBounds().contains(lookupPoint)) {
        return; //mouse click inside lookup
      } else {
        activeLookup.hide(); //hide popup on mouse position changed
      }
    }

    // do not cancel editing if we click in editor popup
    final List<JBPopup> popups = JBPopupFactory.getInstance().getChildPopups(myInplaceEditorComponent);
    for (JBPopup popup : popups) {
      if (SwingUtilities.isDescendingFrom(sourceComponent, popup.getContent())) {
        return;
      }
    }

    final Point point = SwingUtilities.convertPoint(sourceComponent, originalPoint, myInplaceEditorComponent);
    if (myInplaceEditorComponent.contains(point)) {
      // Click inside the editor itself: keep editing.
      return;
    }
    final Component componentAtPoint = SwingUtilities.getDeepestComponentAt(sourceComponent, originalPoint.x, originalPoint.y);
    // A click in a combo popup belonging to the editor commits the edit
    // (unless it is a wheel event).
    for (Component comp = componentAtPoint; comp != null; comp = comp.getParent()) {
      if (comp instanceof ComboPopup) {
        if (id != MouseEvent.MOUSE_WHEEL) {
          doPopupOKAction();
        }
        return;
      }
    }

    if (id != MouseEvent.MOUSE_RELEASED) {
      // do not cancel on release outside of the component
      cancelEditing();
    }
  }

  /**
   * Computes the editor bounds in tree coordinates: the tree's visible width
   * clipped to the edited node's row, or null if the node isn't visible.
   */
  @Nullable
  protected Rectangle getEditorBounds() {
    final JTree tree = getTree();
    Rectangle bounds = tree.getVisibleRect();
    Rectangle nodeBounds = tree.getPathBounds(getNodePath());
    if (bounds == null || nodeBounds == null) {
      return null;
    }
    bounds.y = nodeBounds.y;
    bounds.height = nodeBounds.height;

    if (nodeBounds.x > bounds.x) {
      // Start the editor at the node's indentation, shrinking the width by
      // the amount skipped.
      bounds.width = bounds.width - nodeBounds.x + bounds.x;
      bounds.x = nodeBounds.x;
    }
    return bounds;
  }

  /** Whether the inplace editor is currently displayed. */
  public boolean isShown() {
    return myInplaceEditorComponent != null;
  }
}
package com.redhat.ceylon.compiler.js; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import com.redhat.ceylon.common.Backend; import com.redhat.ceylon.compiler.js.GenerateJsVisitor.InitDeferrer; import com.redhat.ceylon.compiler.js.GenerateJsVisitor.PrototypeInitCallback; import com.redhat.ceylon.compiler.js.GenerateJsVisitor.SuperVisitor; import com.redhat.ceylon.compiler.js.util.TypeComparator; import com.redhat.ceylon.compiler.js.util.TypeUtils; import com.redhat.ceylon.compiler.typechecker.tree.Node; import com.redhat.ceylon.compiler.typechecker.tree.Tree; import com.redhat.ceylon.compiler.typechecker.tree.Tree.StaticType; import com.redhat.ceylon.compiler.typechecker.util.NativeUtil; import com.redhat.ceylon.model.typechecker.model.Class; import com.redhat.ceylon.model.typechecker.model.ClassOrInterface; import com.redhat.ceylon.model.typechecker.model.Constructor; import com.redhat.ceylon.model.typechecker.model.Declaration; import com.redhat.ceylon.model.typechecker.model.Interface; import com.redhat.ceylon.model.typechecker.model.Function; import com.redhat.ceylon.model.typechecker.model.ParameterList; import com.redhat.ceylon.model.typechecker.model.Type; import com.redhat.ceylon.model.typechecker.model.Scope; import com.redhat.ceylon.model.typechecker.model.TypeDeclaration; import com.redhat.ceylon.model.typechecker.model.TypeParameter; import com.redhat.ceylon.model.typechecker.model.TypedDeclaration; import com.redhat.ceylon.model.typechecker.model.ModelUtil; import com.redhat.ceylon.model.typechecker.model.Value; public class TypeGenerator { static final ErrorVisitor errVisitor = new ErrorVisitor(); /** Generates a function to initialize the specified type. 
* @param initDeferrer */ static void initializeType(final Node type, final GenerateJsVisitor gen, InitDeferrer initDeferrer) { Tree.ExtendedType extendedType = null; Tree.SatisfiedTypes satisfiedTypes = null; final ClassOrInterface decl; final List<Tree.Statement> stmts; Value objDecl = null; if (type instanceof Tree.ClassDefinition) { Tree.ClassDefinition classDef = (Tree.ClassDefinition) type; extendedType = classDef.getExtendedType(); satisfiedTypes = classDef.getSatisfiedTypes(); decl = classDef.getDeclarationModel(); Tree.Declaration nh = gen.getNativeHeader(decl); if (nh == null && NativeUtil.hasNativeMembers(decl)) { nh = classDef; } stmts = NativeUtil.mergeStatements(classDef.getClassBody(), nh, Backend.JavaScript); } else if (type instanceof Tree.InterfaceDefinition) { satisfiedTypes = ((Tree.InterfaceDefinition) type).getSatisfiedTypes(); decl = ((Tree.InterfaceDefinition) type).getDeclarationModel(); final Tree.InterfaceDefinition idef = (Tree.InterfaceDefinition)type; Tree.Declaration nh = gen.getNativeHeader(decl); if (nh == null && NativeUtil.hasNativeMembers(decl)) { nh = idef; } stmts = NativeUtil.mergeStatements(idef.getInterfaceBody(), nh, Backend.JavaScript); } else if (type instanceof Tree.ObjectDefinition) { Tree.ObjectDefinition objectDef = (Tree.ObjectDefinition) type; extendedType = objectDef.getExtendedType(); satisfiedTypes = objectDef.getSatisfiedTypes(); decl = (ClassOrInterface)objectDef.getDeclarationModel().getTypeDeclaration(); objDecl = objectDef.getDeclarationModel(); Tree.Declaration nh = gen.getNativeHeader(decl); if (nh == null && NativeUtil.hasNativeMembers(decl)) { nh = objectDef; } stmts = NativeUtil.mergeStatements(objectDef.getClassBody(), nh,Backend.JavaScript); } else if (type instanceof Tree.ObjectExpression) { Tree.ObjectExpression objectDef = (Tree.ObjectExpression) type; extendedType = objectDef.getExtendedType(); satisfiedTypes = objectDef.getSatisfiedTypes(); decl = (ClassOrInterface)objectDef.getAnonymousClass(); 
stmts = objectDef.getClassBody().getStatements(); } else if (type instanceof Tree.Enumerated) { Tree.Enumerated vc = (Tree.Enumerated)type; stmts = vc.getBlock().getStatements(); decl = (ClassOrInterface)vc.getDeclarationModel().getTypeDeclaration().getContainer(); } else { stmts = null; decl = null; } final PrototypeInitCallback callback = new PrototypeInitCallback() { @Override public void addToPrototypeCallback() { if (decl != null) { gen.addToPrototype(type, decl, stmts); } } }; typeInitialization(extendedType, satisfiedTypes, decl, callback, gen, objDecl, initDeferrer); } /** This is now the main method to generate the type initialization code. * @param extendedType The type that is being extended. * @param satisfiedTypes The types satisfied by the type being initialized. * @param d The declaration for the type being initialized * @param callback A callback to add something more to the type initializer in prototype style. * @param initDeferrer something which lets us put statements at the end of the container initialiser, if it's not null (it's null for toplevels) */ static void typeInitialization(final Tree.ExtendedType extendedType, final Tree.SatisfiedTypes satisfiedTypes, final ClassOrInterface d, PrototypeInitCallback callback, final GenerateJsVisitor gen, final Value objectDeclaration, InitDeferrer initDeferrer) { final boolean isInterface = d instanceof com.redhat.ceylon.model.typechecker.model.Interface; String initFuncName = isInterface ? 
"initTypeProtoI" : "initTypeProto"; final String typename = gen.getNames().name(d); final String initname; if (d.isAnonymous()) { String _initname = gen.getNames().objectName(d); if (objectDeclaration != null && objectDeclaration.isNativeHeader() && TypeUtils.makeAbstractNative(objectDeclaration) && !(_initname.endsWith("$$N") || _initname.endsWith("$$N()"))) { if (_initname.endsWith("()")) { _initname = _initname.substring(0, _initname.length()-2) + "$$N()"; } else { _initname += "$$N"; } } if (d.isToplevel()) { initname = "$init$" + _initname.substring(0, _initname.length()-2); } else { initname = "$init$" + _initname; } } else { initname = "$init$" + typename; } gen.out("function ", initname, "()"); gen.beginBlock(); gen.out("if(", typename, ".$$===undefined)"); gen.beginBlock(); boolean genIniter = true; if (TypeUtils.isNativeExternal(d)) { //Allow native types to have their own initialization code genIniter = !gen.stitchInitializer(d); } if (genIniter) { gen.out(gen.getClAlias(), initFuncName, "(", typename, ",'", d.getQualifiedNameString(), "'"); final List<Tree.StaticType> supers = satisfiedTypes == null ? 
Collections.<Tree.StaticType>emptyList() : new ArrayList<Tree.StaticType>(satisfiedTypes.getTypes().size()+1); if (extendedType != null) { if (satisfiedTypes == null) { String fname = typeFunctionName(extendedType.getType(), d, gen); gen.out(",", fname); } else { supers.add(extendedType.getType()); } } else if (!isInterface) { gen.out(",", gen.getClAlias(), "Basic"); } if (satisfiedTypes != null) { supers.addAll(satisfiedTypes.getTypes()); Collections.sort(supers, new StaticTypeComparator()); for (Tree.StaticType satType : supers) { String fname = typeFunctionName(satType, d, gen); gen.out(",", fname); } } gen.out(");"); } //Add ref to outer type if (d.isMember()) { StringBuilder containers = new StringBuilder(); Scope _d2 = d; while (_d2 instanceof ClassOrInterface) { if (containers.length() > 0) { containers.insert(0, '.'); } containers.insert(0, gen.getNames().name((Declaration)_d2)); _d2 = _d2.getContainer(); } gen.endLine(); gen.out(containers.toString(), "=", typename, ";"); } //The class definition needs to be inside the init function if we want forwards decls to work in prototype style if (gen.opts.isOptimize()) { gen.endLine(); callback.addToPrototypeCallback(); } gen.endBlockNewLine(); gen.out("return ", typename, ";"); gen.endBlockNewLine(); //If it's nested, share the init function if (gen.outerSelf(d)) { gen.out(".", initname, "=", initname); gen.endLine(true); } if(initDeferrer != null){ initDeferrer.deferred.add(initname+"();"); }else{ gen.out(initname, "()"); gen.endLine(true); } } /** Returns the name of the type or its $init$ function if it's local. 
*/ static String typeFunctionName(final Tree.StaticType type, final ClassOrInterface coi, final GenerateJsVisitor gen) { TypeDeclaration d = type.getTypeModel().getDeclaration(); final boolean removeAlias = d == null || !d.isClassOrInterfaceMember() || d instanceof Interface; if ((removeAlias && d.isAlias()) || d instanceof Constructor) { Type extendedType = d.getExtendedType(); d = extendedType==null ? null : extendedType.getDeclaration(); } Declaration cont = ModelUtil.getContainingDeclaration(d); final boolean inProto = gen.opts.isOptimize() && cont instanceof TypeDeclaration; final boolean imported = gen.isImported(type.getUnit().getPackage(), d); String dname = gen.getNames().name(d); if (d.isAlias()) { TypeDeclaration d2 = d; while (d2.isAlias()) { d2 = d2.getExtendedType().getDeclaration(); } dname = gen.getNames().name(d2); } final String initName = "$init$" + dname + "()"; if (!imported && !d.isClassOrInterfaceMember()) { return initName; } if (inProto && coi.isMember() && !d.isAlias() && (coi.getContainer() == cont || ModelUtil.contains(d, coi))) { //A member class that extends or satisfies another member of its same container, //use its $init$ function return initName; } String tfn; //#628 If coi is anonymous and inside cont, qualify the path from cont instead if (coi != null && coi.isAnonymous() && cont instanceof Scope && ModelUtil.contains((Scope)cont, coi)) { tfn = gen.qualifiedPath(type, cont, inProto); } else if (inProto && d.isClassOrInterfaceMember()) { return pathToType(type, d, gen); } else { tfn = gen.qualifiedPath(type, d, inProto); } tfn = gen.memberAccessBase(type, d, false, tfn); if (removeAlias && !imported) { int idx = tfn.lastIndexOf('.'); if (idx > 0) { tfn = tfn.substring(0, idx+1) + initName; } else { tfn = initName; } } return tfn; } static String pathToType(Node that, TypeDeclaration d, GenerateJsVisitor gen) { List<TypeDeclaration> parents = new ArrayList<>(3); TypeDeclaration path=d; parents.add(path); while 
(path.isClassOrInterfaceMember()) { path = ModelUtil.getContainingClassOrInterface(path.getContainer()); parents.add(0, path); } StringBuilder sb = new StringBuilder(); String qp = gen.qualifiedPath(that, parents.get(0), gen.opts.isOptimize() && ModelUtil.getContainingDeclaration(d) instanceof TypeDeclaration); if (qp != null && !qp.isEmpty()) { sb.append(qp); } boolean first = true; for (TypeDeclaration td : parents) { if (first) { first=false; } else { sb.append(".$$.prototype"); } if (sb.length() > 0) { sb.append('.'); } if (!td.isAlias()) { sb.append("$init$"); } sb.append(gen.getNames().name(td)); if (!td.isAlias()) { sb.append("()"); } } return sb.toString(); } static void interfaceDefinition(final Tree.InterfaceDefinition that, final GenerateJsVisitor gen, InitDeferrer initDeferrer) { //Don't even bother with nodes that have errors if (errVisitor.hasErrors(that))return; final Interface d = that.getDeclarationModel(); //If it's inside a dynamic interface, don't generate anything if (d.isClassOrInterfaceMember() && ((ClassOrInterface)d.getContainer()).isDynamic())return; final Interface natd = (Interface)ModelUtil.getNativeDeclaration(d, Backend.JavaScript); final boolean headerWithoutBackend = NativeUtil.isHeaderWithoutBackend(that, Backend.JavaScript); if (natd!= null && (headerWithoutBackend || NativeUtil.isNativeHeader(that))) { // It's a native header, remember it for later when we deal with its implementation gen.saveNativeHeader(that); return; } if (!(NativeUtil.isForBackend(that, Backend.JavaScript) || headerWithoutBackend)) { return; } gen.comment(that); gen.out(GenerateJsVisitor.function, gen.getNames().name(d)); final boolean withTargs = generateParameters(that.getTypeParameterList(), null, d, gen); gen.beginBlock(); final List<Declaration> superDecs = new ArrayList<Declaration>(3); if (!gen.opts.isOptimize()) { new SuperVisitor(superDecs).visit(that.getInterfaceBody()); } final Tree.SatisfiedTypes sats = that.getSatisfiedTypes(); if (withTargs) { 
gen.out(gen.getClAlias(), "set_type_args(", gen.getNames().self(d), ",$$targs$$,", gen.getNames().name(d), ")"); gen.endLine(true); } callSupertypes(sats == null ? null : TypeUtils.getTypes(sats.getTypes()), null, d, that, superDecs, null, null, gen); if (!d.isToplevel() && d.getContainer() instanceof Function && !((Function)d.getContainer()).getTypeParameters().isEmpty()) { gen.out(gen.getClAlias(), "set_type_args(", gen.getNames().self(d), ",", gen.getNames().typeArgsParamName((Function)d.getContainer()), ",", gen.getNames().name(d), ")"); gen.endLine(true); } final List<Tree.Statement> stmts; if (NativeUtil.isForBackend(d, Backend.JavaScript)) { Tree.Declaration nh = gen.getNativeHeader(d); if (nh == null && NativeUtil.hasNativeMembers(d)) { nh = that; } stmts = NativeUtil.mergeStatements(that.getInterfaceBody(), nh, Backend.JavaScript); } else { stmts = that.getInterfaceBody().getStatements(); } gen.visitStatements(stmts); //returnSelf(d); gen.endBlockNewLine(); if (d.isDynamic()) { //Add the list of expected members here final List<Declaration> members = d.getMembers(); gen.out(gen.getNames().name(d), ".dynmem$=["); if (members.isEmpty()) { gen.out("];"); } else { gen.out("'"); boolean first = true; for (Declaration m : members) { if (first)first=false;else gen.out("','"); gen.out(gen.getNames().name(m)); } gen.out("'];"); } } //Add reference to metamodel gen.out(gen.getNames().name(d), ".$crtmm$="); TypeUtils.encodeForRuntime(that, d, that.getAnnotationList(), gen); gen.endLine(true); gen.share(d); initializeType(that, gen, initDeferrer); } /** Outputs the parameter list of the type's constructor, including surrounding parens. * Returns true if the type has type parameters. 
*/ static boolean generateParameters(final Tree.TypeParameterList tparms, final Tree.ParameterList plist, final TypeDeclaration d, final GenerateJsVisitor gen) { gen.out("("); final boolean withTargs = tparms != null && !tparms.getTypeParameterDeclarations().isEmpty(); if (plist != null) { for (Tree.Parameter p: plist.getParameters()) { p.visit(gen); gen.out(","); } } if (withTargs) { gen.out("$$targs$$,"); } gen.out(gen.getNames().self(d), ")"); return withTargs; } static void callSuperclass(final Tree.SimpleType extendedType, final Tree.InvocationExpression invocation, final Class d, final ParameterList plist, final Node that, final boolean pseudoAbstractConstructor, final List<Declaration> superDecs, final GenerateJsVisitor gen) { TypeDeclaration typeDecl = extendedType.getDeclarationModel(); if (invocation != null) { Tree.PositionalArgumentList argList = invocation.getPositionalArgumentList(); final String qpath; if (typeDecl instanceof Constructor) { final String path = gen.qualifiedPath(that, (TypeDeclaration)typeDecl.getContainer(), false); if (path.isEmpty()) { qpath = gen.getNames().name((TypeDeclaration)typeDecl.getContainer()); } else { qpath = path + "." + gen.getNames().name((TypeDeclaration)typeDecl.getContainer()); } } else { qpath = gen.qualifiedPath(that, typeDecl, false); } if (pseudoAbstractConstructor) { if (typeDecl instanceof Constructor) { gen.out(gen.memberAccessBase(extendedType, typeDecl, false, qpath), "$$a("); } else { gen.out(gen.memberAccessBase(extendedType, typeDecl, false, qpath), gen.getNames().constructorSeparator(typeDecl), "$c$$$a("); } } else { gen.out(gen.memberAccessBase(extendedType, typeDecl, false, qpath), (gen.opts.isOptimize() && (gen.getSuperMemberScope(extendedType) != null)) ? 
".call(this," : "("); } gen.getInvoker().generatePositionalArguments(invocation.getPrimary(), argList, argList.getPositionalArguments(), false, false); if (argList.getPositionalArguments().size() > 0) { gen.out(","); } //There may be defaulted args we must pass as undefined if (plist != null && plist.getParameters().size() > argList.getPositionalArguments().size()) { for (int i = argList.getPositionalArguments().size(); i < plist.getParameters().size(); i++) { com.redhat.ceylon.model.typechecker.model.Parameter p = plist.getParameters().get(i); if (p.isSequenced()) { gen.out(gen.getClAlias(), "empty(),"); } else { gen.out("undefined,"); } } } //If the supertype has type arguments, add them to the call List<TypeParameter> typeParams; if (typeDecl instanceof Constructor) { //Output the type arguments to the constructor, //UNLESS you're in the same class, then just pass the type arguments object typeParams = ((Class)typeDecl.getContainer()).getTypeParameters(); if (typeParams != null && !typeParams.isEmpty()) { typeParams = null; if (ModelUtil.contains(d, typeDecl)) { gen.out("$$targs$$,"); } else { TypeUtils.printTypeArguments(that, extendedType.getTypeModel().getQualifyingType().getTypeArguments(), gen, false, null); gen.out(","); } } } else { typeParams = typeDecl.getTypeParameters(); } if (typeParams != null && !typeParams.isEmpty()) { List<Type> typeArgs = null; if (extendedType.getTypeArgumentList() != null) { typeArgs = extendedType.getTypeArgumentList().getTypeModels(); } TypeUtils.printTypeArguments(that, TypeUtils.matchTypeParametersWithArguments(typeParams, typeArgs), gen, false, null); gen.out(","); } gen.out(gen.getNames().self(d), ")"); gen.endLine(true); } copySuperMembers(typeDecl, superDecs, d, gen); } static void callSupertypes(final List<Type> sats, final Tree.SimpleType supertype, final ClassOrInterface d, final Node that, final List<Declaration> superDecs, final Tree.InvocationExpression invoke, final ParameterList plist, final GenerateJsVisitor 
gen) { if (sats != null) { final ArrayList<Type> supers = new ArrayList<>(sats.size()+1); supers.addAll(sats); if (supertype != null) { supers.add(supertype.getTypeModel()); } Collections.sort(supers, new TypeComparator()); HashSet<String> myTypeArgs = new HashSet<>(); for (TypeParameter tp : d.getTypeParameters()) { myTypeArgs.add(tp.getName()); } for (Type st: supers) { if (supertype != null && st == supertype.getTypeModel()) { callSuperclass(supertype, invoke, (Class)d, plist, that, false, superDecs, gen); } else { TypeDeclaration typeDecl = st.getDeclaration(); final TypeDeclaration _anoncont; if (d.isAnonymous() && ModelUtil.contains( ModelUtil.getContainingClassOrInterface(d.getContainer()), typeDecl)) { _anoncont = ModelUtil.getContainingClassOrInterface(d); } else { _anoncont = null; } if (_anoncont == null) { gen.qualify(that, typeDecl); gen.out(gen.getNames().name(typeDecl), "("); } else { gen.qualify(that, _anoncont); gen.out(gen.getNames().name(typeDecl), ".call(", gen.getNames().self(ModelUtil.getContainingClassOrInterface(d.getContainer())), ","); } if (typeDecl.getTypeParameters() != null && !typeDecl.getTypeParameters().isEmpty()) { TypeUtils.printTypeArguments(that, st.getTypeArguments(), gen, d.isToplevel(), null); gen.out(","); } gen.out(gen.getNames().self(d), ")"); gen.endLine(true); copySuperMembers(typeDecl, superDecs, d, gen); } } } else if (supertype != null) { callSuperclass(supertype, invoke, (Class)d, plist, that, false, superDecs, gen); } } private static void copySuperMembers(final TypeDeclaration typeDecl, final List<Declaration> decs, final ClassOrInterface d, final GenerateJsVisitor gen) { if (!gen.opts.isOptimize() && decs != null) { for (Declaration dec: decs) { if (!typeDecl.isMember(dec)) { continue; } String suffix = gen.getNames().scopeSuffix(dec.getContainer()); if (dec instanceof Value && ((Value)dec).isTransient()) { superGetterRef(dec,d,suffix, gen); if (((Value) dec).isVariable()) { superSetterRef(dec,d,suffix, gen); } } 
else { gen.out(gen.getNames().self(d), ".", gen.getNames().name(dec), suffix, "=", gen.getNames().self(d), ".", gen.getNames().name(dec)); gen.endLine(true); } } } } private static void superGetterRef(final Declaration d, final ClassOrInterface sub, final String parentSuffix, final GenerateJsVisitor gen) { if (AttributeGenerator.defineAsProperty(d)) { gen.out(gen.getClAlias(), "copySuperAttr(", gen.getNames().self(sub), ",'", gen.getNames().name(d), "','", parentSuffix, "')"); } else { gen.out(gen.getNames().self(sub), ".", gen.getNames().getter(d, false), parentSuffix, "=", gen.getNames().self(sub), ".", gen.getNames().getter(d, false)); } gen.endLine(true); } private static void superSetterRef(final Declaration d, final ClassOrInterface sub, final String parentSuffix, final GenerateJsVisitor gen) { if (!AttributeGenerator.defineAsProperty(d)) { gen.out(gen.getNames().self(sub), ".", gen.getNames().setter(d), parentSuffix, "=", gen.getNames().self(sub), ".", gen.getNames().setter(d)); gen.endLine(true); } } public static class StaticTypeComparator implements Comparator<Tree.StaticType> { @Override public int compare(StaticType o1, StaticType o2) { final Type t1 = o1.getTypeModel(); final Type t2 = o2.getTypeModel(); if (ModelUtil.isTypeUnknown(t1)) { return ModelUtil.isTypeUnknown(t2) ? 0 : -1; } if (ModelUtil.isTypeUnknown(t2)) { return ModelUtil.isTypeUnknown(t1) ? 
0 : -1; } if (t1.isSubtypeOf(t2)) { return 1; } if (t2.isSubtypeOf(t1)) { return -1; } //Check the members for (Declaration d : t1.getDeclaration().getMembers()) { if (d instanceof TypedDeclaration || d instanceof ClassOrInterface) { Declaration d2 = t2.getDeclaration().getMember(d.getName(), null, false); if (d2 != null) { final Declaration dd2 = ModelUtil.getContainingDeclaration(d2); if (dd2 instanceof TypeDeclaration && t1.getDeclaration().inherits((TypeDeclaration)dd2)) { return 1; } } } } for (Declaration d : t2.getDeclaration().getMembers()) { if (d instanceof TypedDeclaration || d instanceof ClassOrInterface) { Declaration d2 = t1.getDeclaration().getMember(d.getName(), null, false); if (d2 != null) { final Declaration dd2 = ModelUtil.getContainingDeclaration(d2); if (dd2 instanceof TypeDeclaration && t2.getDeclaration().inherits((TypeDeclaration)dd2)) { return -1; } } } } return 0; } } }
package ca.uhn.fhir.rest.client; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.StringReader; import java.nio.charset.Charset; import org.apache.commons.io.input.ReaderInputStream; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.ProtocolVersion; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicStatusLine; import org.hamcrest.core.StringContains; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.internal.stubbing.defaultanswers.ReturnsDeepStubs; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.api.TagList; import ca.uhn.fhir.model.dstu2.resource.Patient; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.rest.server.Constants; import ca.uhn.fhir.rest.server.exceptions.NotModifiedException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; /** * Created by dsotnikov on 2/25/2014. 
 */
public class ETagClientDstu2Test {

	private static FhirContext ourCtx;
	private HttpClient myHttpClient;
	private HttpResponse myHttpResponse;

	// Fresh deep-stubbed mocks per test; the shared FhirContext's client factory is
	// pointed at the mock HTTP client so no real network I/O happens.
	@Before
	public void before() {
		myHttpClient = mock(HttpClient.class, new ReturnsDeepStubs());
		ourCtx.getRestfulClientFactory().setHttpClient(myHttpClient);
		ourCtx.getRestfulClientFactory().setServerValidationModeEnum(ServerValidationModeEnum.NEVER);
		myHttpResponse = mock(HttpResponse.class, new ReturnsDeepStubs());
	}

	// Canned Patient resource XML used as the body of mocked HTTP responses.
	private String getResourceResult() {
		//@formatter:off
		String msg = "<Patient xmlns=\"http://hl7.org/fhir\">"
				+ "<text><status value=\"generated\" /><div xmlns=\"http://www.w3.org/1999/xhtml\">John Cardinal: 444333333 </div></text>"
				+ "<identifier><label value=\"SSN\" /><system value=\"http://orionhealth.com/mrn\" /><value value=\"PRP1660\" /></identifier>"
				+ "<name><use value=\"official\" /><family value=\"Cardinal\" /><given value=\"John\" /></name>"
				+ "<name><family value=\"Kramer\" /><given value=\"Doe\" /></name>"
				+ "<telecom><system value=\"phone\" /><value value=\"555-555-2004\" /><use value=\"work\" /></telecom>"
				+ "<gender value=\"male\"/>"
				+ "<address><use value=\"home\" /><line value=\"2222 Home Street\" /></address><active value=\"true\" />"
				+ "</Patient>";
		//@formatter:on
		return msg;
	}

	// Parses the canned XML into a Patient for use as an update payload.
	private Patient getResource() {
		return ourCtx.newXmlParser().parseResource(Patient.class, getResourceResult());
	}

	// Verifies that the Content-Location, Last-Modified and Category response headers
	// are mapped onto the returned resource's id and metadata.
	@Test
	public void testReadWithContentLocationInResponse() throws Exception {
		String msg = getResourceResult();

		ArgumentCaptor<HttpUriRequest> capt = ArgumentCaptor.forClass(HttpUriRequest.class);
		when(myHttpClient.execute(capt.capture())).thenReturn(myHttpResponse);
		when(myHttpResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), 200, "OK"));
		when(myHttpResponse.getEntity().getContentType()).thenReturn(new BasicHeader("content-type", Constants.CT_FHIR_XML + "; charset=UTF-8"));
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader(msg), Charset.forName("UTF-8")));
		//@formatter:off
		Header[] headers = new Header[] {
				new BasicHeader(Constants.HEADER_LAST_MODIFIED, "Wed, 15 Nov 1995 04:58:08 GMT"),
				new BasicHeader(Constants.HEADER_CONTENT_LOCATION, "http://foo.com/Patient/123/_history/2333"),
				new BasicHeader(Constants.HEADER_CATEGORY, "http://foo/tagdefinition.html; scheme=\"http://hl7.org/fhir/tag\"; label=\"Some tag\""),
				new BasicHeader(Constants.HEADER_ETAG, "\"9999\"")
		};
		//@formatter:on
		when(myHttpResponse.getAllHeaders()).thenReturn(headers);

		IGenericClient client = ourCtx.newRestfulGenericClient("http://example.com/fhir");

		Patient response = client.read(Patient.class, new IdDt("Patient/1234"));

		// Id comes from the Content-Location header, not the requested id
		assertEquals("http://foo.com/Patient/123/_history/2333", response.getId().getValue());

		InstantDt lm = (InstantDt) response.getResourceMetadata().get(ResourceMetadataKeyEnum.UPDATED);
		lm.setTimeZoneZulu(true);
		assertEquals("1995-11-15T04:58:08.000Z", lm.getValueAsString());

		TagList tags = ResourceMetadataKeyEnum.TAG_LIST.get(response);
		assertNotNull(tags);
		assertEquals(1, tags.size());
		assertEquals("http://foo/tagdefinition.html", tags.get(0).getTerm());
		assertEquals("http://hl7.org/fhir/tag", tags.get(0).getScheme());
		assertEquals("Some tag", tags.get(0).getLabel());
	}

	// A 304 response must surface as NotModifiedException; with ifVersionMatches the
	// client must send If-None-Match and return the supplied fallback resource.
	@Test
	public void testReadWithIfNoneMatch() throws Exception {
		ArgumentCaptor<HttpUriRequest> capt = ArgumentCaptor.forClass(HttpUriRequest.class);
		when(myHttpClient.execute(capt.capture())).thenReturn(myHttpResponse);
		when(myHttpResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), Constants.STATUS_HTTP_304_NOT_MODIFIED, "Not modified"));
		when(myHttpResponse.getEntity().getContentType()).thenReturn(new BasicHeader("content-type", Constants.CT_FHIR_XML + "; charset=UTF-8"));

		IGenericClient client = ourCtx.newRestfulGenericClient("http://example.com/fhir");
		// Index into the captor's accumulated requests, bumped after each invocation
		int count = 0;

		//@formatter:off
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader("")));
		try {
			client
				.read()
				.resource(Patient.class)
				.withId(new IdDt("Patient/1234"))
				.execute();
			fail();
		} catch (NotModifiedException e) {
			// good!
		}
		//@formatter:on
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count).getURI().toString());
		count++;

		//@formatter:off
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader("")));
		Patient expected = new Patient();
		Patient response = client
			.read()
			.resource(Patient.class)
			.withId(new IdDt("Patient/1234"))
			.ifVersionMatches("9876").returnResource(expected)
			.execute();
		//@formatter:on
		assertSame(expected, response);
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count).getURI().toString());
		assertEquals("\"9876\"", capt.getAllValues().get(count).getHeaders(Constants.HEADER_IF_NONE_MATCH_LC)[0].getValue());
		count++;
	}

	// An update with a versioned id must send that version as a quoted If-Match header;
	// an unversioned id must send no If-Match at all.
	@Test
	public void testUpdateWithIfMatch() throws Exception {
		ArgumentCaptor<HttpUriRequest> capt = ArgumentCaptor.forClass(HttpUriRequest.class);
		when(myHttpClient.execute(capt.capture())).thenReturn(myHttpResponse);
		when(myHttpResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), Constants.STATUS_HTTP_200_OK, "OK"));
		when(myHttpResponse.getEntity().getContentType()).thenReturn(new BasicHeader("content-type", Constants.CT_FHIR_XML + "; charset=UTF-8"));

		IGenericClient client = ourCtx.newRestfulGenericClient("http://example.com/fhir");
		int count = 0;

		//@formatter:off
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader("")));
		client
			.update()
			.resource(getResource())
			.withId(new IdDt("Patient/1234"))
			.execute();
		//@formatter:on
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count).getURI().toString());
		assertEquals(0, capt.getAllValues().get(count).getHeaders(Constants.HEADER_IF_MATCH_LC).length);
		count++;

		//@formatter:off
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader("")));
		client
			.update()
			.resource(getResource())
			.withId(new IdDt("Patient/1234/_history/9876"))
			.execute();
		//@formatter:on
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count).getURI().toString());
		assertEquals("\"9876\"", capt.getAllValues().get(count).getHeaders(Constants.HEADER_IF_MATCH_LC)[0].getValue());
		count++;
	}

	// A 412 response to a versioned update must surface as PreconditionFailedException,
	// whether the version comes from withId() or from the resource's own id.
	@Test
	public void testUpdateWithIfMatchWithPreconditionFailed() throws Exception {
		ArgumentCaptor<HttpUriRequest> capt = ArgumentCaptor.forClass(HttpUriRequest.class);
		when(myHttpClient.execute(capt.capture())).thenReturn(myHttpResponse);
		when(myHttpResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), Constants.STATUS_HTTP_412_PRECONDITION_FAILED, "Precondition Failed"));
		when(myHttpResponse.getEntity().getContentType()).thenReturn(new BasicHeader("content-type", Constants.CT_FHIR_XML + "; charset=UTF-8"));

		IGenericClient client = ourCtx.newRestfulGenericClient("http://example.com/fhir");
		int count = 0;

		//@formatter:off
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader("")));
		try {
			client
				.update()
				.resource(getResource())
				.withId(new IdDt("Patient/1234/_history/9876"))
				.execute();
			fail();
		} catch (PreconditionFailedException e) {
			// good
		}
		//@formatter:on
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count).getURI().toString());
		assertEquals("\"9876\"", capt.getAllValues().get(count).getHeaders(Constants.HEADER_IF_MATCH_LC)[0].getValue());
		count++;

		//@formatter:off
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader("")));
		try {
			Patient resource = getResource();
			resource.setId(new IdDt("Patient/1234/_history/9876"));
			client
				.update()
				.resource(resource)
				.execute();
			fail();
		} catch (PreconditionFailedException e) {
			// good
		}
		//@formatter:on
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count).getURI().toString());
		assertEquals("\"9876\"", capt.getAllValues().get(count).getHeaders(Constants.HEADER_IF_MATCH_LC)[0].getValue());
		count++;
	}

	// Exercises the various read() fluent forms (by class+id, by name+id, by
	// id+version, by absolute URL) and the URLs they generate.
	@Test
	public void testReadWithETag() throws Exception {
		String msg = getResourceResult();

		ArgumentCaptor<HttpUriRequest> capt = ArgumentCaptor.forClass(HttpUriRequest.class);
		when(myHttpClient.execute(capt.capture())).thenReturn(myHttpResponse);
		when(myHttpResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), 200, "OK"));
		when(myHttpResponse.getEntity().getContentType()).thenReturn(new BasicHeader("content-type", Constants.CT_FHIR_XML + "; charset=UTF-8"));
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader(msg), Charset.forName("UTF-8")));
		Header[] headers = new Header[] {
				new BasicHeader(Constants.HEADER_LAST_MODIFIED, "Wed, 15 Nov 1995 04:58:08 GMT"),
				new BasicHeader(Constants.HEADER_CONTENT_LOCATION, "http://foo.com/Patient/123/_history/2333"),
				new BasicHeader(Constants.HEADER_CATEGORY, "http://foo/tagdefinition.html; scheme=\"http://hl7.org/fhir/tag\"; label=\"Some tag\"")
		};
		when(myHttpResponse.getAllHeaders()).thenReturn(headers);

		IGenericClient client = ourCtx.newRestfulGenericClient("http://example.com/fhir");
		int count = 0;

		Patient response = client.read().resource(Patient.class).withId(new IdDt("Patient/1234")).execute();
		assertThat(response.getNameFirstRep().getFamilyAsSingleString(), StringContains.containsString("Cardinal"));
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count++).getURI().toString());

		// The content stream is consumed per request, so it is re-stubbed before each call
		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader(msg), Charset.forName("UTF-8")));
		response = (Patient) client.read().resource("Patient").withId("1234").execute();
		assertThat(response.getNameFirstRep().getFamilyAsSingleString(), StringContains.containsString("Cardinal"));
		assertEquals("http://example.com/fhir/Patient/1234", capt.getAllValues().get(count++).getURI().toString());

		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader(msg), Charset.forName("UTF-8")));
		response = client.read().resource(Patient.class).withIdAndVersion("1234", "22").execute();
		assertThat(response.getNameFirstRep().getFamilyAsSingleString(), StringContains.containsString("Cardinal"));
		assertEquals("http://example.com/fhir/Patient/1234/_history/22", capt.getAllValues().get(count++).getURI().toString());

		when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader(msg), Charset.forName("UTF-8")));
		response = client.read().resource(Patient.class).withUrl("http://foo/Patient/22").execute();
		assertThat(response.getNameFirstRep().getFamilyAsSingleString(), StringContains.containsString("Cardinal"));
		assertEquals("http://foo/Patient/22", capt.getAllValues().get(count++).getURI().toString());
	}

	@BeforeClass
	public static void beforeClass() {
		ourCtx = FhirContext.forDstu2();
	}

}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.chime.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Chime BatchUpdateUser operation. This class is
 * code-generated; do not hand-edit its structure.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/chime-2018-05-01/BatchUpdateUser" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BatchUpdateUserRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The Amazon Chime account ID.
     * </p>
     */
    private String accountId;
    /**
     * <p>
     * The request containing the user IDs and details to update.
     * </p>
     */
    private java.util.List<UpdateUserRequestItem> updateUserRequestItems;

    /**
     * <p>
     * The Amazon Chime account ID.
     * </p>
     *
     * @param accountId
     *        The Amazon Chime account ID.
     */
    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    /**
     * <p>
     * The Amazon Chime account ID.
     * </p>
     *
     * @return The Amazon Chime account ID.
     */
    public String getAccountId() {
        return this.accountId;
    }

    /**
     * <p>
     * The Amazon Chime account ID.
     * </p>
     *
     * @param accountId
     *        The Amazon Chime account ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BatchUpdateUserRequest withAccountId(String accountId) {
        setAccountId(accountId);
        return this;
    }

    /**
     * <p>
     * The request containing the user IDs and details to update.
     * </p>
     *
     * @return The request containing the user IDs and details to update.
     */
    public java.util.List<UpdateUserRequestItem> getUpdateUserRequestItems() {
        return updateUserRequestItems;
    }

    /**
     * <p>
     * The request containing the user IDs and details to update.
     * </p>
     *
     * @param updateUserRequestItems
     *        The request containing the user IDs and details to update.
     */
    public void setUpdateUserRequestItems(java.util.Collection<UpdateUserRequestItem> updateUserRequestItems) {
        if (updateUserRequestItems == null) {
            this.updateUserRequestItems = null;
            return;
        }

        // Defensive copy: the stored list is independent of the caller's collection
        this.updateUserRequestItems = new java.util.ArrayList<UpdateUserRequestItem>(updateUserRequestItems);
    }

    /**
     * <p>
     * The request containing the user IDs and details to update.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setUpdateUserRequestItems(java.util.Collection)} or
     * {@link #withUpdateUserRequestItems(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param updateUserRequestItems
     *        The request containing the user IDs and details to update.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BatchUpdateUserRequest withUpdateUserRequestItems(UpdateUserRequestItem... updateUserRequestItems) {
        if (this.updateUserRequestItems == null) {
            setUpdateUserRequestItems(new java.util.ArrayList<UpdateUserRequestItem>(updateUserRequestItems.length));
        }
        for (UpdateUserRequestItem ele : updateUserRequestItems) {
            this.updateUserRequestItems.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The request containing the user IDs and details to update.
     * </p>
     *
     * @param updateUserRequestItems
     *        The request containing the user IDs and details to update.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BatchUpdateUserRequest withUpdateUserRequestItems(java.util.Collection<UpdateUserRequestItem> updateUserRequestItems) {
        setUpdateUserRequestItems(updateUserRequestItems);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAccountId() != null)
            sb.append("AccountId: ").append(getAccountId()).append(",");
        if (getUpdateUserRequestItems() != null)
            sb.append("UpdateUserRequestItems: ").append(getUpdateUserRequestItems());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof BatchUpdateUserRequest == false)
            return false;
        BatchUpdateUserRequest other = (BatchUpdateUserRequest) obj;
        // Generated null-safe field comparisons (xor detects one-sided null)
        if (other.getAccountId() == null ^ this.getAccountId() == null)
            return false;
        if (other.getAccountId() != null && other.getAccountId().equals(this.getAccountId()) == false)
            return false;
        if (other.getUpdateUserRequestItems() == null ^ this.getUpdateUserRequestItems() == null)
            return false;
        if (other.getUpdateUserRequestItems() != null && other.getUpdateUserRequestItems().equals(this.getUpdateUserRequestItems()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getAccountId() == null) ? 0 : getAccountId().hashCode());
        hashCode = prime * hashCode + ((getUpdateUserRequestItems() == null) ? 0 : getUpdateUserRequestItems().hashCode());
        return hashCode;
    }

    @Override
    public BatchUpdateUserRequest clone() {
        return (BatchUpdateUserRequest) super.clone();
    }

}
package org.robolectric.shadows;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Point;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Looper;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewParent;
import org.robolectric.Robolectric;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.annotation.RealObject;
import org.robolectric.internal.HiddenApi;
import org.robolectric.internal.ReflectionHelpers;

import java.io.PrintStream;
import java.lang.reflect.Method;

import static org.robolectric.Robolectric.directlyOn;
import static org.robolectric.Robolectric.shadowOf;
import static org.robolectric.bytecode.RobolectricInternals.invokeConstructor;

/**
 * Shadow implementation of {@code View} that simulates the behavior of this
 * class.
 * <p/>
 * Supports listeners, focusability (but not focus order), resource loading,
 * visibility, onclick, tags, and tracks the size and shape of the view.
 */
@Implements(View.class)
public class ShadowView {
  public static final String ANDROID_NS = "http://schemas.android.com/apk/res/android";

  /** The real {@link View} instance this shadow is attached to. */
  @RealObject
  protected View realView;

  // Listeners are recorded here (for test-side accessors below) in addition
  // to being forwarded to the real View via directly().
  private View.OnClickListener onClickListener;
  private View.OnLongClickListener onLongClickListener;
  private View.OnFocusChangeListener onFocusChangeListener;
  private View.OnSystemUiVisibilityChangeListener onSystemUiVisibilityChangeListener;
  private boolean wasInvalidated;
  private View.OnTouchListener onTouchListener;
  protected AttributeSet attributeSet;
  // Last coordinates passed to scrollTo(); also backs getScrollX()/getScrollY().
  public Point scrollToCoordinates = new Point();
  private boolean didRequestLayout;
  private MotionEvent lastTouchEvent;
  private float scaleX = 1.0f;
  private float scaleY = 1.0f;
  // -1 means "no haptic feedback performed yet".
  private int hapticFeedbackPerformed = -1;
  private boolean onLayoutWasCalled;
  private View.OnCreateContextMenuListener onCreateContextMenuListener;

  /**
   * Shadow constructor hook: captures the AttributeSet, then invokes the real
   * View(Context, AttributeSet, int) constructor reflectively.
   */
  public void __constructor__(Context context, AttributeSet attributeSet, int defStyle) {
    if (context == null) throw new NullPointerException("no context");
    this.attributeSet = attributeSet;

    invokeConstructor(View.class, realView,
        new ReflectionHelpers.ClassParameter(Context.class, context),
        new ReflectionHelpers.ClassParameter(AttributeSet.class, attributeSet),
        new ReflectionHelpers.ClassParameter(int.class, defStyle));
  }

  /**
   * Build drawable, either LayerDrawable or BitmapDrawable.
   *
   * @param resourceId Resource id
   * @return Drawable
   */
  protected Drawable buildDrawable(int resourceId) {
    return realView.getResources().getDrawable(resourceId);
  }

  /** Returns the resource qualifiers of this view's current configuration. */
  protected String getQualifiers() {
    return shadowOf(realView.getResources().getConfiguration()).getQualifiers();
  }

  /**
   * Non-Android accessor.
   *
   * @return the resource ID of this view's background, or -1 if the background
   *         is not a BitmapDrawable created from a resource
   * @deprecated Use FEST assertions instead.
   */
  public int getBackgroundResourceId() {
    Drawable drawable = realView.getBackground();
    return drawable instanceof BitmapDrawable
        ? shadowOf(((BitmapDrawable) drawable).getBitmap()).getCreatedFromResId()
        : -1;
  }

  /**
   * Non-Android accessor.
   *
   * @return the color of this view's background, or 0 if it's not a solid color
   * @deprecated Use FEST assertions instead.
   */
  public int getBackgroundColor() {
    Drawable drawable = realView.getBackground();
    return drawable instanceof ColorDrawable ? ((ColorDrawable) drawable).getColor() : 0;
  }

  // No-op: the real implementation is a hidden API.
  @HiddenApi @Implementation
  public void computeOpaqueFlags() {
  }

  // Each setter below records the listener for test access, then forwards the
  // call to the real View implementation.
  @Implementation
  public void setOnFocusChangeListener(View.OnFocusChangeListener l) {
    onFocusChangeListener = l;
    directly().setOnFocusChangeListener(l);
  }

  @Implementation
  public void setOnClickListener(View.OnClickListener onClickListener) {
    this.onClickListener = onClickListener;
    directly().setOnClickListener(onClickListener);
  }

  @Implementation
  public void setOnLongClickListener(View.OnLongClickListener onLongClickListener) {
    this.onLongClickListener = onLongClickListener;
    directly().setOnLongClickListener(onLongClickListener);
  }

  @Implementation
  public void setOnSystemUiVisibilityChangeListener(View.OnSystemUiVisibilityChangeListener onSystemUiVisibilityChangeListener) {
    this.onSystemUiVisibilityChangeListener = onSystemUiVisibilityChangeListener;
    directly().setOnSystemUiVisibilityChangeListener(onSystemUiVisibilityChangeListener);
  }

  @Implementation
  public void setOnCreateContextMenuListener(View.OnCreateContextMenuListener onCreateContextMenuListener) {
    this.onCreateContextMenuListener = onCreateContextMenuListener;
    directly().setOnCreateContextMenuListener(onCreateContextMenuListener);
  }

  /**
   * Draws only the background (tagged in the shadow canvas description);
   * does not render the view's content.
   */
  @Implementation
  public void draw(android.graphics.Canvas canvas) {
    Drawable background = realView.getBackground();
    if (background != null) {
      shadowOf(canvas).appendDescription("background:");
      background.draw(canvas);
    }
  }

  /** Records that onLayout ran, then delegates to the real implementation. */
  @Implementation
  public void onLayout(boolean changed, int left, int top, int right, int bottom) {
    onLayoutWasCalled = true;
    directlyOn(realView, View.class, "onLayout",
        new ReflectionHelpers.ClassParameter(boolean.class, changed),
        new ReflectionHelpers.ClassParameter(int.class, left),
        new ReflectionHelpers.ClassParameter(int.class, top),
        new ReflectionHelpers.ClassParameter(int.class, right),
        new ReflectionHelpers.ClassParameter(int.class, bottom));
  }

  /** Non-Android accessor: whether {@link #onLayout} has been called. */
  public boolean onLayoutWasCalled() {
    return onLayoutWasCalled;
  }

  @Implementation
  public void requestLayout() {
    didRequestLayout = true;
    directly().requestLayout();
  }

  /** Non-Android accessor: whether {@link #requestLayout()} has been called. */
  public boolean didRequestLayout() {
    return didRequestLayout;
  }

  /** Non-Android accessor: resets/overrides the requestLayout flag. */
  public void setDidRequestLayout(boolean didRequestLayout) {
    this.didRequestLayout = didRequestLayout;
  }

  /**
   * Non-Android accessor: fires the registered focus-change listener directly
   * (does not change the real view's focus state).
   */
  public void setViewFocus(boolean hasFocus) {
    if (onFocusChangeListener != null) {
      onFocusChangeListener.onFocusChange(realView, hasFocus);
    }
  }

  @Implementation
  public void invalidate() {
    wasInvalidated = true;
    directly().invalidate();
  }

  /** Records the event (see {@link #getLastTouchEvent()}) then delegates. */
  @Implementation
  public boolean onTouchEvent(MotionEvent event) {
    lastTouchEvent = event;
    return directly().onTouchEvent(event);
  }

  @Implementation
  public void setOnTouchListener(View.OnTouchListener onTouchListener) {
    this.onTouchListener = onTouchListener;
    directly().setOnTouchListener(onTouchListener);
  }

  /** Non-Android accessor: the last event passed to {@link #onTouchEvent}. */
  public MotionEvent getLastTouchEvent() {
    return lastTouchEvent;
  }

  /**
   * Returns a string representation of this {@code View}. Unless overridden, it will be an empty string.
   * <p/>
   * Robolectric extension.
   */
  public String innerText() {
    return "";
  }

  /**
   * Dumps the status of this {@code View} to {@code System.out}
   */
  public void dump() {
    dump(System.out, 0);
  }

  /**
   * Dumps the status of this {@code View} to the given stream at the given indentation level
   */
  public void dump(PrintStream out, int indent) {
    dumpFirstPart(out, indent);
    out.println("/>");
  }

  /** Writes the opening XML-ish tag and attributes for this view. */
  protected void dumpFirstPart(PrintStream out, int indent) {
    dumpIndent(out, indent);

    out.print("<" + realView.getClass().getSimpleName());
    dumpAttributes(out);
  }

  /** Writes id and (non-VISIBLE) visibility attributes. */
  protected void dumpAttributes(PrintStream out) {
    if (realView.getId() > 0) {
      dumpAttribute(out, "id", shadowOf(realView.getContext()).getResourceLoader().getNameForId(realView.getId()));
    }

    switch (realView.getVisibility()) {
      case View.VISIBLE:
        break;
      case View.INVISIBLE:
        dumpAttribute(out, "visibility", "INVISIBLE");
        break;
      case View.GONE:
        dumpAttribute(out, "visibility", "GONE");
        break;
    }
  }

  /** Writes one name="value" pair; the value is HTML-encoded. */
  protected void dumpAttribute(PrintStream out, String name, String value) {
    out.print(" " + name + "=\"" + (value == null ? null : TextUtils.htmlEncode(value)) + "\"");
  }

  /** Writes {@code indent} spaces. */
  protected void dumpIndent(PrintStream out, int indent) {
    for (int i = 0; i < indent; i++) out.print(" ");
  }

  /**
   * Non-Android accessor.
   *
   * @return whether or not {@link #invalidate()} has been called
   */
  public boolean wasInvalidated() {
    return wasInvalidated;
  }

  /**
   * Clears the wasInvalidated flag
   */
  public void clearWasInvalidated() {
    wasInvalidated = false;
  }

  /**
   * Utility method for clicking on views exposing testing scenarios that are not possible when using the actual app.
   *
   * @throws RuntimeException if the view is disabled or if the view or any of its parents are not visible.
   */
  public boolean checkedPerformClick() {
    if (!realView.isShown()) {
      throw new RuntimeException("View is not visible and cannot be clicked");
    }
    if (!realView.isEnabled()) {
      throw new RuntimeException("View is not enabled and cannot be clicked");
    }

    return realView.performClick();
  }

  /**
   * Non-android accessor. Returns touch listener, if set.
   */
  public View.OnTouchListener getOnTouchListener() {
    return onTouchListener;
  }

  /**
   * Non-android accessor. Returns click listener, if set.
   */
  public View.OnClickListener getOnClickListener() {
    return onClickListener;
  }

  /**
   * Non-android accessor. Returns long click listener, if set.
   */
  public View.OnLongClickListener getOnLongClickListener() {
    return onLongClickListener;
  }

  /**
   * Non-android accessor. Returns system UI visibility change listener, if set.
   */
  public View.OnSystemUiVisibilityChangeListener getOnSystemUiVisibilityChangeListener() {
    return onSystemUiVisibilityChangeListener;
  }

  /**
   * Non-android accessor. Returns create ContextMenu listener, if set.
   */
  public View.OnCreateContextMenuListener getOnCreateContextMenuListener() {
    return onCreateContextMenuListener;
  }

  /** Returns a fresh Bitmap rather than a real drawing cache. */
  @Implementation
  public Bitmap getDrawingCache() {
    return Robolectric.newInstanceOf(Bitmap.class);
  }

  // NOTE(review): the real View.post/postDelayed return boolean; these shadows
  // are declared void — confirm against the Robolectric version in use.
  @Implementation
  public void post(Runnable action) {
    Robolectric.getUiThreadScheduler().post(action);
  }

  @Implementation
  public void postDelayed(Runnable action, long delayMills) {
    Robolectric.getUiThreadScheduler().postDelayed(action, delayMills);
  }

  /** Schedules an invalidate() on the UI-thread scheduler. */
  @Implementation
  public void postInvalidateDelayed(long delayMilliseconds) {
    Robolectric.getUiThreadScheduler().postDelayed(new Runnable() {
      @Override
      public void run() {
        realView.invalidate();
      }
    }, delayMilliseconds);
  }

  @Implementation
  public void removeCallbacks(Runnable callback) {
    shadowOf(Looper.getMainLooper()).getScheduler().remove(callback);
  }

  /**
   * Invokes the protected View.onScrollChanged(x, y, oldX, oldY) reflectively,
   * then records the new coordinates.
   */
  @Implementation
  public void scrollTo(int x, int y) {
    try {
      Method method = View.class.getDeclaredMethod("onScrollChanged", new Class[]{int.class, int.class, int.class, int.class});
      method.setAccessible(true);
      method.invoke(realView, x, y, scrollToCoordinates.x, scrollToCoordinates.y);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    scrollToCoordinates = new Point(x, y);
  }

  @Implementation
  public int getScrollX() {
    return scrollToCoordinates != null ? scrollToCoordinates.x : 0;
  }

  @Implementation
  public int getScrollY() {
    return scrollToCoordinates != null ? scrollToCoordinates.y : 0;
  }

  @Implementation
  public void setScrollX(int scrollX) {
    scrollTo(scrollX, scrollToCoordinates.y);
  }

  @Implementation
  public void setScrollY(int scrollY) {
    scrollTo(scrollToCoordinates.x, scrollY);
  }

  @Implementation
  public void setScaleX(float scaleX) {
    this.scaleX = scaleX;
  }

  @Implementation
  public float getScaleX() {
    return scaleX;
  }

  @Implementation
  public void setScaleY(float scaleY) {
    this.scaleY = scaleY;
  }

  @Implementation
  public float getScaleY() {
    return scaleY;
  }

  @Implementation
  public void onAnimationEnd() {
  }

  /*
   * Non-Android accessor. Invokes the view's (possibly overridden)
   * onAnimationEnd reflectively.
   * NOTE(review): looks up the method on realView.getClass() only — an
   * override declared on a superclass may not be found; confirm intent.
   */
  public void finishedAnimation() {
    try {
      Method onAnimationEnd = realView.getClass().getDeclaredMethod("onAnimationEnd", new Class[0]);
      onAnimationEnd.setAccessible(true);
      onAnimationEnd.invoke(realView);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /** Attached == the real view's hidden mAttachInfo field is non-null. */
  public boolean isAttachedToWindow() {
    return ReflectionHelpers.getFieldReflectively(realView, "mAttachInfo") != null;
  }

  /** Non-Android accessor: invokes the protected onAttachedToWindow(). */
  public void callOnAttachedToWindow() {
    invokeReflectively("onAttachedToWindow");
  }

  /** Non-Android accessor: invokes the protected onDetachedFromWindow(). */
  public void callOnDetachedFromWindow() {
    invokeReflectively("onDetachedFromWindow");
  }

  private void invokeReflectively(String methodName) {
    ReflectionHelpers.callInstanceMethodReflectively(realView, methodName);
  }

  /** Always reports success and records the feedback type. */
  @Implementation
  public boolean performHapticFeedback(int hapticFeedbackType) {
    hapticFeedbackPerformed = hapticFeedbackType;
    return true;
  }

  /** Non-Android accessor: last haptic feedback type, or -1 if none. */
  public int lastHapticFeedbackPerformed() {
    return hapticFeedbackPerformed;
  }

  /** Non-Android accessor: assigns the view's parent via the real assignParent. */
  public void setMyParent(ViewParent viewParent) {
    directlyOn(realView, View.class, "assignParent", new ReflectionHelpers.ClassParameter(ViewParent.class, viewParent));
  }

  /** Shorthand for calling through to the real View implementation. */
  private View directly() {
    return directlyOn(realView, View.class);
  }
}
package gov.nih.ncbi.jobs; import gov.nih.ncbi.data.TsvParser; import gov.nih.ncbi.solr.SolrDatabases; import gov.nih.ncbi.solr.SolrProvider; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrInputDocument; /** * * @author Lena Pons * NCBI Hackathon 2016 - Metadata Sorting group * * reads in BioSample data from tsv and populates the Annotations or AnnotationsDev solr core * * tsv file must include these fields * (=> indicates which solr fields are populated from these source data): * BioSampleId => id * Cell_Line, Sample_Name, Sample_Title, ExperimentTitle => sourceCellLine * Cell_Type => sourceCellType * Organism => sourceSpecies * Tissue => sourceAnatomy * Disease, Health_State, Phenotype => sourceDisease * Treatment => sourceCellTreatment * */ public class PopulateDrosoJob { public static void main(String[] args) { //filename of the tsv file used to populate the records String sourceFile = args[0]; //database name to access the proper Solr core String dbName = args [1]; SolrDatabases db = null; if (dbName.equalsIgnoreCase("annotations")) { db = SolrDatabases.Annotations; } else if (dbName.equalsIgnoreCase("AnnotationsDev")){ db= SolrDatabases.AnnotationsDev; } SolrClient solr = SolrProvider.getConnection(db); try { TsvParser parser = new TsvParser (sourceFile); //read header row parser.readHeaders(); int count = 0; int queueId = 0; int taxId = 7227; SolrQuery query = new SolrQuery("queueId:*").setSort("queueId", SolrQuery.ORDER.desc); QueryResponse response = solr.query(query); SolrDocumentList docs = response.getResults(); queueId = ((Integer)docs.get(0).get("queueId")); queueId++; while (parser.readRecord()) { SolrInputDocument doc = new SolrInputDocument (); doc.setField("queueId", queueId); doc.setField("taxId", taxId); try { if (!parser.get("BioSampleID").equals(".")){ try 
{ doc.addField("id", Integer.parseInt(parser.get("BioSampleID"))); } catch (NumberFormatException nfe) { System.err.println("Number FormatException"); continue; } } //this populates the cell line field with the best possible clue try { if (!parser.get("Cell_Line").equals(".")) { doc.addField("sourceCellLine", parser.get("Cell_Line")); } else if (!parser.get("Sample_Name").equals(".")){ doc.addField("sourceCellLine", parser.get("Sample_Name")); } else if (! parser.get("Sample_Title").equals(".")){ doc.addField("sourceCellLine", parser.get("Sample_Title")); } else if (! parser.get("ExperimentTitle").equals(".")){ doc.addField("sourceCellLine", parser.get("ExperimentTitle")); } else { // System.err.println("No available data for this field"); doc.addField("sourceCellLine", "0"); } } catch (NullPointerException e) { doc.addField("sourceCellLine", "0"); } try { if (!parser.get("Sample_Name").equals(".")){ doc.addField("sampleName", parser.get("Sample_Name")); } else { doc.addField("sampleName", "0"); } } catch (NullPointerException e) { doc.addField("sampleName", "0"); } if (!parser.get("Sample_Title").equals(".")){ doc.addField("sampleTitle", parser.get("Sample_Title")); } else { doc.addField("sampleTitle", "0"); } if (!parser.get("Cell_Type").equals(".")){ doc.addField("sourceCellType", parser.get("Cell_Type")); } else { doc.addField("sourceCellType", "0"); } if (!parser.get("Organism").equals(".")){ doc.addField("sourceSpecies", parser.get("Organism")); } else { doc.addField("sourceSpecies", "0"); } if (!parser.get("Tissue").equals(".")){ doc.addField("sourceAnatomy", parser.get("Tissue")); } else { doc.addField("sourceAnatomy", "0"); } //construct disease blob String disease = ""; String healthState = ""; String phenotype = ""; if (!parser.get("Disease").equals(".")){ disease = parser.get("Disease"); } if (!parser.get("Health_State").equals(".")){ healthState = parser.get("Health_State"); } if (!parser.get("Phenotype").equals(".")){ phenotype = 
parser.get("Phenotype"); } StringBuilder build = new StringBuilder(); build.append(disease + " "); build.append(healthState + " "); build.append(phenotype); String trimmed = build.toString().trim(); if (!trimmed.equals("")){ doc.addField("sourceDisease", build.toString().trim()); } else{ doc.addField("sourceDisease", "0"); } // if (!parser.get("Treatment").equals(".")){ // doc.addField("sourceCellTreatment", parser.get("Treatment")); // } // else{ // doc.addField("sourceCellTreatment", "0"); // } doc.addField("sourceCellTreatment", "0"); doc.addField("sourceSex", "0"); doc.addField("sourceDevStage", "0"); //set all the annotation fields to default 0 doc.addField("annotCellLine", "0"); doc.addField("annotCellType", "0"); doc.addField("annotSpecies", "0"); doc.addField("annotAnatomy", "0"); doc.addField("annotCellTreatment", "0"); doc.addField("annotSex", "0"); doc.addField("annotDevStage", "0"); } catch (ArrayIndexOutOfBoundsException | NullPointerException e) { continue; } if (count < 1000) { count ++; } else { queueId++; count = 0; System.out.println ("queue " + queueId + " completed"); solr.commit(); } try { solr.add(doc); } catch (Exception e) { // e.printStackTrace(); System.err.println(parser.get("BioSampleId") + "Skipped"); continue; } } solr.commit(); parser.close(); } catch (Exception e) { e.printStackTrace(); } } }
package jMEF;

import java.util.Arrays;
import java.util.Locale;

/**
 * @author Vincent Garcia
 * @author Frank Nielsen
 * @version 1.0
 *
 * @section License
 *
 * See file LICENSE.txt
 *
 * @section Description
 *
 * A statistical distribution is parameterized by a set of values (parameters).
 * The PMatrix class implements a parameter object.
 * Parameters are represented as a matrix.
 */
public class PMatrix extends Parameter {

	/**
	 * Constant for serialization.
	 */
	private static final long serialVersionUID = 1L;

	/**
	 * Dimension of the (square) matrix.
	 */
	public int dim;

	/**
	 * Array containing the values of the matrix (dim x dim).
	 */
	public double[][] array;

	/**
	 * Class constructor.
	 * @param dim dimension of the matrix
	 */
	public PMatrix(int dim) {
		this.dim = dim;
		this.array = new double[dim][dim];
	}

	/**
	 * Class constructor by copy (deep copy of the values).
	 * @param M matrix to copy
	 */
	public PMatrix(PMatrix M) {
		this.dim = M.dim;
		this.array = new double[this.dim][this.dim];
		for (int i = 0; i < this.dim; i++)
			for (int j = 0; j < this.dim; j++)
				this.array[i][j] = M.array[i][j];
	}

	/**
	 * Adds (not in place) the current matrix \f$ m_1 \f$ to the matrix \f$ m_2 \f$.
	 * @param m2 matrix \f$ m_2 \f$
	 * @return \f$ m_1 + m_2 \f$
	 */
	public PMatrix Plus(Parameter m2) {
		PMatrix Q = (PMatrix) m2;
		PMatrix result = new PMatrix(this.dim);
		for (int i = 0; i < this.dim; i++)
			for (int j = 0; j < this.dim; j++)
				result.array[i][j] = this.array[i][j] + Q.array[i][j];
		return result;
	}

	/**
	 * Subtracts (not in place) the matrix \f$ m_2 \f$ from the current matrix \f$ m_1 \f$.
	 * @param m2 matrix \f$ m_2 \f$
	 * @return \f$ m_1 - m_2 \f$
	 */
	public PMatrix Minus(Parameter m2) {
		PMatrix Q = (PMatrix) m2;
		PMatrix result = new PMatrix(this.dim);
		for (int i = 0; i < this.dim; i++)
			for (int j = 0; j < this.dim; j++)
				result.array[i][j] = this.array[i][j] - Q.array[i][j];
		return result;
	}

	/**
	 * Multiplies (not in place) the current matrix \f$ m \f$ by a real number \f$ \lambda \f$.
	 * @param lambda value \f$ \lambda \f$
	 * @return \f$ \lambda m \f$
	 */
	public PMatrix Times(double lambda) {
		PMatrix result = new PMatrix(this.dim);
		for (int i = 0; i < this.dim; i++)
			for (int j = 0; j < this.dim; j++)
				result.array[i][j] = this.array[i][j] * lambda;
		return result;
	}

	/**
	 * Computes the inner product (real number) between the current matrix \f$ m_1 \f$ and the matrix \f$ m_2 \f$.
	 * @param m2 matrix \f$ m_2 \f$
	 * @return \f$ tr(m_1 . m_2^\top) \f$
	 */
	public double InnerProduct(Parameter m2) {
		PMatrix Q = (PMatrix) m2;
		return (this.Multiply(Q.Transpose())).Trace();
	}

	/**
	 * Multiplies (not in place) the current matrix \f$ m_1 \f$ by the matrix \f$ m_2 \f$.
	 * @param m2 matrix \f$ m_2 \f$
	 * @return \f$ m_1 m_2 \f$
	 */
	public PMatrix Multiply(PMatrix m2) {
		PMatrix result = new PMatrix(this.dim);
		double sum;
		for (int i = 0; i < this.dim; i++)
			for (int j = 0; j < this.dim; j++) {
				sum = 0.0d;
				for (int k = 0; k < this.dim; k++)
					sum += this.array[i][k] * m2.array[k][j];
				result.array[i][j] = sum;
			}
		return result;
	}

	/**
	 * Multiplies (not in place) the current matrix \f$ m \f$ by a vector \f$ v \f$.
	 * @param v vector \f$ v \f$
	 * @return \f$ m . v \f$
	 */
	public PVector MultiplyVectorRight(PVector v) {
		PVector result = new PVector(v.dim);
		double sum;
		for (int i = 0; i < this.dim; i++) {
			sum = 0.0d;
			for (int j = 0; j < this.dim; j++)
				sum += this.array[i][j] * v.array[j];
			result.array[i] = sum;
		}
		return result;
	}

	/**
	 * Computes the inverse of the current matrix \f$ m \f$ using Gauss-Jordan elimination.
	 * @return \f$ m^{-1} \f$
	 * @throws ArithmeticException if the matrix is singular (not invertible)
	 */
	public PMatrix Inverse() {
		PMatrix result = new PMatrix(this);
		GaussJordan(result.array, this.dim);
		return result;
	}

	/**
	 * Gauss-Jordan elimination with full pivoting, in place.
	 * @param a matrix to invert (overwritten with the inverse)
	 * @param dim dimension of the matrix
	 * @throws ArithmeticException if no non-zero pivot can be found
	 */
	private static void GaussJordan(double a[][], int dim) {
		double big, save;
		int i, j, k, L;
		int[] ik = new int[dim];
		int[] jk = new int[dim];

		for (k = 0; k < dim; k++) {
			// Find the biggest remaining element to use as the pivot.
			big = 0.0d;
			for (i = k; i < dim; i++)
				for (j = k; j < dim; j++)
					if (Math.abs(big) <= Math.abs(a[i][j])) {
						big = a[i][j];
						ik[k] = i;
						jk[k] = j;
					}

			// Fixed: a zero pivot previously fell through and produced NaN/Infinity
			// from the divisions below. Fail loudly instead (author's own TODO).
			if (big == 0.0) {
				throw new ArithmeticException("MEF|Matrix is not invertible!");
			}

			// Bring the pivot onto the diagonal by row/column exchange.
			i = ik[k];
			if (i > k)
				for (j = 0; j < dim; j++) {
					save = a[k][j];
					a[k][j] = a[i][j];
					a[i][j] = -save;
				}
			j = jk[k];
			if (j > k)
				for (i = 0; i < dim; i++) {
					save = a[i][k];
					a[i][k] = a[i][j];
					a[i][j] = -save;
				}

			// Build the inverse by eliminating the pivot column/row.
			for (i = 0; i < dim; i++)
				if (i != k)
					a[i][k] = -a[i][k] / big;
			for (i = 0; i < dim; i++)
				for (j = 0; j < dim; j++)
					if ((i != k) && (j != k))
						a[i][j] += a[i][k] * a[k][j];
			for (j = 0; j < dim; j++)
				if (j != k)
					a[k][j] /= big;
			a[k][k] = 1.0 / big;
		}

		// Undo the row/column exchanges, in reverse pivot order.
		for (L = 0; L < dim; L++) {
			k = dim - L - 1;
			j = ik[k];
			if (j > k)
				for (i = 0; i < dim; i++) {
					save = a[i][k];
					a[i][k] = -a[i][j];
					a[i][j] = save;
				}
			i = jk[k];
			if (i > k)
				for (j = 0; j < dim; j++) {
					save = a[k][j];
					a[k][j] = -a[i][j];
					a[i][j] = save;
				}
		}
	}

	/**
	 * Transposes the current matrix \f$ m \f$.
	 * @return \f$ m^\top \f$
	 */
	public PMatrix Transpose() {
		PMatrix T = new PMatrix(this.dim);
		for (int i = 0; i < this.dim; i++)
			for (int j = 0; j < this.dim; j++)
				T.array[i][j] = this.array[j][i];
		return T;
	}

	/**
	 * Computes the determinant of the current matrix \f$ m \f$ by cofactor
	 * expansion along the first row.
	 * Note: this is O(dim!) — only practical for small matrices.
	 * @return \f$ \det(m) \f$
	 */
	public double Determinant() {
		double result = 0.0d;

		if (this.dim == 1)
			return array[0][0];

		PMatrix SubMatrix = new PMatrix(this.dim - 1);
		for (int i = 0; i < this.dim; i++) {
			// Minor of element (0, i).
			for (int j = 1; j < this.dim; j++) {
				for (int k = 0; k < this.dim; k++) {
					if (k < i)
						SubMatrix.array[j - 1][k] = array[j][k];
					else if (k > i)
						SubMatrix.array[j - 1][k - 1] = array[j][k];
				}
			}
			result += array[0][i] * Math.pow(-1, (double) i) * SubMatrix.Determinant();
		}
		return result;
	}

	/**
	 * Computes the trace of the current matrix \f$ m \f$.
	 * @return \f$ tr(m) \f$
	 */
	public double Trace() {
		double tr = 0.0d;
		for (int i = 0; i < this.dim; i++)
			tr += this.array[i][i];
		return tr;
	}

	/**
	 * Generates a random matrix \f$ m \f$ where each element is drawn from \f$ \mathcal{U}(0,1) \f$.
	 * @param dim dimension of the matrix
	 * @return random matrix \f$ m \f$
	 */
	public static PMatrix Random(int dim) {
		PMatrix m = new PMatrix(dim);
		for (int i = 0; i < dim; i++)
			for (int j = 0; j < dim; j++)
				m.array[i][j] = Math.random();
		return m;
	}

	/**
	 * Generates a random matrix \f$ m \f$ such that \f$ m \f$ is positive definite:
	 * draw a lower triangular matrix \f$ L \f$ at random and return \f$ LL^\top \f$.
	 * @param dim dimension of the matrix
	 * @return random matrix \f$ m = L L^\top \f$
	 */
	public static PMatrix RandomPositiveDefinite(int dim) {
		PMatrix L = new PMatrix(dim);
		for (int i = 0; i < dim; i++)
			for (int j = 0; j < dim; j++) {
				if (j >= i)
					L.array[i][j] = Math.random();
				else
					L.array[i][j] = 0.0;
			}
		return L.Multiply(L.Transpose());
	}

	/**
	 * Computes the Cholesky decomposition of the current matrix \f$ m \f$.
	 * @return a lower triangular matrix \f$ L \f$ with \f$ m = L L^\top \f$
	 * @throws RuntimeException if the matrix is not positive definite
	 */
	public PMatrix Cholesky() {
		PMatrix L = new PMatrix(this.dim);
		for (int i = 0; i < dim; i++) {
			for (int j = 0; j <= i; j++) {
				double sum = 0.0d;
				for (int k = 0; k < j; k++)
					sum += L.array[i][k] * L.array[j][k];
				if (i == j)
					L.array[i][i] = Math.sqrt(this.array[i][i] - sum);
				else
					L.array[i][j] = (this.array[i][j] - sum) / L.array[j][j];
			}
			if (L.array[i][i] <= 0.0d)
				throw new RuntimeException("MEF|Matrix is not positive definite!");
		}
		return L;
	}

	/**
	 * Verifies if two matrices \f$ m_1 \f$ and \f$ m_2 \f$ are equal.
	 * @param m1 matrix \f$ m_1 \f$
	 * @param m2 matrix \f$ m_2 \f$
	 * @return true if \f$ m_1 = m_2 \f$, false otherwise
	 */
	public static boolean equals(PMatrix m1, PMatrix m2) {
		for (int i = 0; i < m1.dim; i++) {
			if (!Arrays.equals(m1.array[i], m2.array[i])) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Method toString.
	 * @return value of the matrix as a string
	 */
	public String toString() {
		String output = "";
		for (int i = 0; i < this.dim; i++) {
			output += "| ";
			for (int j = 0; j < this.dim; j++)
				output += String.format(Locale.ENGLISH, "%13.6f ", array[i][j]);
			output += "|\n";
		}
		return output;
	}

	/**
	 * Creates and returns a deep copy of the instance.
	 * Fixed: the previous implementation used {@code this.array.clone()}, which
	 * is a SHALLOW copy of a 2D array — the clone shared its rows with the
	 * original, so mutating one mutated the other. The copy constructor already
	 * deep-copies, so it is reused here.
	 * @return a clone of the instance.
	 */
	public Parameter clone() {
		PMatrix param = new PMatrix(this);
		param.type = this.type;
		return param;
	}

	/**
	 * Returns the matrix's dimension.
	 * @return matrix's dimension.
	 */
	public int getDimension() {
		return this.dim;
	}
}
package com.google.api.ads.dfp.jaxws.v201311;

import java.util.List;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;

/**
 * Provides methods for the creation and management of creative wrappers.
 * {@link CreativeWrapper CreativeWrappers} allow HTML snippets to be
 * served along with creatives.
 * <p>
 * Creative wrappers must be associated with a
 * {@link LabelType#CREATIVE_WRAPPER} label and applied to ad units by
 * {@link AdUnit#appliedLabels}.
 * 
 * NOTE(review): This class was generated by the JAX-WS RI — do not edit by hand.
 * JAX-WS RI 2.2.4-b01
 * Generated source version: 2.1
 */
@WebService(name = "CreativeWrapperServiceInterface", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
@XmlSeeAlso({
    ObjectFactory.class
})
public interface CreativeWrapperServiceInterface {


    /**
     * 
     *         Creates a new {@code CreativeWrapper}.
     * 
     *         The following fields are required:
     *         <ul>
     *         <li>{@link CreativeWrapper#labelId}</li>
     *         <li>{@link CreativeWrapper#ordering}</li>
     *         <li>{@link CreativeWrapper#header} or {@link CreativeWrapper#footer}</li>
     *         </ul>
     * 
     *         @param creativeWrapper the creative wrapper to create
     *         @return the creative wrapper with its ID filled in
     *         @throws ApiException
     * 
     * @param creativeWrapper
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201311.CreativeWrapper
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
    @RequestWrapper(localName = "createCreativeWrapper", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacecreateCreativeWrapper")
    @ResponseWrapper(localName = "createCreativeWrapperResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacecreateCreativeWrapperResponse")
    public CreativeWrapper createCreativeWrapper(
        @WebParam(name = "creativeWrapper", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
        CreativeWrapper creativeWrapper)
        throws ApiException_Exception
    ;

    /**
     * 
     *         Creates new {@code CreativeWrapper} objects.
     * 
     *         The following fields are required:
     *         <ul>
     *         <li>{@link CreativeWrapper#labelId}</li>
     *         <li>{@link CreativeWrapper#ordering}</li>
     *         <li>{@link CreativeWrapper#header} or {@link CreativeWrapper#footer}</li>
     *         </ul>
     * 
     *         @param creativeWrappers the creative wrappers to create
     *         @return the creative wrappers with their IDs filled in
     *         @throws ApiException
     * 
     * @param creativeWrappers
     * @return
     *     returns java.util.List<com.google.api.ads.dfp.jaxws.v201311.CreativeWrapper>
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
    @RequestWrapper(localName = "createCreativeWrappers", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacecreateCreativeWrappers")
    @ResponseWrapper(localName = "createCreativeWrappersResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacecreateCreativeWrappersResponse")
    public List<CreativeWrapper> createCreativeWrappers(
        @WebParam(name = "creativeWrappers", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
        List<CreativeWrapper> creativeWrappers)
        throws ApiException_Exception
    ;

    /**
     * 
     *         Returns the {@link CreativeWrapper} uniquely identified by the given
     *         ID.
     * 
     *         @param creativeWrapperId the ID of the creative wrapper, which
     *         must already exist
     *         @return the {@code CreativeWrapper} uniquely identified by the given
     *         ID
     *         @throws ApiException
     * 
     * @param creativeWrapperId
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201311.CreativeWrapper
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
    @RequestWrapper(localName = "getCreativeWrapper", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacegetCreativeWrapper")
    @ResponseWrapper(localName = "getCreativeWrapperResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacegetCreativeWrapperResponse")
    public CreativeWrapper getCreativeWrapper(
        @WebParam(name = "creativeWrapperId", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
        Long creativeWrapperId)
        throws ApiException_Exception
    ;

    /**
     * 
     *         Gets a {@link CreativeWrapperPage} of {@link CreativeWrapper}
     *         objects that satisfy the given {@link Statement#query}. The following
     *         fields are supported for filtering:
     * 
     *         <table>
     *         <tr>
     *         <th scope="col">PQL Property</th> <th scope="col">Object Property</th>
     *         </tr>
     *         <tr>
     *         <td>{@code id}</td>
     *         <td>{@link CreativeWrapper#id}</td>
     *         </tr>
     *         <tr>
     *         <td>{@code labelId}</td>
     *         <td>{@link CreativeWrapper#labelId}</td>
     *         </tr>
     *         <tr>
     *         <td>{@code status}</td>
     *         <td>{@link CreativeWrapper#status}</td>
     *         </tr>
     *         <tr>
     *         <td>{@code ordering}</td>
     *         <td>{@link CreativeWrapper#ordering}</td>
     *         </tr>
     *         </table>
     * 
     *         @param filterStatement a Publisher Query Language statement used to filter
     *         a set of creative wrappers.
     *         @return the creative wrappers that match the given filter
     * 
     * @param filterStatement
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperPage
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
    @RequestWrapper(localName = "getCreativeWrappersByStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacegetCreativeWrappersByStatement")
    @ResponseWrapper(localName = "getCreativeWrappersByStatementResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfacegetCreativeWrappersByStatementResponse")
    public CreativeWrapperPage getCreativeWrappersByStatement(
        @WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
        Statement filterStatement)
        throws ApiException_Exception
    ;

    /**
     * 
     *         Performs actions on {@link CreativeWrapper} objects that match the
     *         given {@link Statement#query}.
     * 
     *         @param creativeWrapperAction the action to perform
     *         @param filterStatement a Publisher Query Language statement used to filter
     *         a set of labels
     *         @return the result of the action performed
     * 
     * @param creativeWrapperAction
     * @param filterStatement
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201311.UpdateResult
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
    @RequestWrapper(localName = "performCreativeWrapperAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfaceperformCreativeWrapperAction")
    @ResponseWrapper(localName = "performCreativeWrapperActionResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfaceperformCreativeWrapperActionResponse")
    public UpdateResult performCreativeWrapperAction(
        @WebParam(name = "creativeWrapperAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
        CreativeWrapperAction creativeWrapperAction,
        @WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
        Statement filterStatement)
        throws ApiException_Exception
    ;

    /**
     * 
     *         Updates the specified {@code CreativeWrapper}.
     * 
     *         @param creativeWrapper the creative wrapper to update
     *         @return the updated creative wrapper
     *         @throws ApiException
     * 
     * @param creativeWrapper
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201311.CreativeWrapper
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
    @RequestWrapper(localName = "updateCreativeWrapper", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfaceupdateCreativeWrapper")
    @ResponseWrapper(localName = "updateCreativeWrapperResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfaceupdateCreativeWrapperResponse")
    public CreativeWrapper updateCreativeWrapper(
        @WebParam(name = "creativeWrapper", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311")
        CreativeWrapper creativeWrapper)
        throws ApiException_Exception
    ;

    /**
     * 
     *         Updates the specified {@code CreativeWrapper} objects.
* * @param creativeWrappers the creative wrappers to update * @return the updated creative wrapper objects * @throws ApiException * * * @param creativeWrappers * @return * returns java.util.List<com.google.api.ads.dfp.jaxws.v201311.CreativeWrapper> * @throws ApiException_Exception */ @WebMethod @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311") @RequestWrapper(localName = "updateCreativeWrappers", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfaceupdateCreativeWrappers") @ResponseWrapper(localName = "updateCreativeWrappersResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311", className = "com.google.api.ads.dfp.jaxws.v201311.CreativeWrapperServiceInterfaceupdateCreativeWrappersResponse") public List<CreativeWrapper> updateCreativeWrappers( @WebParam(name = "creativeWrappers", targetNamespace = "https://www.google.com/apis/ads/publisher/v201311") List<CreativeWrapper> creativeWrappers) throws ApiException_Exception ; }
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.util.classfinder;

import java.io.File;
import java.lang.reflect.Modifier;
import java.util.*;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import ghidra.util.Msg;
import ghidra.util.SystemUtilities;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
import utility.module.ModuleUtilities;

/**
 * Finds extension classes in the classpath
 */
public class ClassFinder {
	static final Logger log = LogManager.getLogger(ClassFinder.class);

	// Marker interfaces that an extension-point candidate must implement; declared
	// final so the constant-named reference cannot be reassigned.
	private static final List<Class<?>> FILTER_CLASSES =
		Collections.unmodifiableList(Arrays.asList(ExtensionPoint.class));

	private Set<ClassDir> classDirs = new HashSet<>();
	private Set<ClassJar> classJars = new HashSet<>();

	/**
	 * Constructs a finder that scans the given classpath entries for extension classes.
	 *
	 * @param searchPaths jar/zip file paths and directory paths to scan
	 * @param monitor the task monitor; allows the scan to be cancelled
	 * @throws CancelledException if the monitor is cancelled during the scan
	 */
	public ClassFinder(List<String> searchPaths, TaskMonitor monitor) throws CancelledException {
		initialize(searchPaths, monitor);
	}

	/**
	 * Partitions the search paths into jar/zip entries and directory entries,
	 * skipping duplicates, non-existent jars and jars vetoed by {@link ClassJar#ignoreJar}.
	 *
	 * @param searchPaths the raw paths to examine
	 * @param monitor the task monitor
	 * @throws CancelledException if the monitor is cancelled
	 */
	private void initialize(List<String> searchPaths, TaskMonitor monitor)
			throws CancelledException {

		// LinkedHashSet removes duplicate paths while preserving search order
		Set<String> pathSet = new LinkedHashSet<>(searchPaths);
		Iterator<String> pathIterator = pathSet.iterator();
		while (pathIterator.hasNext()) {
			monitor.checkCanceled();
			String path = pathIterator.next();
			String lcPath = path.toLowerCase();
			File file = new File(path);
			if ((lcPath.endsWith(".jar") || lcPath.endsWith(".zip")) && file.exists()) {
				if (ClassJar.ignoreJar(lcPath)) {
					log.trace("Ignoring jar file: {}", path);
					continue;
				}

				log.trace("Searching jar file: {}", path);
				classJars.add(new ClassJar(path, monitor));
			}
			else if (file.isDirectory()) {
				log.trace("Searching classpath directory: {}", path);
				classDirs.add(new ClassDir(path, monitor));
			}
		}
	}

	/**
	 * Collects all extension classes found in the scanned directories and jars.
	 *
	 * @param monitor the task monitor; allows the collection to be cancelled
	 * @return the classes, sorted primarily by descending extension-point priority and
	 *         secondarily by class name
	 * @throws CancelledException if the monitor is cancelled
	 */
	List<Class<?>> getClasses(TaskMonitor monitor) throws CancelledException {

		Set<Class<?>> classSet = new HashSet<>();
		for (ClassDir dir : classDirs) {
			monitor.checkCanceled();
			dir.getClasses(classSet, monitor);
		}
		for (ClassJar jar : classJars) {
			monitor.checkCanceled();
			jar.getClasses(classSet, monitor);
		}

		List<Class<?>> classList = new ArrayList<>(classSet);
		Collections.sort(classList, (c1, c2) -> {
			// Sort classes primarily by priority and secondarily by name
			int p1 = ExtensionPointProperties.Util.getPriority(c1);
			int p2 = ExtensionPointProperties.Util.getPriority(c2);
			if (p1 > p2) {
				return -1; // higher priority sorts first
			}
			if (p1 < p2) {
				return 1;
			}
			String n1 = c1.getName();
			String n2 = c2.getName();
			if (n1.equals(n2)) {
				// Same priority and same package/class name....just arbitrarily choose one
				// NOTE(review): identity-hash tie-break is only stable within a single JVM run
				return Integer.compare(c1.hashCode(), c2.hashCode());
			}
			return n1.compareTo(n2);
		});

		return classList;
	}

	/**
	 * Attempts to load the named class and returns it if it qualifies as an
	 * extension point; returns {@code null} for non-extension names or load failures.
	 *
	 * @param path the classpath entry the class came from (used for error reporting)
	 * @param fullName the fully-qualified class name
	 * @return the loaded extension-point class, or {@code null}
	 */
	/*package*/ static Class<?> loadExtensionPoint(String path, String fullName) {

		if (!ClassSearcher.isExtensionPointName(fullName)) {
			return null;
		}

		ClassLoader classLoader = ClassSearcher.class.getClassLoader();
		try {
			Class<?> c = Class.forName(fullName, true, classLoader);
			if (isClassOfInterest(c)) {
				return c;
			}
		}
		catch (Throwable t) {
			processClassLoadError(path, fullName, t);
		}

		return null;
	}

	/**
	 * Routes a class-load failure to the appropriate logging, keeping noise low for
	 * expected failure modes.
	 *
	 * @param path the classpath entry the class came from
	 * @param name the class name that failed to load
	 * @param t the failure
	 */
	private static void processClassLoadError(String path, String name, Throwable t) {

		if (t instanceof LinkageError) {
			// We see this sometimes when loading classes that match our naming convention for
			// extension points, but are actually extending 3rd party libraries.  For now, do
			// not make noise in the log for this case.
			Msg.trace(ClassFinder.class,
				"LinkageError loading class " + name + "; Incompatible class version?", t);
			return;
		}

		if (!(t instanceof ClassNotFoundException)) {
			Msg.error(ClassFinder.class, "Error loading class " + name + " - " + t.getMessage(),
				t);
			return;
		}

		processClassNotFoundException(path, name, (ClassNotFoundException) t);
	}

	/**
	 * Handles the special case where a module class was discovered on disk but its
	 * module is not on the running classpath (typically an Eclipse launcher issue).
	 *
	 * @param path the classpath entry the class came from
	 * @param name the class name that failed to load
	 * @param t the not-found exception
	 */
	private static void processClassNotFoundException(String path, String name,
			ClassNotFoundException t) {

		if (!isModuleEntryMissingFromClasspath(path)) {
			// not sure if this can actually happen--it implies a half-built Eclipse issue
			Msg.error(ClassFinder.class, "Error loading class " + name + " - " + t.getMessage(),
				t);
			return;
		}

		// We have a special case: we know a module class was loaded, but it is not in our
		// classpath.  This can happen in Eclipse when we scan all modules, but the launcher does
		// not include all modules.
		if (SystemUtilities.isInTestingMode()) {
			// ignore the error in testing mode, as many modules are not loaded for any given test
			return;
		}

		Msg.error(ClassFinder.class,
			"Module class is missing from the classpath.\n\tUpdate your launcher " +
				"accordingly.\n\tModule: '" + path + "'\n\tClass: '" + name + "'");
	}

	/**
	 * Returns true if the given path belongs to a module that is absent from the
	 * {@code java.class.path} system property.
	 *
	 * @param path the classpath entry to check
	 * @return true if the module entry is missing from the classpath
	 */
	private static boolean isModuleEntryMissingFromClasspath(String path) {

		boolean inModule = ModuleUtilities.isInModule(path);
		if (!inModule) {
			return false;
		}

		String classPath = System.getProperty("java.class.path");
		boolean inClassPath = classPath.contains(path);
		return !inClassPath;
	}

	/**
	 * Checks to see if the given class is an extension point of interest.
	 * A class qualifies when it is concrete, public, not a non-static inner class,
	 * not explicitly excluded, and assignable to one of {@link #FILTER_CLASSES}.
	 *
	 * @param c The class to check.
	 * @return True if the given class is an extension point of interest; otherwise, false.
	 */
	public static boolean isClassOfInterest(Class<?> c) {
		if (Modifier.isAbstract(c.getModifiers())) {
			return false;
		}
		// non-static inner classes cannot be instantiated without an enclosing instance
		if (c.getEnclosingClass() != null && !Modifier.isStatic(c.getModifiers())) {
			return false;
		}
		if (!Modifier.isPublic(c.getModifiers())) {
			return false;
		}
		if (ExtensionPointProperties.Util.isExcluded(c)) {
			return false;
		}

		for (Class<?> filterClass : FILTER_CLASSES) {
			if (filterClass.isAssignableFrom(c)) {
				return true;
			}
		}

		return false;
	}
}
/*
 * Copyright 2002-2005 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package info.jtrac.util;

import info.jtrac.Jtrac;
import info.jtrac.domain.Attachment;
import info.jtrac.domain.Field;
import info.jtrac.domain.History;
import info.jtrac.domain.Item;
import info.jtrac.domain.ItemItem;
import info.jtrac.domain.ItemSearch;
import info.jtrac.domain.Space;
import info.jtrac.domain.User;
import info.jtrac.exception.JtracSecurityException;
import java.io.BufferedReader;
import java.io.StringReader;
import java.io.Writer;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.wicket.PageParameters;
import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.MessageSource;
import org.springframework.web.servlet.support.RequestContextUtils;
import org.springframework.web.util.HtmlUtils;

/**
 * Utilities to convert an Item into HTML etc.
 * The getAsHtml() routine is used to display an item - within a tag lib for JSP
 * And we are able to re-use this to send HTML e-mail etc.
 */
public final class ItemUtils {

    private static final Logger logger = LoggerFactory.getLogger(ItemUtils.class);

    /**
     * does HTML escaping, converts tabs to spaces and converts leading
     * spaces (for each multi-line) to as many '&nbsp;' sequences as required
     *
     * @param text the raw text to convert; {@code null} yields an empty string
     * @return HTML-safe text with line breaks rendered as {@code <br/>} and
     *         leading/tab whitespace preserved via {@code &nbsp;}
     */
    public static String fixWhiteSpace(String text) {
        if(text == null) {
            return "";
        }
        String temp = HtmlUtils.htmlEscape(text);
        // process line by line so each line's leading spaces can be converted
        BufferedReader reader = new BufferedReader(new StringReader(temp));
        StringBuilder sb = new StringBuilder();
        String s;
        boolean first = true;
        try {
            while((s = reader.readLine()) != null) {
                if(first) {
                    first = false;
                } else {
                    sb.append("<br/>");
                }
                if(s.startsWith(" ")) {
                    // replace each leading space with a non-breaking space
                    int i;
                    for(i = 0; i < s.length(); i++) {
                        if(s.charAt(i) == ' ') {
                            sb.append("&nbsp;");
                        } else {
                            break;
                        }
                    }
                    s = s.substring(i);
                }
                sb.append(s);
            }
        } catch(Exception e) {
            // reading from an in-memory StringReader; IOException is not expected
            throw new RuntimeException(e);
        }
        // finally render each tab as four non-breaking spaces
        return sb.toString().replaceAll("\t", "&nbsp;&nbsp;&nbsp;&nbsp;");
    }

    /**
     * Resolves an "item_view.*" message key, falling back to a visibly broken
     * "???key???" marker if the key is missing for the locale.
     */
    private static String fmt(String key, MessageSource messageSource, Locale locale) {
        try {
            return messageSource.getMessage("item_view." + key, null, locale);
        } catch (Exception e) {
            return "???item_view." + key + "???";
        }
    }

    /**
     * Renders the item as HTML for non-web contexts (e.g. e-mail); inline CSS is used.
     */
    public static String getAsHtml(Item item, MessageSource messageSource, Locale locale) {
        return getAsHtml(item, null, null, messageSource, locale);
    }

    /**
     * Renders the item as HTML inside a web request; locale and MessageSource are
     * taken from the Spring web application context of the request.
     */
    public static String getAsHtml(Item item, HttpServletRequest request, HttpServletResponse response) {
        Locale locale = RequestContextUtils.getLocale(request);
        // WebApplicationContext implements MessageSource, so the assignment is valid
        MessageSource messageSource = RequestContextUtils.getWebApplicationContext(request);
        return getAsHtml(item, request, response, messageSource, locale);
    }

    /**
     * Shared HTML rendering. When request/response are null ("not web") all styling is
     * inlined so the markup survives mail-reader clients; otherwise CSS classes are used
     * and related-item links / remove checkboxes are emitted.
     */
    private static String getAsHtml(Item item, HttpServletRequest request, HttpServletResponse response,
            MessageSource ms, Locale loc) {

        boolean isWeb = request != null && response != null;

        String tableStyle = " class='jtrac'";
        String tdStyle = "";
        String thStyle = "";
        String altStyle = " class='alt'";
        String labelStyle = " class='label'";

        if (!isWeb) {
            // inline CSS so that HTML mail works across most mail-reader clients
            String tdCommonStyle = "border: 1px solid black";
            tableStyle = " class='jtrac' style='border-collapse: collapse; font-family: Arial; font-size: 75%'";
            tdStyle = " style='" + tdCommonStyle + "'";
            thStyle = " style='" + tdCommonStyle + "; background: #CCCCCC'";
            altStyle = " style='background: #e1ecfe'";
            labelStyle = " style='" + tdCommonStyle + "; background: #CCCCCC; font-weight: bold; text-align: right'";
        }

        StringBuffer sb = new StringBuffer();
        sb.append("<table width='100%'" + tableStyle + ">");
        sb.append("<tr" + altStyle + ">");
        sb.append(" <td" + labelStyle + ">" + fmt("id", ms, loc) + "</td>");
        sb.append(" <td" + tdStyle + ">" + item.getRefId() + "</td>");
        sb.append(" <td" + labelStyle + ">" + fmt("relatedItems", ms, loc) + "</td>");
        sb.append(" <td colspan='3'" + tdStyle + ">");
        if (item.getRelatedItems() != null || item.getRelatingItems() != null) {
            String flowUrlParam = null;
            String flowUrl = null;
            if (isWeb) {
                flowUrlParam = "_flowExecutionKey=" + request.getAttribute("flowExecutionKey");
                flowUrl = "/flow?" + flowUrlParam;
            }
            if (item.getRelatedItems() != null) {
                // ItemViewForm itemViewForm = null;
                if (isWeb) {
                    // itemViewForm = (ItemViewForm) request.getAttribute("itemViewForm");
                    sb.append("<input type='hidden' name='_removeRelated'/>");
                }
                for(ItemItem itemItem : item.getRelatedItems()) {
                    String refId = itemItem.getRelatedItem().getRefId();
                    if (isWeb) {
                        String checked = "";
                        Set<Long> set = null; // itemViewForm.getRemoveRelated();
                        if (set != null && set.contains(itemItem.getId())) {
                            checked = " checked='true'";
                        }
                        String url = flowUrl + "&_eventId=viewRelated&itemId=" + itemItem.getRelatedItem().getId();
                        refId = "<a href='" + response.encodeURL(request.getContextPath() + url) + "'>" + refId + "</a>"
                                + "<input type='checkbox' name='removeRelated' value='"
                                + itemItem.getId() + "' title='" + fmt("remove", ms, loc) + "'" + checked + "/>";
                    }
                    sb.append(fmt(itemItem.getRelationText(), ms, loc) + " " + refId + " ");
                }
            }
            if (item.getRelatingItems() != null) {
                for(ItemItem itemItem : item.getRelatingItems()) {
                    String refId = itemItem.getItem().getRefId();
                    if (isWeb) {
                        String url = flowUrl + "&_eventId=viewRelated&itemId=" + itemItem.getItem().getId();
                        refId = "<a href='" + response.encodeURL(request.getContextPath() + url) + "'>" + refId + "</a>";
                    }
                    sb.append(refId + " " + fmt(itemItem.getRelationText() + "This", ms, loc) + ". ");
                }
            }
        }
        sb.append(" </td>");
        sb.append("</tr>");
        sb.append("<tr>");
        sb.append(" <td width='15%'" + labelStyle + ">" + fmt("status", ms, loc) + "</td>");
        sb.append(" <td" + tdStyle + ">" + item.getStatusValue() + "</td>");
        sb.append(" <td" + labelStyle + ">" + fmt("loggedBy", ms, loc) + "</td>");
        sb.append(" <td" + tdStyle + ">" + item.getLoggedBy().getName() + "</td>");
        sb.append(" <td" + labelStyle + ">" + fmt("assignedTo", ms, loc) + "</td>");
        sb.append(" <td width='15%'" + tdStyle + ">" + (item.getAssignedTo() == null ? "" : item.getAssignedTo().getName()) + "</td>");
        sb.append("</tr>");
        sb.append("<tr" + altStyle + ">");
        sb.append(" <td" + labelStyle + ">" + fmt("summary", ms, loc) + "</td>");
        sb.append(" <td colspan='5'" + tdStyle + ">" + HtmlUtils.htmlEscape(item.getSummary()) + "</td>");
        sb.append("</tr>");
        sb.append("<tr>");
        sb.append(" <td valign='top'" + labelStyle + ">" + fmt("detail", ms, loc) + "</td>");
        sb.append(" <td colspan='5'" + tdStyle + ">" + fixWhiteSpace(item.getDetail()) + "</td>");
        sb.append("</tr>");

        // custom fields, alternating row styling
        int row = 0;
        Map<Field.Name, Field> fields = item.getSpace().getMetadata().getFields();
        for(Field.Name fieldName : item.getSpace().getMetadata().getFieldOrder()) {
            Field field = fields.get(fieldName);
            sb.append("<tr" + (row % 2 == 0 ? altStyle : "") + ">");
            sb.append(" <td" + labelStyle + ">" + field.getLabel() + "</td>");
            sb.append(" <td colspan='5'" + tdStyle + ">" + item.getCustomValue(fieldName) + "</td>");
            sb.append("</tr>");
            row++;
        }
        sb.append("</table>");

        //=========================== HISTORY ==================================
        sb.append("<br/>&nbsp;<b" + tableStyle + ">" + fmt("history", ms, loc) + "</b>");
        sb.append("<table width='100%'" + tableStyle + ">");
        sb.append("<tr>");
        sb.append(" <th" + thStyle + ">" + fmt("loggedBy", ms, loc) + "</th><th" + thStyle + ">" + fmt("status", ms, loc) + "</th>"
                + "<th" + thStyle + ">" + fmt("assignedTo", ms, loc) + "</th><th" + thStyle + ">" + fmt("comment", ms, loc) + "</th><th" + thStyle + ">" + fmt("timeStamp", ms, loc) + "</th>");
        List<Field> editable = item.getSpace().getMetadata().getEditableFields();
        for(Field field : editable) {
            sb.append("<th" + thStyle + ">" + field.getLabel() + "</th>");
        }
        sb.append("</tr>");

        if (item.getHistory() != null) {
            row = 1;
            for(History history : item.getHistory()) {
                sb.append("<tr valign='top'" + (row % 2 == 0 ? altStyle : "") + ">");
                sb.append(" <td" + tdStyle + ">" + history.getLoggedBy().getName() + "</td>");
                sb.append(" <td" + tdStyle + ">" + history.getStatusValue() +"</td>");
                sb.append(" <td" + tdStyle + ">" + (history.getAssignedTo() == null ? "" : history.getAssignedTo().getName()) + "</td>");
                sb.append(" <td" + tdStyle + ">");
                Attachment attachment = history.getAttachment();
                if (attachment != null) {
                    if (request != null && response != null) {
                        // web context: link to the attachment download URL
                        String href = response.encodeURL(request.getContextPath() + "/app/attachments/" + attachment.getFileName() +"?filePrefix=" + attachment.getFilePrefix());
                        sb.append("<a target='_blank' href='" + href + "'>" + attachment.getFileName() + "</a>&nbsp;");
                    } else {
                        sb.append("(attachment:&nbsp;" + attachment.getFileName() + ")&nbsp;");
                    }
                }
                sb.append(fixWhiteSpace(history.getComment()));
                sb.append(" </td>");
                sb.append(" <td" + tdStyle + ">" + history.getTimeStamp() + "</td>");
                for(Field field : editable) {
                    sb.append("<td" + tdStyle + ">" + history.getCustomValue(field.getName()) + "</td>");
                }
                sb.append("</tr>");
                row++;
            }
        }
        sb.append("</table>");
        return sb.toString();
    }

    /**
     * Streams every item in the system to the writer as XML, loading in batches of 500
     * to bound memory usage. The writer is flushed once the final batch completes.
     */
    public static void writeAsXml(Jtrac jtrac, Writer writer) {
        final int batchSize = 500;
        int totalSize = jtrac.loadCountOfAllItems();
        logger.info("total count: " + totalSize);
        int firstResult = 0;
        int currentItem = 0;
        try {
            while(true) {
                logger.info("processing batch starting from: " + firstResult + ", current: " + currentItem);
                List<Item> items = jtrac.findAllItems(firstResult, batchSize);
                for (Item item : items) {
                    getAsXml(item).write(writer);
                    currentItem++;
                }
                logger.debug("size of current batch: " + items.size());
                firstResult += batchSize;
                if(currentItem >= totalSize || firstResult > totalSize) {
                    logger.info("batch completed at position: " + currentItem);
                    writer.flush();
                    break;
                }
            }
        } catch(Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Streams the items matching a search to the writer as XML (wrapped in an
     * &lt;items&gt; element), paging through results in batches of 500. The search
     * object is mutated during the run and restored in the finally block.
     */
    public static void writeAsXml(ItemSearch itemSearch, Jtrac jtrac, Writer writer) {
        final int batchSize = 500;
        // remember caller-visible paging state so it can be restored afterwards
        int originalPageSize = itemSearch.getPageSize();
        int originalCurrentPage = itemSearch.getCurrentPage();
        // get the total count first
        itemSearch.setPageSize(0);
        itemSearch.setCurrentPage(0);
        jtrac.findItems(itemSearch);
        long totalSize = itemSearch.getResultCount();
        logger.debug("total count: " + totalSize);
        itemSearch.setBatchMode(true);
        itemSearch.setPageSize(batchSize);
        try {
            writer.write("<items>");
            int currentPage = 0;
            int currentItem = 0;
            while(true) {
                logger.debug("processing batch starting from page: " + currentPage);
                itemSearch.setCurrentPage(currentPage);
                List<Item> items = jtrac.findItems(itemSearch);
                for(Item item : items) {
                    getAsXml(item).write(writer);
                    currentItem++;
                }
                logger.debug("size of current batch: " + items.size());
                if(currentItem >= totalSize) {
                    logger.info("batch completed at position: " + currentItem);
                    break;
                } else {
                    currentPage++;
                }
            }
            writer.write("</items>");
            writer.flush();
        } catch(Exception e) {
            throw new RuntimeException(e);
        } finally {
            itemSearch.setPageSize(originalPageSize);
            itemSearch.setCurrentPage(originalCurrentPage);
            itemSearch.setBatchMode(false);
        }
    }

    /**
     * Converts a single item (including related items, custom fields and full history)
     * into a dom4j XML element rooted at &lt;item&gt;.
     */
    public static Element getAsXml(Item item) {
        // root
        Element root = XmlUtils.getNewElement("item");
        root.addAttribute("refId", item.getRefId());
        // related items
        if (item.getRelatedItems() != null && item.getRelatedItems().size() > 0) {
            Element relatedItems = root.addElement("relatedItems");
            for(ItemItem itemItem : item.getRelatedItems()) {
                Element relatedItem = relatedItems.addElement("relatedItem");
                // NOTE(review): uses getItem() here but the HTML view uses getRelatedItem()
                // for the related-items list — confirm which refId is intended
                relatedItem.addAttribute("refId", itemItem.getItem().getRefId());
                relatedItem.addAttribute("linkType", itemItem.getRelationText());
            }
        }
        // relating items
        if (item.getRelatingItems() != null && item.getRelatingItems().size() > 0) {
            Element relatingItems = root.addElement("relatingItems");
            for(ItemItem itemItem : item.getRelatingItems()) {
                Element relatingItem = relatingItems.addElement("relatingItem");
                relatingItem.addAttribute("refId", itemItem.getItem().getRefId());
                relatingItem.addAttribute("linkType", itemItem.getRelationText());
            }
        }
        // summary
        if (item.getSummary() != null) {
            root.addElement("summary").addText(item.getSummary());
        }
        // detail
        if (item.getDetail() != null) {
            root.addElement("detail").addText(item.getDetail());
        }
        // logged by
        Element loggedBy = root.addElement("loggedBy");
        // loggedBy.addAttribute("userId", item.getLoggedBy().getId() + "");
        loggedBy.addText(item.getLoggedBy().getName());
        // assigned to
        if (item.getAssignedTo() != null) {
            Element assignedTo = root.addElement("assignedTo");
            // assignedTo.addAttribute("userId", item.getAssignedTo().getId() + "");
            assignedTo.addText(item.getAssignedTo().getName());
        }
        // status
        Element status = root.addElement("status");
        status.addAttribute("statusId", item.getStatus() + "");
        status.addText(item.getStatusValue());
        // custom fields
        Map<Field.Name, Field> fields = item.getSpace().getMetadata().getFields();
        for(Field.Name fieldName : item.getSpace().getMetadata().getFieldOrder()) {
            Object value = item.getValue(fieldName);
            if(value != null) {
                Field field = fields.get(fieldName);
                Element customField = root.addElement(fieldName.getText());
                customField.addAttribute("label", field.getLabel());
                if(field.isDropDownType()) {
                    customField.addAttribute("optionId", value + "");
                }
                customField.addText(item.getCustomValue(fieldName));
            }
        }
        // timestamp
        Element timestamp = root.addElement("timestamp");
        timestamp.addText(DateUtils.formatTimeStamp(item.getTimeStamp()));
        // history
        if (item.getHistory() != null) {
            Element historyRoot = root.addElement("history");
            for(History history : item.getHistory()) {
                Element event = historyRoot.addElement("event");
                // index
                event.addAttribute("eventId", (history.getIndex() + 1) + "");
                // logged by
                Element historyLoggedBy = event.addElement("loggedBy");
                // historyLoggedBy.addAttribute("userId", history.getLoggedBy().getId() + "");
                historyLoggedBy.addText(history.getLoggedBy().getName());
                // status
                if(history.getStatus() != null) {
                    Element historyStatus = event.addElement("status");
                    historyStatus.addAttribute("statusId", history.getStatus() + "");
                    historyStatus.addText(history.getStatusValue());
                }
                // assigned to
                if(history.getAssignedTo() != null) {
                    Element historyAssignedTo = event.addElement("assignedTo");
                    // historyAssignedTo.addAttribute("userId", history.getAssignedTo().getId() + "");
                    historyAssignedTo.addText(history.getAssignedTo().getName());
                }
                // attachment
                if(history.getAttachment() != null) {
                    Element historyAttachment = event.addElement("attachment");
                    historyAttachment.addAttribute("attachmentId", history.getAttachment().getId() + "");
                    historyAttachment.addText(history.getAttachment().getFileName());
                }
                // comment
                if(history.getComment() != null) {
                    Element historyComment = event.addElement("comment");
                    historyComment.addText(history.getComment());
                }
                // timestamp
                Element historyTimestamp = event.addElement("timestamp");
                historyTimestamp.addText(DateUtils.formatTimeStamp(history.getTimeStamp()));
                // custom fields
                List<Field> editable = item.getSpace().getMetadata().getEditableFields();
                for(Field field : editable) {
                    Object value = history.getValue(field.getName());
                    if(value != null) {
                        Element historyCustomField = event.addElement(field.getName().getText());
                        historyCustomField.addAttribute("label", field.getLabel());
                        if(field.isDropDownType()) {
                            historyCustomField.addAttribute("optionId", value + "");
                        }
                        historyCustomField.addText(history.getCustomValue(field.getName()));
                    }
                }
            }
        }
        return root;
    }

    /**
     * Builds an ItemSearch from page parameters. If an "s" (space id) parameter is
     * present the user must be allocated to that space, otherwise a cross-space
     * search scoped to the user is created.
     *
     * @throws JtracSecurityException if the user is not allocated to the requested space
     */
    public static ItemSearch getItemSearch(User user, PageParameters params, Jtrac jtrac) throws JtracSecurityException {
        long spaceId = params.getLong("s", -1);
        ItemSearch itemSearch = null;
        if(spaceId > 0) {
            Space space = jtrac.loadSpace(spaceId);
            if(!user.isAllocatedToSpace(space.getId())) {
                throw new JtracSecurityException("User not allocated to space: " + spaceId + " in URL: " + params);
            }
            itemSearch = new ItemSearch(space);
        } else {
            itemSearch = new ItemSearch(user);
        }
        itemSearch.initFromPageParameters(params, user, jtrac);
        return itemSearch;
    }

}
/*
   Copyright 2014 Immutables Authors and Contributors

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.immutables.value.processor.meta;

import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multiset;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.util.ElementFilter;
import org.immutables.value.processor.meta.Proto.Protoclass;

/**
 * It may grow later in some better abstraction, but as it stands now, currently it is
 * just a glue between new "protoclass" model and old discovery routines.
 */
public final class ValueTypeComposer {
  // Characters allowed in attribute names: [_A-Za-z0-9], precomputed for speed.
  private static final CharMatcher ATTRIBUTE_NAME_CHARS =
      CharMatcher.is('_')
          .or(CharMatcher.inRange('a', 'z'))
          .or(CharMatcher.inRange('A', 'Z'))
          .or(CharMatcher.inRange('0', '9')).precomputed();

  private final ProcessingEnvironment processing;
  private final Round round;
  // Fully qualified name of the Guava helper used for toString generation,
  // or null when Guava is not on the annotation-processing classpath.
  @Nullable
  private final String typeMoreObjects;

  ValueTypeComposer(Round round) {
    this.round = round;
    this.processing = round.processing();
    this.typeMoreObjects = inferTypeMoreObjects();
  }

  /**
   * @return current Guava's MoreObjects or {@code null} if no Guava available on the classpath.
   */
  @Nullable
  String inferTypeMoreObjects() {
    String typeMoreObjects = UnshadeGuava.typeString("base.MoreObjects");
    String typeObjects = UnshadeGuava.typeString("base.Objects");
    if (isValidElementFound(typeMoreObjects)) {
      return typeMoreObjects;
    }
    // BUGFIX: previously re-tested typeMoreObjects here, so the older Objects
    // fallback was never verified (and could be returned spuriously).
    if (isValidElementFound(typeObjects)) {
      return typeObjects;
    }
    return null;
  }

  /** Returns true if the named type resolves to a non-error element in this environment. */
  private boolean isValidElementFound(String typeName) {
    try {
      @Nullable
      TypeElement typeElement = processing.getElementUtils().getTypeElement(typeName);
      return typeElement != null && typeElement.asType().getKind() != TypeKind.ERROR;
    } catch (Exception e) {
      // type loading problem
      return false;
    }
  }

  /**
   * Builds a {@link ValueType} model for the given protoclass, collecting attributes
   * and running the validity checks (abstractness, mutable fields, inheritance,
   * attribute naming, constructability).
   *
   * @param protoclass the protoclass to compose
   * @return the composed value-type model (possibly with errors reported on it)
   */
  ValueType compose(Protoclass protoclass) {
    ValueType type = new ValueType();
    type.round = round;
    type.typeMoreObjects = typeMoreObjects;
    type.element = protoclass.sourceElement();
    type.immutableFeatures = protoclass.features();
    type.constitution = protoclass.constitution();

    if (protoclass.kind().isFactory()) {
      new FactoryMethodAttributesCollector(protoclass, type).collect();
    } else if (protoclass.kind().isValue()) {
      Collection<String> violations = Lists.newArrayList();
      // This check is legacy, most such checks should have been done on a higher level?
      if (checkAbstractValueType(type.element, violations)) {
        checkForMutableFields(protoclass, (TypeElement) type.element);
        checkForTypeHierarchy(protoclass, type);

        new AccessorAttributesCollector(protoclass, type).collect();
      } else {
        protoclass.report()
            .error("Value type '%s' %s",
                protoclass.sourceElement().getSimpleName(),
                Joiner.on(", ").join(violations));
        // Do nothing now. kind of way to less blow things up when it happens.
      }
    }

    checkAttributeNamesIllegalCharacters(type);
    checkAttributeNamesForDuplicates(type, protoclass);
    checkConstructability(type);
    return type;
  }

  /** Reports an error for every attribute whose name uses characters outside [_A-Za-z0-9]. */
  private void checkAttributeNamesIllegalCharacters(ValueType type) {
    for (ValueAttribute a : type.attributes) {
      if (!ATTRIBUTE_NAME_CHARS.matchesAllOf(a.name())) {
        a.report()
            .error("Name '%s' contains some unsupported or reserved characters, please use only A-Z, a-z, 0-9 and _ chars",
                a.name());
      }
    }
  }

  /**
   * Verifies every attribute can actually be initialized given the configured
   * builder/constructor/copy/singleton features, reporting errors or warnings otherwise.
   */
  private void checkConstructability(ValueType type) {
    if (!type.isUseBuilder() || type.isUseConstructor()) {
      // without a builder, mandatory attributes must come in via the constructor
      for (ValueAttribute a : type.getConstructorExcluded()) {
        if (a.isMandatory()) {
          a.report()
              .error("Attribute '%s' is mandatory and should be a constructor"
                  + " @Value.Parameter when builder is disabled or"
                  + " there are other constructor parameters",
                  a.name());
        }
      }
    }
    if (!type.isUseBuilder() && !type.isUseCopyMethods()) {
      // no builder and no withers: non-mandatory attributes can never be customized
      for (ValueAttribute a : type.getConstructorExcluded()) {
        if (!a.isMandatory()) {
          a.report()
              .warning("There is no way to initialize '%s' attribute to non-default value."
                  + " Enable builder=true or copy=true or add it as a constructor @Value.Parameter",
                  a.name());
        }
      }
    }

    if (type.isUseSingleton() && !type.getMandatoryAttributes().isEmpty()) {
      for (ValueAttribute a : type.getMandatoryAttributes()) {
        if (a.isMandatory()) {
          a.report()
              .error("Attribute '%s' is mandatory and cannot be used with singleton enabled."
                  + " Singleton instance require all attributes to have default value, otherwise"
                  + " default instance could not be created",
                  a.name());
        }
      }
    }
  }

  /** Checks both the superclass chain and implemented interfaces for illegal inheritance. */
  private void checkForTypeHierarchy(Protoclass protoclass, ValueType type) {
    scanAndReportInvalidInheritance(protoclass, type.element, type.extendedClasses());
    scanAndReportInvalidInheritance(protoclass, type.element, type.implementedInterfaces());
  }

  /** Reports an error for each supertype that is itself an @Immutable value type. */
  private static void scanAndReportInvalidInheritance(
      Protoclass protoclass,
      Element element,
      Iterable<DeclaredType> supertypes) {
    for (TypeElement supertype : Iterables.transform(supertypes, Proto.DeclatedTypeToElement.FUNCTION)) {
      if (!CachingElements.equals(element, supertype) && ImmutableMirror.isPresent(supertype)) {
        protoclass.report()
            .error("Should not inherit %s which is a value type itself."
                + " Avoid extending from another abstract value type."
                + " Better to share common abstract class or interface which"
                + " are not carrying @%s annotation",
                supertype,
                ImmutableMirror.simpleName());
      }
    }
  }

  /**
   * Warns about non-final fields, attributing the warning to the field itself when it
   * is declared in the abstract value type and to the type when inherited.
   */
  private void checkForMutableFields(Protoclass protoclass, TypeElement element) {
    for (VariableElement field : ElementFilter.fieldsIn(
        processing.getElementUtils().getAllMembers(CachingElements.getDelegate(element)))) {
      if (!field.getModifiers().contains(Modifier.FINAL)) {
        Reporter report = protoclass.report();
        boolean ownField = CachingElements.equals(element, field.getEnclosingElement());
        if (ownField) {
          report.withElement(field).warning("Avoid introduction of fields (except constants) in abstract value types");
        } else {
          report.warning("Abstract value type inherits mutable fields");
        }
      }
    }
  }

  /** Reports an error listing any attribute names that occur more than once. */
  private void checkAttributeNamesForDuplicates(ValueType type, Protoclass protoclass) {
    if (!type.attributes.isEmpty()) {
      Multiset<String> attributeNames = HashMultiset.create(type.attributes.size());
      for (ValueAttribute attribute : type.attributes) {
        attributeNames.add(attribute.name());
      }

      List<String> duplicates = Lists.newArrayList();
      for (Multiset.Entry<String> entry : attributeNames.entrySet()) {
        if (entry.getCount() > 1) {
          duplicates.add(entry.getElement());
        }
      }

      if (!duplicates.isEmpty()) {
        protoclass.report()
            .error("Duplicate attribute names %s. You should check if correct @Value.Style applied",
                duplicates);
      }
    }
  }

  /**
   * Validates that the element is a usable abstract value type, collecting a
   * human-readable reason into {@code violations} for each failed requirement.
   *
   * @param element the candidate element
   * @param violations receives one message per violated requirement
   * @return true if there were no violations
   */
  static boolean checkAbstractValueType(Element element, Collection<String> violations) {
    boolean ofSupportedKind = false
        || element.getKind() == ElementKind.INTERFACE
        || element.getKind() == ElementKind.ANNOTATION_TYPE
        || element.getKind() == ElementKind.CLASS;

    boolean staticOrTopLevel = false
        || element.getEnclosingElement().getKind() == ElementKind.PACKAGE
        || element.getModifiers().contains(Modifier.STATIC);

    boolean nonFinal = !element.getModifiers().contains(Modifier.FINAL);
    boolean hasNoTypeParameters = ((TypeElement) element).getTypeParameters().isEmpty();

    boolean publicOrPackageVisible =
        !element.getModifiers().contains(Modifier.PRIVATE)
            && !element.getModifiers().contains(Modifier.PROTECTED);

    if (!ofSupportedKind) {
      violations.add("must be class or interface or annotation type");
    }
    if (!nonFinal) {
      violations.add("must be non-final");
    }
    if (!hasNoTypeParameters) {
      violations.add("should have no type parameters");
    }
    if (!publicOrPackageVisible) {
      violations.add("should be public or package-visible");
    }
    if (!staticOrTopLevel) {
      violations.add("should be top-level or static inner class");
    }

    return violations.isEmpty();
  }
}
/*
 * Copyright 2022 Red Hat
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.hal.meta;

import java.util.function.Supplier;

import org.jboss.hal.config.Environment;
import org.jboss.hal.dmr.ModelNode;
import org.jboss.hal.dmr.Property;
import org.jboss.hal.meta.capabilitiy.Capabilities;
import org.jboss.hal.meta.description.ResourceDescription;
import org.jboss.hal.meta.description.StaticResourceDescription;
import org.jboss.hal.meta.security.SecurityContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.gwt.resources.client.TextResource;

import jsinterop.annotations.JsIgnore;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;

import static org.jboss.hal.dmr.ModelDescriptionConstants.*;
import static org.jboss.hal.dmr.ModelNodeHelper.failSafeGet;
import static org.jboss.hal.meta.AddressTemplate.ROOT;
import static org.jboss.hal.meta.security.SecurityContext.RWX;

/** Simple data struct for common metadata. Used to keep the method signatures small and tidy. */
@JsType
public class Metadata {

    private static final Logger logger = LoggerFactory.getLogger(Metadata.class);

    /** Returns metadata with an empty description, RWX permissions and a non-working Capabilities object. */
    @JsIgnore
    public static Metadata empty() {
        return new Metadata(ROOT, () -> RWX, new ResourceDescription(new ModelNode()), new Capabilities(null));
    }

    /** Reads the static description from the given text resource and delegates to {@link #staticDescription(ResourceDescription)}. */
    @JsIgnore
    public static Metadata staticDescription(TextResource description) {
        return Metadata.staticDescription(StaticResourceDescription.from(description));
    }

    /**
     * Constructs a Metadata with read-write-execution permissions and a non-working Capabilities object.
     */
    @JsIgnore
    public static Metadata staticDescription(ResourceDescription description) {
        return new Metadata(ROOT, () -> RWX, new ResourceDescription(description), new Capabilities(null));
    }

    /**
     * Constructs a Metadata with read-write-execution permissions and a working Capabilities object based on the
     * environment object.
     */
    @JsIgnore
    public static Metadata staticDescription(ResourceDescription description, Environment environment) {
        return new Metadata(ROOT, () -> RWX, new ResourceDescription(description), new Capabilities(environment));
    }

    private final AddressTemplate template;
    private final Supplier<SecurityContext> securityContext;
    private final ResourceDescription description;
    private final Capabilities capabilities;

    @JsIgnore
    public Metadata(AddressTemplate template, Supplier<SecurityContext> securityContext,
            ResourceDescription description, Capabilities capabilities) {
        this.template = template;
        this.securityContext = securityContext;
        this.description = description;
        this.capabilities = capabilities;
    }

    /** Copies attributes from this description to the specified metadata */
    @JsIgnore
    public void copyAttribute(String attribute, Metadata destination) {
        Property attributeProperty = getDescription().findAttribute(ATTRIBUTES, attribute);
        if (attributeProperty != null) {
            destination.getDescription().get(ATTRIBUTES).get(attribute).set(attributeProperty.getValue());
        }
    }

    /**
     * Makes the specified attribute writable. This is necessary if you copy attributes from a complex attribute to
     * another metadata. Without adjustment the copied attributes are read-only in the destination metadata.
     */
    @JsIgnore
    public void makeWritable(String attribute) {
        // intentionally two separate lookups: the security context is resolved through a supplier on each call
        getSecurityContext().get(ATTRIBUTES).get(attribute).get(READ).set(true);
        getSecurityContext().get(ATTRIBUTES).get(attribute).get(WRITE).set(true);
    }

    /** Shortcut for {@link #copyAttribute(String, Metadata)} and {@link #makeWritable(String)} */
    @JsIgnore
    public void copyComplexAttributeAttributes(Iterable<String> attributes, Metadata destination) {
        for (String copied : attributes) {
            copyAttribute(copied, destination);
            destination.makeWritable(copied);
        }
    }

    /**
     * Creates a new metadata instance based on this metadata with the attributes taken from the specified complex
     * attribute. The resource description will only include the attributes but no operations!
     */
    @JsIgnore
    public Metadata forComplexAttribute(String name) {
        return forComplexAttribute(name, false);
    }

    /**
     * Creates a new metadata instance based on this metadata with the attributes taken from the specified complex
     * attribute. The resource description will only include the attributes but no operations!
     *
     * @param prefixLabel if {@code true} the labels of the attributes of the complex attribute are prefixed with name
     *        of the complex attribute.
     */
    @JsIgnore
    public Metadata forComplexAttribute(String name, boolean prefixLabel) {
        ModelNode adjusted = new ModelNode();
        adjusted.get(DESCRIPTION).set(failSafeGet(description, complexAttributePath(name, DESCRIPTION)));
        adjusted.get(REQUIRED).set(failSafeGet(description, complexAttributePath(name, REQUIRED)));
        adjusted.get(NILLABLE).set(failSafeGet(description, complexAttributePath(name, NILLABLE)));

        Property complexAttribute = description.findAttribute(ATTRIBUTES, name);
        if (complexAttribute != null && complexAttribute.getValue().hasDefined(VALUE_TYPE)) {
            for (Property nested : complexAttribute.getValue().get(VALUE_TYPE).asPropertyList()) {
                // The attribute key stays the plain nested property name, since it is used when
                // building the DMR operations.
                ModelNode nestedDescription = nested.getValue();
                if (prefixLabel) {
                    // Stored as an artificial attribute and picked up by LabelBuilder.label(Property)
                    nestedDescription.get(HAL_LABEL).set(name + "-" + nested.getName());
                }
                adjusted.get(ATTRIBUTES).get(nested.getName()).set(nestedDescription);
            }
        }

        SecurityContext parentContext = this.securityContext.get();
        SecurityContext attributeContext = new SecurityContext(new ModelNode()) {
            @Override
            public boolean isReadable() {
                return parentContext.isReadable(name);
            }

            @Override
            public boolean isWritable() {
                return parentContext.isWritable(name);
            }

            @Override
            public boolean isReadable(String attribute) {
                // if the complex attribute is readable all nested attributes are readable as well
                return isReadable();
            }

            @Override
            public boolean isWritable(String attribute) {
                // if the complex attribute is writable all nested attributes are writable as well
                return isWritable();
            }

            @Override
            public boolean isExecutable(String operation) {
                return parentContext.isExecutable(operation);
            }
        };
        return new Metadata(template, () -> attributeContext, new ResourceDescription(adjusted), capabilities);
    }

    /**
     * Creates a new metadata instance whose attributes are the request properties of the specified operation. The
     * derived security context maps readable/writable to the operation's executability.
     */
    @JsIgnore
    public Metadata forOperation(String name) {
        ModelNode adjusted = new ModelNode();
        adjusted.get(DESCRIPTION).set(failSafeGet(description, operationPath(name, DESCRIPTION)));
        adjusted.get(ATTRIBUTES).set(failSafeGet(description, operationPath(name, REQUEST_PROPERTIES)));

        SecurityContext parentContext = this.securityContext.get();
        SecurityContext operationContext = new SecurityContext(new ModelNode()) {
            @Override
            public boolean isReadable() {
                return parentContext.isExecutable(name);
            }

            @Override
            public boolean isWritable() {
                return parentContext.isExecutable(name);
            }

            @Override
            public boolean isReadable(String attribute) {
                // if the operation is executable all of its request properties are readable as well
                return isReadable();
            }

            @Override
            public boolean isWritable(String attribute) {
                // if the operation is executable all of its request properties are writable as well
                return isWritable();
            }

            @Override
            public boolean isExecutable(String operation) {
                return parentContext.isExecutable(operation);
            }
        };
        return new Metadata(template, () -> operationContext, new ResourceDescription(adjusted), capabilities);
    }

    // Builds the DMR path "attributes/<name>/<segment>" used with failSafeGet().
    private static String complexAttributePath(String name, String segment) {
        return ATTRIBUTES + "/" + name + "/" + segment;
    }

    // Builds the DMR path "operations/<name>/<segment>" used with failSafeGet().
    private static String operationPath(String name, String segment) {
        return OPERATIONS + "/" + name + "/" + segment;
    }

    /** @return the address template */
    @JsProperty
    public AddressTemplate getTemplate() {
        return template;
    }

    /** @return the security context */
    @JsProperty
    public SecurityContext getSecurityContext() {
        if (securityContext == null || securityContext.get() == null) {
            logger.error("No security context found for {}. Return SecurityContext.READ_ONLY", template);
            return SecurityContext.READ_ONLY;
        }
        return securityContext.get();
    }

    /** @return the resource description */
    @JsProperty
    public ResourceDescription getDescription() {
        return description;
    }

    /** @return the capabilities (may be non-working depending on how this instance was created) */
    @JsIgnore
    public Capabilities getCapabilities() {
        return capabilities;
    }
}
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.kernel.api.index;

import java.util.Arrays;

import org.neo4j.graphdb.Node;
import org.neo4j.kernel.impl.api.index.UpdateMode;

import static org.neo4j.collection.primitive.PrimitiveLongCollections.EMPTY_LONG_ARRAY;
import static org.neo4j.kernel.impl.api.index.UpdateMode.ADDED;
import static org.neo4j.kernel.impl.api.index.UpdateMode.CHANGED;
import static org.neo4j.kernel.impl.api.index.UpdateMode.REMOVED;

/**
 * Immutable value describing a single property change (add/change/remove) on a node,
 * together with the node's label sets before and after the change.
 */
public class NodePropertyUpdate
{
    private final long nodeId;
    private final int propertyKeyId;
    private final Object valueBefore;
    private final Object valueAfter;
    private final UpdateMode updateMode;
    private final long[] labelsBefore;
    private final long[] labelsAfter;

    // Private: instances are created through the add/change/remove factory methods below.
    private NodePropertyUpdate( long nodeId, int propertyKeyId, Object valueBefore, long[] labelsBefore,
            Object valueAfter, long[] labelsAfter, UpdateMode updateMode )
    {
        this.nodeId = nodeId;
        this.propertyKeyId = propertyKeyId;
        this.valueBefore = valueBefore;
        this.labelsBefore = labelsBefore;
        this.valueAfter = valueAfter;
        this.labelsAfter = labelsAfter;
        this.updateMode = updateMode;
    }

    public long getNodeId()
    {
        return nodeId;
    }

    public int getPropertyKeyId()
    {
        return propertyKeyId;
    }

    public Object getValueBefore()
    {
        return valueBefore;
    }

    public Object getValueAfter()
    {
        return valueAfter;
    }

    public int getNumberOfLabelsBefore()
    {
        return labelsBefore.length;
    }

    public int getLabelBefore( int i )
    {
        return (int) labelsBefore[i];
    }

    public int getNumberOfLabelsAfter()
    {
        return labelsAfter.length;
    }

    public int getLabelAfter( int i )
    {
        return (int) labelsAfter[i];
    }

    public UpdateMode getUpdateMode()
    {
        return updateMode;
    }

    /**
     * Whether or not this property update is for the given {@code labelId}.
     *
     * If this property update comes from setting/changing/removing a property it will
     * affect all labels on that {@link Node}.
     *
     * If this property update comes from adding or removing labels to/from a {@link Node}
     * it will affect only those labels.
     *
     * @param labelId the label id the check.
     */
    public boolean forLabel( long labelId )
    {
        // delegation: the mode itself knows which label set(s) are relevant
        return updateMode.forLabel( labelsBefore, labelsAfter, labelId );
    }

    @Override
    public String toString()
    {
        StringBuilder text = new StringBuilder( getClass().getSimpleName() )
                .append( "[" ).append( nodeId ).append( ", prop:" ).append( propertyKeyId ).append( " " );
        switch ( updateMode )
        {
        case ADDED:
            text.append( "add:" ).append( valueAfter );
            break;
        case CHANGED:
            text.append( "change:" ).append( valueBefore ).append( " => " ).append( valueAfter );
            break;
        case REMOVED:
            text.append( "remove:" ).append( valueBefore );
            break;
        default:
            throw new IllegalArgumentException( updateMode.toString() );
        }
        text.append( ", labelsBefore:" ).append( Arrays.toString( labelsBefore ) );
        text.append( ", labelsAfter:" ).append( Arrays.toString( labelsAfter ) );
        return text.append( "]" ).toString();
    }

    // NOTE: property values are deliberately excluded here; this is consistent with the
    // equals() contract (equal objects still produce equal hashes).
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int hash = 1;
        hash = prime * hash + Arrays.hashCode( labelsBefore );
        hash = prime * hash + Arrays.hashCode( labelsAfter );
        hash = prime * hash + (int) (nodeId ^ (nodeId >>> 32));
        hash = prime * hash + propertyKeyId;
        hash = prime * hash + updateMode.hashCode();
        return hash;
    }

    @Override
    public boolean equals( Object obj )
    {
        if ( this == obj )
        {
            return true;
        }
        if ( obj == null || getClass() != obj.getClass() )
        {
            return false;
        }
        NodePropertyUpdate that = (NodePropertyUpdate) obj;
        return nodeId == that.nodeId
                && propertyKeyId == that.propertyKeyId
                && updateMode == that.updateMode
                && Arrays.equals( labelsBefore, that.labelsBefore )
                && Arrays.equals( labelsAfter, that.labelsAfter )
                && propertyValuesEqual( valueBefore, that.valueBefore )
                && propertyValuesEqual( valueAfter, that.valueAfter );
    }

    /**
     * Deep equality for property values: handles every primitive array type and Object[]
     * element-wise, everything else via {@code equals}. Two arrays of different component
     * types (e.g. int[] vs long[]) are never equal.
     */
    public static boolean propertyValuesEqual( Object a, Object b )
    {
        if ( a == null || b == null )
        {
            return a == b;
        }
        if ( a instanceof boolean[] && b instanceof boolean[] )
        {
            return Arrays.equals( (boolean[]) a, (boolean[]) b );
        }
        if ( a instanceof byte[] && b instanceof byte[] )
        {
            return Arrays.equals( (byte[]) a, (byte[]) b );
        }
        if ( a instanceof short[] && b instanceof short[] )
        {
            return Arrays.equals( (short[]) a, (short[]) b );
        }
        if ( a instanceof int[] && b instanceof int[] )
        {
            return Arrays.equals( (int[]) a, (int[]) b );
        }
        if ( a instanceof long[] && b instanceof long[] )
        {
            return Arrays.equals( (long[]) a, (long[]) b );
        }
        if ( a instanceof char[] && b instanceof char[] )
        {
            return Arrays.equals( (char[]) a, (char[]) b );
        }
        if ( a instanceof float[] && b instanceof float[] )
        {
            return Arrays.equals( (float[]) a, (float[]) b );
        }
        if ( a instanceof double[] && b instanceof double[] )
        {
            return Arrays.equals( (double[]) a, (double[]) b );
        }
        if ( a instanceof Object[] && b instanceof Object[] )
        {
            return Arrays.equals( (Object[]) a, (Object[]) b );
        }
        return a.equals( b );
    }

    /** Creates an update describing a newly added property value. */
    public static NodePropertyUpdate add( long nodeId, int propertyKeyId, Object value, long[] labels )
    {
        return new NodePropertyUpdate( nodeId, propertyKeyId, null, EMPTY_LONG_ARRAY, value, labels, ADDED );
    }

    /** Creates an update describing a changed property value (and possibly changed label sets). */
    public static NodePropertyUpdate change( long nodeId, int propertyKeyId, Object valueBefore, long[] labelsBefore,
            Object valueAfter, long[] labelsAfter )
    {
        return new NodePropertyUpdate( nodeId, propertyKeyId, valueBefore, labelsBefore, valueAfter, labelsAfter,
                CHANGED );
    }

    /** Creates an update describing a removed property value. */
    public static NodePropertyUpdate remove( long nodeId, int propertyKeyId, Object value, long[] labels )
    {
        return new NodePropertyUpdate( nodeId, propertyKeyId, value, labels, null, EMPTY_LONG_ARRAY, REMOVED );
    }
}
/*
 * Copyright 2008 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.factmodel;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.drools.core.phreak.Reactive;
import org.kie.api.definition.type.Annotation;
import org.kie.api.definition.type.FactField;
import org.kie.api.definition.type.FactType;

/**
 * Declares a class to be dynamically created.
 *
 * <p>Holds the class name, superclass, interfaces, field definitions (in declaration order),
 * annotations and trait/metadata settings needed to generate and introspect the class.</p>
 */
public class ClassDefinition
    implements
    FactType {

    public static enum TRAITING_MODE { NONE, BASIC, LOGICAL }

    private String                       className;
    private String                       superClass;
    private String[]                     interfaces;

    // resolved lazily/externally; not serialized with the definition
    private transient Class< ? >         definedClass;
    private TRAITING_MODE                traitable;
    private boolean                      abstrakt       = false;
    private Map<String, Object>          metaData;

    // LinkedHashMap: field declaration order is significant (see getField(int))
    private LinkedHashMap<String, FieldDefinition> fields = new LinkedHashMap<String, FieldDefinition>();

    private Map<String, AnnotationDefinition> annotations;

    private Map<String, List<String>>    modifiedPropsByMethod;

    public ClassDefinition() {
        this( null,
              null,
              null );
    }

    public ClassDefinition( String className ) {
        this( className,
              null,
              null );
    }

    public ClassDefinition( String className,
                            String superClass,
                            String[] interfaces ) {
        this.setClassName( className );
        this.setSuperClass( superClass );
        this.setInterfaces( interfaces );
    }

    /** Builds a definition mirroring an existing class (name, superclass and interfaces). */
    public ClassDefinition( Class<?> cls ) {
        this.definedClass = cls;
        this.setClassName( cls.getCanonicalName() );
        this.setSuperClass( cls.getSuperclass() != null ? cls.getSuperclass().getCanonicalName() : null );
        String[] interfaces = new String[cls.getInterfaces().length];
        int i = 0;
        for (Class<?> interfaze : cls.getInterfaces()) {
            interfaces[i++] = interfaze.getCanonicalName();
        }
        this.setInterfaces( interfaces );
    }

    /**
     * Deserializes this definition. The read order must mirror {@link #writeExternal(ObjectOutput)}.
     */
    @SuppressWarnings("unchecked")
    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        this.className = (String) in.readObject();
        this.superClass = (String) in.readObject();
        this.interfaces = (String[]) in.readObject();
        this.fields = (LinkedHashMap<String, FieldDefinition>) in.readObject();
        this.annotations = (Map<String, AnnotationDefinition>) in.readObject();
        this.modifiedPropsByMethod = (Map<String, List<String>>) in.readObject();
        this.traitable = (ClassDefinition.TRAITING_MODE) in.readObject();
        this.abstrakt = in.readBoolean();
        // cast to the Map interface rather than HashMap: any Map implementation written
        // by writeExternal must deserialize without a ClassCastException
        this.metaData = (Map<String, Object>) in.readObject();
    }

    /** Serializes this definition; {@link #definedClass} is transient and intentionally skipped. */
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject( this.className );
        out.writeObject( this.superClass );
        out.writeObject( this.interfaces );
        out.writeObject( this.fields );
        out.writeObject( this.annotations );
        out.writeObject( this.modifiedPropsByMethod );
        out.writeObject( this.traitable );
        out.writeBoolean( this.abstrakt );
        out.writeObject( this.metaData );
    }

    /**
     * @return Returns the name.
     */
    public final String getClassName() {
        return this.className;
    }

    /**
     * @param className The name to set.
     */
    public final void setClassName(final String className) {
        this.className = className;
    }

    /**
     * @return Returns the defined class, or null if it was not resolved yet.
     */
    public final Class< ? > getDefinedClass() {
        return definedClass;
    }

    /**
     * @param definedClass The class to set.
     */
    public void setDefinedClass(final Class< ? > definedClass) {
        this.definedClass = definedClass;
    }

    /**
     * Adds a field definition to this class, keyed (and ordered) by its name.
     *
     * @param attr the field definition
     */
    public final void addField(FieldDefinition attr) {
        this.fields.put( attr.getName(),
                         attr );
    }

    /**
     * @return Returns an unmodifiable collection of field definitions
     */
    public final Collection<FieldDefinition> getFieldsDefinitions() {
        return Collections.unmodifiableCollection( this.fields.values() );
    }

    /**
     * Returns the field definition object for the given field name, or null if unknown.
     *
     * @param fieldName the field name
     */
    public FieldDefinition getField(final String fieldName) {
        return this.fields.get( fieldName );
    }

    /** Returns the field whose resolved alias equals the given alias, or null if none matches. */
    public FieldDefinition getFieldByAlias( String alias ) {
        for ( FactField factField : getFields() ) {
            FieldDefinition def = (FieldDefinition) factField;
            if ( def.resolveAlias().equals( alias ) ) {
                return def;
            }
        }
        return null;
    }

    /**
     * Returns the field at position index, as defined by the builder using the @position annotation.
     *
     * @param index the field position
     * @return the index-th field, or null if the index is out of range
     */
    public FieldDefinition getField(int index) {
        if (index >= fields.size() || index < 0) {
            return null;
        }
        // LinkedHashMap preserves declaration order, so skipping index entries is correct
        Iterator<FieldDefinition> iter = fields.values().iterator();
        for (int j = 0; j < index; j++) {
            iter.next();
        }
        return iter.next();
    }

    /**
     * @return Returns the interfaces.
     */
    public final String[] getInterfaces() {
        return interfaces;
    }

    /**
     * @param interfaces The interfaces to set (null is normalized to an empty array).
     */
    public final void setInterfaces(String[] interfaces) {
        this.interfaces = (interfaces != null) ? interfaces : new String[0];
    }

    /** Appends a single interface name to the current interface list. */
    public final void addInterface(String interfaze) {
        if (interfaces == null) {
            interfaces = new String[] { interfaze };
        } else {
            String[] grown = Arrays.copyOf( interfaces, interfaces.length + 1 );
            grown[interfaces.length] = interfaze;
            this.interfaces = grown;
        }
    }

    /**
     * @return Returns the superClass.
     */
    public final String getSuperClass() {
        return superClass;
    }

    /**
     * @param superClass The superClass to set (null is normalized to java.lang.Object).
     */
    public final void setSuperClass(final String superClass) {
        this.superClass = (superClass != null) ? superClass : "java.lang.Object";
    }

    public String getName() {
        return getClassName();
    }

    /** @return the class name without its package prefix */
    public String getSimpleName() {
        return getClassName().substring( getClassName().lastIndexOf( '.' ) + 1 );
    }

    /** @return the package name, or an empty string for classes in the default package */
    public String getPackageName() {
        int lastDot = getClassName().lastIndexOf( '.' );
        // Fix: a class in the default package has no dot; substring(0, -1) would throw
        return lastDot < 0 ? "" : getClassName().substring( 0, lastDot );
    }

    /**
     * @return a new instance of the defined class created through its no-arg constructor
     * @throws InstantiationException if the class cannot be instantiated
     * @throws IllegalAccessException if the no-arg constructor is not accessible
     */
    public Object newInstance() throws InstantiationException,
                               IllegalAccessException {
        return this.definedClass.newInstance();
    }

    public Class< ? > getFactClass() {
        return getDefinedClass();
    }

    /** @return a fresh, mutable list of the field definitions in declaration order */
    public List<FactField> getFields() {
        return new ArrayList<FactField>( fields.values() );
    }

    /** Reads the given field from the bean via its accessor; returns null for unknown fields. */
    public Object get(Object bean,
                      String field) {
        FieldDefinition fieldDefinition = getField( field );
        if (fieldDefinition != null) {
            return fieldDefinition.getFieldAccessor().getValue( bean );
        }
        return null;
    }

    /** Writes the given field on the bean via its accessor; unknown fields are ignored. */
    public void set(Object bean,
                    String field,
                    Object value) {
        FieldDefinition fieldDefinition = getField( field );
        if (fieldDefinition != null) {
            fieldDefinition.getFieldAccessor().setValue( bean,
                                                         value );
        }
    }

    /** @return a map of field name to current value, read from the given bean */
    public Map<String, Object> getAsMap(Object bean) {
        Map<String, Object> m = new HashMap<String, Object>( fields.size() );
        for (Map.Entry<String, FieldDefinition> ent : this.fields.entrySet()) {
            Object val = ent.getValue().getFieldAccessor().getValue( bean );
            m.put( ent.getKey(),
                   val );
        }
        return m;
    }

    /** Writes every entry of the map onto the bean; unknown field names are silently skipped by {@link #set}. */
    public void setFromMap(Object bean,
                           Map<String, Object> data) {
        for (Map.Entry<String, Object> ent : data.entrySet()) {
            set( bean,
                 ent.getKey(),
                 ent.getValue() );
        }
    }

    /** Registers an annotation definition, keyed by its name (lazily creating the backing map). */
    public void addAnnotation(AnnotationDefinition annotationDefinition) {
        if (this.annotations == null) {
            this.annotations = new HashMap<String, AnnotationDefinition>();
        }
        this.annotations.put( annotationDefinition.getName(),
                              annotationDefinition );
    }

    /** @return the registered annotation definitions, or an empty list if none were added */
    public Collection<AnnotationDefinition> getAnnotations() {
        return annotations != null ? annotations.values() : Collections.<AnnotationDefinition> emptyList();
    }

    /** @return the annotation definition registered under the given annotation class name, or null */
    public AnnotationDefinition getAnnotation(Class<?> annotationClass) {
        return annotations != null ? annotations.get( annotationClass.getName() ) : null;
    }

    public List<Annotation> getClassAnnotations() {
        // raw ArrayList kept on purpose: AnnotationDefinition is exposed through the
        // kie Annotation interface here without an element-wise conversion
        return Collections.unmodifiableList( new ArrayList( getAnnotations() ) );
    }

    /** @return the metadata map, or null if no metadata was ever added */
    public Map<String, Object> getMetaData() {
        return metaData;
    }

    /** @return the metadata value for the given key, or null */
    public Object getMetaData(String name) {
        return metaData != null ? metaData.get( name ) : null;
    }

    /** Stores a metadata entry (lazily creating the backing map). */
    public void addMetaData( String key, Object value ) {
        if ( this.metaData == null ) {
            metaData = new HashMap<String, Object>();
        }
        metaData.put( key, value );
    }

    /** Records which properties the given method modifies (lazily creating the backing map). */
    public void addModifiedPropsByMethod(Method method,
                                         List<String> props) {
        if (modifiedPropsByMethod == null) {
            modifiedPropsByMethod = new HashMap<String, List<String>>();
        }
        String methodName = modifiedPropsByMethodKey( method );
        modifiedPropsByMethod.put( methodName,
                                   props );
    }

    public List<String> getModifiedPropsByMethod(Method method) {
        return getModifiedPropsByMethod( method.getName(),
                                         method.getParameterTypes().length );
    }

    /**
     * Looks up the modified properties for a method, preferring an exact arg-count match
     * over a varargs ("*") entry.
     *
     * @return the modified property names, or null when nothing was recorded
     */
    public List<String> getModifiedPropsByMethod(String methodName,
                                                 int args) {
        if (modifiedPropsByMethod == null) {
            return null;
        }
        List<String> byExactNumberOfArgs = modifiedPropsByMethod.get( methodName + "_" + args );
        List<String> bestEffortVarArgs = modifiedPropsByMethod.get( methodName + "_*" );
        if ( byExactNumberOfArgs != null ) {
            return byExactNumberOfArgs;
        }
        return bestEffortVarArgs; // << indeed maybe null
    }

    /** Key format: {@code name_argCount}, or {@code name_*} for varargs methods. */
    public static String modifiedPropsByMethodKey(Method method) {
        return method.getName() + "_" + ( method.isVarArgs() ? "*" : method.getParameterTypes().length );
    }

    /** @return true if the class is annotated with {@code @Reactive} */
    public boolean isReactive() {
        return getAnnotation( Reactive.class ) != null;
    }

    public boolean isTraitable() {
        return traitable != null && traitable != TRAITING_MODE.NONE;
    }

    public void setTraitable( boolean traitable ) {
        setTraitable( traitable,
                      false );
    }

    public void setTraitable( boolean traitable, boolean enableLogical ) {
        if ( !traitable ) {
            this.traitable = TRAITING_MODE.NONE;
        } else {
            this.traitable = enableLogical ? TRAITING_MODE.LOGICAL : TRAITING_MODE.BASIC;
        }
    }

    public boolean isFullTraiting() {
        return this.traitable == TRAITING_MODE.LOGICAL;
    }

    public boolean isAbstrakt() {
        return abstrakt;
    }

    public void setAbstrakt(boolean abstrakt) {
        this.abstrakt = abstrakt;
    }

    public String toString() {
        return "ClassDefinition{" +
               "className='" + className + '\'' +
               ", superClass='" + superClass + '\'' +
               ", interfaces=" + (interfaces == null ? null : Arrays.asList(interfaces)) +
               ", definedClass=" + definedClass +
               ", traitable=" + traitable +
               ", abstract=" + abstrakt +
               ", fields=" + fields +
               ", annotations=" + annotations +
               '}';
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * OpenFlow 1.3 wire representation of the table-features property that lists
 * the instruction ids a table supports (fixed property type 0x0).
 *
 * NOTE(review): LoxiGen-generated file ("Do not modify") — the comments below
 * are review annotations only; behavioral changes belong in the LOXI template.
 */
class OFTableFeaturePropInstructionsVer13 implements OFTableFeaturePropInstructions {
    private static final Logger logger = LoggerFactory.getLogger(OFTableFeaturePropInstructionsVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int MINIMUM_LENGTH = 4;

    private final static List<OFInstructionId> DEFAULT_INSTRUCTION_IDS = ImmutableList.<OFInstructionId>of();

    // OF message fields
    private final List<OFInstructionId> instructionIds;
//
    // Immutable default instance
    final static OFTableFeaturePropInstructionsVer13 DEFAULT = new OFTableFeaturePropInstructionsVer13(
        DEFAULT_INSTRUCTION_IDS
    );

    // package private constructor - used by readers, builders, and factory
    OFTableFeaturePropInstructionsVer13(List<OFInstructionId> instructionIds) {
        if(instructionIds == null) {
            throw new NullPointerException("OFTableFeaturePropInstructionsVer13: property instructionIds cannot be null");
        }
        this.instructionIds = instructionIds;
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x0;
    }

    @Override
    public List<OFInstructionId> getInstructionIds() {
        return instructionIds;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    public OFTableFeaturePropInstructions.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder seeded from an existing immutable instance; unset fields fall back to the parent's values. */
    static class BuilderWithParent implements OFTableFeaturePropInstructions.Builder {
        final OFTableFeaturePropInstructionsVer13 parentMessage;

        // OF message fields
        private boolean instructionIdsSet;
        private List<OFInstructionId> instructionIds;

        BuilderWithParent(OFTableFeaturePropInstructionsVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x0;
        }

        @Override
        public List<OFInstructionId> getInstructionIds() {
            return instructionIds;
        }

        @Override
        public OFTableFeaturePropInstructions.Builder setInstructionIds(List<OFInstructionId> instructionIds) {
            this.instructionIds = instructionIds;
            this.instructionIdsSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFTableFeaturePropInstructions build() {
            // use the explicitly-set value, otherwise inherit from the parent message
            List<OFInstructionId> instructionIds = this.instructionIdsSet ? this.instructionIds : parentMessage.instructionIds;
            if(instructionIds == null)
                throw new NullPointerException("Property instructionIds must not be null");

            //
            return new OFTableFeaturePropInstructionsVer13(
                    instructionIds
                );
        }
    }

    /** Builder starting from defaults (empty instruction-id list). */
    static class Builder implements OFTableFeaturePropInstructions.Builder {
        // OF message fields
        private boolean instructionIdsSet;
        private List<OFInstructionId> instructionIds;

        @Override
        public int getType() {
            return 0x0;
        }

        @Override
        public List<OFInstructionId> getInstructionIds() {
            return instructionIds;
        }

        @Override
        public OFTableFeaturePropInstructions.Builder setInstructionIds(List<OFInstructionId> instructionIds) {
            this.instructionIds = instructionIds;
            this.instructionIdsSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        //
        @Override
        public OFTableFeaturePropInstructions build() {
            List<OFInstructionId> instructionIds = this.instructionIdsSet ? this.instructionIds : DEFAULT_INSTRUCTION_IDS;
            if(instructionIds == null)
                throw new NullPointerException("Property instructionIds must not be null");

            return new OFTableFeaturePropInstructionsVer13(
                    instructionIds
                );
        }
    }

    final static Reader READER = new Reader();

    /**
     * Deserializes the property from the wire. Returns null (and resets the
     * reader index) when the buffer does not yet hold the complete property.
     */
    static class Reader implements OFMessageReader<OFTableFeaturePropInstructions> {
        @Override
        public OFTableFeaturePropInstructions readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x0
            short type = bb.readShort();
            if(type != (short) 0x0)
                throw new OFParseError("Wrong type: Expected=0x0(0x0), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            // remaining bytes of the property are the variable-size id list
            List<OFInstructionId> instructionIds = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFInstructionIdVer13.READER);

            OFTableFeaturePropInstructionsVer13 tableFeaturePropInstructionsVer13 = new OFTableFeaturePropInstructionsVer13(
                    instructionIds
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", tableFeaturePropInstructionsVer13);
            return tableFeaturePropInstructionsVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFTableFeaturePropInstructionsVer13Funnel FUNNEL = new OFTableFeaturePropInstructionsVer13Funnel();

    static class OFTableFeaturePropInstructionsVer13Funnel implements Funnel<OFTableFeaturePropInstructionsVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFTableFeaturePropInstructionsVer13 message, PrimitiveSink sink) {
            // fixed value property type = 0x0
            sink.putShort((short) 0x0);
            // FIXME: skip funnel of length
            FunnelUtils.putList(message.instructionIds, sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /**
     * Serializes the property, back-patching the 16-bit length field once the
     * variable-size instruction-id list has been written.
     */
    static class Writer implements OFMessageWriter<OFTableFeaturePropInstructionsVer13> {
        @Override
        public void write(ChannelBuffer bb, OFTableFeaturePropInstructionsVer13 message) {
            int startIndex = bb.writerIndex();
            // fixed value property type = 0x0
            bb.writeShort((short) 0x0);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            ChannelUtils.writeList(bb, message.instructionIds);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFTableFeaturePropInstructionsVer13(");
        b.append("instructionIds=").append(instructionIds);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFTableFeaturePropInstructionsVer13 other = (OFTableFeaturePropInstructionsVer13) obj;

        if (instructionIds == null) {
            if (other.instructionIds != null)
                return false;
        } else if (!instructionIds.equals(other.instructionIds))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((instructionIds == null) ? 0 : instructionIds.hashCode());
        return result;
    }
}
/** * Licensed to DigitalPebble Ltd under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * DigitalPebble licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.digitalpebble.stormcrawler.bolt; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.URL; import java.net.UnknownHostException; import java.text.SimpleDateFormat; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.lang.StringUtils; import org.apache.storm.Config; import org.apache.storm.metric.api.IMetric; import org.apache.storm.metric.api.MeanReducer; import org.apache.storm.metric.api.MultiCountMetric; import org.apache.storm.metric.api.MultiReducedMetric; import org.apache.storm.task.OutputCollector; import org.apache.storm.task.TopologyContext; import org.apache.storm.topology.OutputFieldsDeclarer; import org.apache.storm.tuple.Fields; import org.apache.storm.tuple.Tuple; import org.apache.storm.tuple.Values; import org.apache.storm.utils.Utils; import org.slf4j.LoggerFactory; import com.digitalpebble.stormcrawler.Constants; import com.digitalpebble.stormcrawler.Metadata; import com.digitalpebble.stormcrawler.persistence.Status; import com.digitalpebble.stormcrawler.protocol.HttpHeaders; import com.digitalpebble.stormcrawler.protocol.Protocol; import 
com.digitalpebble.stormcrawler.protocol.ProtocolFactory; import com.digitalpebble.stormcrawler.protocol.ProtocolResponse; import com.digitalpebble.stormcrawler.protocol.RobotRules; import com.digitalpebble.stormcrawler.util.ConfUtils; import com.digitalpebble.stormcrawler.util.PerSecondReducer; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import crawlercommons.domains.PaidLevelDomain; import crawlercommons.robots.BaseRobotRules; /** * A single-threaded fetcher with no internal queue. Use of this fetcher * requires that the user implement an external queue that enforces crawl-delay * politeness constraints. */ @SuppressWarnings("serial") public class SimpleFetcherBolt extends StatusEmitterBolt { private static final org.slf4j.Logger LOG = LoggerFactory .getLogger(SimpleFetcherBolt.class); private static final String SITEMAP_DISCOVERY_PARAM_KEY = "sitemap.discovery"; public static final String QUEUE_MODE_HOST = "byHost"; public static final String QUEUE_MODE_DOMAIN = "byDomain"; public static final String QUEUE_MODE_IP = "byIP"; private Config conf; private MultiCountMetric eventCounter; private MultiReducedMetric averagedMetrics; private MultiReducedMetric perSecMetrics; private ProtocolFactory protocolFactory; private int taskID = -1; boolean sitemapsAutoDiscovery = false; // TODO configure the max time private Cache<String, Long> throttler = CacheBuilder.newBuilder() .expireAfterAccess(30, TimeUnit.SECONDS).build(); private String queueMode; /** default crawl delay in msec, can be overridden by robots directives **/ private long crawlDelay = 1000; /** max value accepted from robots.txt **/ private long maxCrawlDelay = 30000; private final AtomicInteger activeThreads = new AtomicInteger(0); private void checkConfiguration() { // ensure that a value has been set for the agent name and that that // agent name is the first value in the agents we advertise for robot // rules parsing String agentName = (String) 
getConf().get("http.agent.name"); if (agentName == null || agentName.trim().length() == 0) { String message = "Fetcher: No agents listed in 'http.agent.name'" + " property."; LOG.error(message); throw new IllegalArgumentException(message); } } private Config getConf() { return this.conf; } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { super.prepare(stormConf, context, collector); this.conf = new Config(); this.conf.putAll(stormConf); checkConfiguration(); this.taskID = context.getThisTaskId(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ENGLISH); long start = System.currentTimeMillis(); LOG.info("[Fetcher #{}] : starting at {}", taskID, sdf.format(start)); // Register a "MultiCountMetric" to count different events in this bolt // Storm will emit the counts every n seconds to a special bolt via a // system stream // The data can be accessed by registering a "MetricConsumer" in the // topology int metricsTimeBucketSecs = ConfUtils.getInt(conf, "fetcher.metrics.time.bucket.secs", 10); this.eventCounter = context.registerMetric("fetcher_counter", new MultiCountMetric(), metricsTimeBucketSecs); this.averagedMetrics = context.registerMetric("fetcher_average", new MultiReducedMetric(new MeanReducer()), metricsTimeBucketSecs); this.perSecMetrics = context.registerMetric("fetcher_average_persec", new MultiReducedMetric(new PerSecondReducer()), metricsTimeBucketSecs); // create gauges context.registerMetric("activethreads", new IMetric() { @Override public Object getValueAndReset() { return activeThreads.get(); } }, metricsTimeBucketSecs); context.registerMetric("throttler_size", new IMetric() { @Override public Object getValueAndReset() { return throttler.size(); } }, metricsTimeBucketSecs); protocolFactory = new ProtocolFactory(conf); sitemapsAutoDiscovery = ConfUtils.getBoolean(stormConf, SITEMAP_DISCOVERY_PARAM_KEY, false); queueMode = 
ConfUtils.getString(conf, "fetcher.queue.mode", QUEUE_MODE_HOST); // check that the mode is known if (!queueMode.equals(QUEUE_MODE_IP) && !queueMode.equals(QUEUE_MODE_DOMAIN) && !queueMode.equals(QUEUE_MODE_HOST)) { LOG.error("Unknown partition mode : {} - forcing to byHost", queueMode); queueMode = QUEUE_MODE_HOST; } LOG.info("Using queue mode : {}", queueMode); this.crawlDelay = (long) (ConfUtils.getFloat(conf, "fetcher.server.delay", 1.0f) * 1000); this.maxCrawlDelay = (long) ConfUtils.getInt(conf, "fetcher.max.crawl.delay", 30) * 1000; } @Override public void declareOutputFields(OutputFieldsDeclarer declarer) { super.declareOutputFields(declarer); declarer.declare(new Fields("url", "content", "metadata")); } @Override public void cleanup() { protocolFactory.cleanup(); } @Override public void execute(Tuple input) { String urlString = input.getStringByField("url"); if (StringUtils.isBlank(urlString)) { LOG.info("[Fetcher #{}] Missing value for field url in tuple {}", taskID, input); // ignore silently collector.ack(input); return; } Metadata metadata = null; if (input.contains("metadata")) metadata = (Metadata) input.getValueByField("metadata"); if (metadata == null) metadata = Metadata.empty; URL url; try { url = new URL(urlString); } catch (MalformedURLException e) { LOG.error("{} is a malformed URL", urlString); // Report to status stream and ack if (metadata == Metadata.empty) { metadata = new Metadata(); } metadata.setValue(Constants.STATUS_ERROR_CAUSE, "malformed URL"); collector.emit( com.digitalpebble.stormcrawler.Constants.StatusStreamName, input, new Values(urlString, metadata, Status.ERROR)); collector.ack(input); return; } String key = getPolitenessKey(url); long delay = 0; try { activeThreads.incrementAndGet(); Protocol protocol = protocolFactory.getProtocol(url); BaseRobotRules rules = protocol.getRobotRules(urlString); boolean fromCache = false; if (rules instanceof RobotRules && ((RobotRules) rules).getContentLengthFetched().length == 0) { 
fromCache = true; eventCounter.scope("robots.fromCache").incrBy(1); } else { eventCounter.scope("robots.fetched").incrBy(1); } // autodiscovery of sitemaps // the sitemaps will be sent down the topology // as many times as there is a URL for a given host // the status updater will certainly cache things // but we could also have a simple cache mechanism here // as well. // if the robot come from the cache there is no point // in sending the sitemap URLs again // check in the metadata if discovery setting has been // overridden boolean smautodisco = sitemapsAutoDiscovery; String localSitemapDiscoveryVal = metadata .getFirstValue(SITEMAP_DISCOVERY_PARAM_KEY); if ("true".equalsIgnoreCase(localSitemapDiscoveryVal)) { smautodisco = true; } else if ("false".equalsIgnoreCase(localSitemapDiscoveryVal)) { smautodisco = false; } if (!fromCache && smautodisco) { for (String sitemapURL : rules.getSitemaps()) { emitOutlink(input, url, sitemapURL, metadata, SiteMapParserBolt.isSitemapKey, "true"); } } activeThreads.decrementAndGet(); if (!rules.isAllowed(urlString)) { LOG.info("Denied by robots.txt: {}", urlString); metadata.setValue(Constants.STATUS_ERROR_CAUSE, "robots.txt"); // Report to status stream and ack collector .emit(com.digitalpebble.stormcrawler.Constants.StatusStreamName, input, new Values(urlString, metadata, Status.ERROR)); collector.ack(input); return; } // check when we are allowed to process it long timeWaiting = 0; Long timeAllowed = throttler.getIfPresent(key); if (timeAllowed != null) { long now = System.currentTimeMillis(); long timeToWait = timeAllowed - now; if (timeToWait > 0) { timeWaiting = timeToWait; try { Thread.sleep(timeToWait); } catch (InterruptedException e) { LOG.error("[Fetcher #{}] caught InterruptedException caught while waiting"); Thread.currentThread().interrupt(); } } } delay = this.crawlDelay; // get the delay from robots // value is negative when not set long robotsDelay = rules.getCrawlDelay(); if (robotsDelay > 0) { // cap the value 
to a maximum // as some sites specify ridiculous values if (robotsDelay > maxCrawlDelay) { LOG.debug("Delay from robots capped at {} for {}", robotsDelay, url); delay = maxCrawlDelay; } else { delay = robotsDelay; } } LOG.debug("[Fetcher #{}] : Fetching {}", taskID, urlString); activeThreads.incrementAndGet(); long start = System.currentTimeMillis(); ProtocolResponse response = protocol.getProtocolOutput(urlString, metadata); long timeFetching = System.currentTimeMillis() - start; final int byteLength = response.getContent().length; averagedMetrics.scope("wait_time").update(timeWaiting); averagedMetrics.scope("fetch_time").update(timeFetching); averagedMetrics.scope("bytes_fetched").update(byteLength); eventCounter.scope("fetched").incrBy(1); eventCounter.scope("bytes_fetched").incrBy(byteLength); perSecMetrics.scope("bytes_fetched_perSec").update(byteLength); perSecMetrics.scope("fetched_perSec").update(1); LOG.info( "[Fetcher #{}] Fetched {} with status {} in {} after waiting {}", taskID, urlString, response.getStatusCode(), timeFetching, timeWaiting); response.getMetadata().putAll(metadata); response.getMetadata().setValue("fetch.statusCode", Integer.toString(response.getStatusCode())); response.getMetadata().setValue("fetch.loadingTime", Long.toString(timeFetching)); // determine the status based on the status code final Status status = Status.fromHTTPCode(response.getStatusCode()); // used when sending to status stream final Values values4status = new Values(urlString, response.getMetadata(), status); // if the status is OK emit on default stream if (status.equals(Status.FETCHED)) { if (response.getStatusCode() == 304) { // mark this URL as fetched so that it gets // rescheduled // but do not try to parse or index collector .emit(com.digitalpebble.stormcrawler.Constants.StatusStreamName, input, values4status); } else { collector.emit(Utils.DEFAULT_STREAM_ID, input, new Values(urlString, response.getContent(), response.getMetadata())); } } else if 
(status.equals(Status.REDIRECTION)) { // find the URL it redirects to String redirection = response.getMetadata().getFirstValue( HttpHeaders.LOCATION); // stores the URL it redirects to // used for debugging mainly - do not resolve the target // URL if (StringUtils.isNotBlank(redirection)) { response.getMetadata().setValue("_redirTo", redirection); } if (allowRedirs() && StringUtils.isNotBlank(redirection)) { emitOutlink(input, url, redirection, response.getMetadata()); } // Mark URL as redirected collector .emit(com.digitalpebble.stormcrawler.Constants.StatusStreamName, input, values4status); } else { // Error collector .emit(com.digitalpebble.stormcrawler.Constants.StatusStreamName, input, values4status); } } catch (Exception exece) { String message = exece.getMessage(); if (message == null) message = ""; // common exceptions for which we log only a short message if (exece.getCause() instanceof java.util.concurrent.TimeoutException || message.contains(" timed out")) { LOG.error("Socket timeout fetching {}", urlString); message = "Socket timeout fetching"; } else if (exece.getCause() instanceof java.net.UnknownHostException || exece instanceof java.net.UnknownHostException) { LOG.error("Unknown host {}", urlString); message = "Unknown host"; } else { LOG.error("Exception while fetching {}", urlString, exece); message = exece.getClass().getName(); } eventCounter.scope("exception").incrBy(1); // could be an empty, immutable Metadata if (metadata.size() == 0) { metadata = new Metadata(); } // add the reason of the failure in the metadata metadata.setValue("fetch.exception", message); collector.emit( com.digitalpebble.stormcrawler.Constants.StatusStreamName, input, new Values(urlString, metadata, Status.FETCH_ERROR)); } activeThreads.decrementAndGet(); // update the throttler throttler.put(key, System.currentTimeMillis() + delay); collector.ack(input); } private String getPolitenessKey(URL u) { String key; if (QUEUE_MODE_IP.equalsIgnoreCase(queueMode)) { try { final 
InetAddress addr = InetAddress.getByName(u.getHost()); key = addr.getHostAddress(); } catch (final UnknownHostException e) { // unable to resolve it, so don't fall back to host name LOG.warn("Unable to resolve: {}, skipping.", u.getHost()); return null; } } else if (QUEUE_MODE_DOMAIN.equalsIgnoreCase(queueMode)) { key = PaidLevelDomain.getPLD(u.getHost()); if (key == null) { LOG.warn("Unknown domain for url: {}, using hostname as key", u.toExternalForm()); key = u.getHost(); } } else { key = u.getHost(); if (key == null) { LOG.warn("Unknown host for url: {}, using URL string as key", u.toExternalForm()); key = u.toExternalForm(); } } return key.toLowerCase(Locale.ROOT); } }
package com.voxeo.rayo.client; import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Timer; import java.util.TimerTask; import java.util.UUID; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.media.mscontrol.join.Joinable; import org.joda.time.Duration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.rayo.core.AcceptCommand; import com.rayo.core.AnswerCommand; import com.rayo.core.CallCommand; import com.rayo.core.CallRef; import com.rayo.core.CallRejectReason; import com.rayo.core.DialCommand; import com.rayo.core.DtmfCommand; import com.rayo.core.HangupCommand; import com.rayo.core.JoinCommand; import com.rayo.core.JoinDestinationType; import com.rayo.core.OfferEvent; import com.rayo.core.RedirectCommand; import com.rayo.core.RejectCommand; import com.rayo.core.UnjoinCommand; import com.rayo.core.verb.Ask; import com.rayo.core.verb.Choices; import com.rayo.core.verb.HoldCommand; import com.rayo.core.verb.Input; import com.rayo.core.verb.InputMode; import com.rayo.core.verb.MuteCommand; import com.rayo.core.verb.Output; import com.rayo.core.verb.Record; import com.rayo.core.verb.RecordPauseCommand; import com.rayo.core.verb.RecordResumeCommand; import com.rayo.core.verb.Say; import com.rayo.core.verb.SeekCommand; import com.rayo.core.verb.SpeedDownCommand; import com.rayo.core.verb.SpeedUpCommand; import com.rayo.core.verb.Ssml; import com.rayo.core.verb.StopCommand; import com.rayo.core.verb.Transfer; import com.rayo.core.verb.UnholdCommand; import com.rayo.core.verb.UnmuteCommand; import com.rayo.core.verb.VerbRef; import com.rayo.core.verb.VolumeDownCommand; import com.rayo.core.verb.VolumeUpCommand; import com.voxeo.moho.Participant.JoinType; import com.voxeo.rayo.client.auth.AuthenticationListener; import 
com.voxeo.rayo.client.exception.DialTimeoutException; import com.voxeo.rayo.client.filter.XmppObjectFilter; import com.voxeo.rayo.client.listener.RayoMessageListener; import com.voxeo.rayo.client.listener.StanzaListener; import com.voxeo.rayo.client.registry.Call; import com.voxeo.rayo.client.registry.CallsRegistry; import com.voxeo.rayo.client.verb.ClientPauseCommand; import com.voxeo.rayo.client.verb.ClientResumeCommand; import com.voxeo.rayo.client.xmpp.extensions.Extension; import com.voxeo.rayo.client.xmpp.stanza.IQ; import com.voxeo.rayo.client.xmpp.stanza.Ping; import com.voxeo.rayo.client.xmpp.stanza.Presence; import com.voxeo.rayo.client.xmpp.stanza.Presence.Show; import com.voxeo.rayo.client.xmpp.stanza.Presence.Type; import com.voxeo.rayo.client.xmpp.stanza.Stanza; /** * This class servers as a client to the Rayo XMPP platform. * * @author martin * */ public class RayoClient { private Logger logger = LoggerFactory.getLogger(RayoClient.class); protected final XmppConnection connection; public static final String DEFAULT_RESOURCE = "voxeo"; private CallsRegistry callRegistry = new CallsRegistry(); private String rayoServer; private ReentrantReadWriteLock connectionLock = new ReentrantReadWriteLock(); private Timer pingTimer = null; public class Grammar { private String content; private String type; public Grammar(String content, String type) { this.content = content; this.type = type; } } /** * Creates a new client object. This object will be used to interact with an Rayo server. * * @param server Rayo Server that this client will be connecting to */ public RayoClient(String xmppServer, String rayoServer) { this(xmppServer, null, rayoServer); } /** * Creates a new client object that will use the specified port number. * This object will be used to interact with an Rayo server. 
* * @param server Server that this client will be connecting to * @param port Port number that the server is listening at */ public RayoClient(String xmppServer, Integer port, String rayoServer) { connection = new SimpleXmppConnection(xmppServer, port); this.rayoServer = rayoServer; } /** * Creates a Rayo Client using the given XMPP connection * * @param connection XMPP connection that will be used * @param rayoServer Rayo Sever to connect this Rayo client to */ public RayoClient(XmppConnection connection, String rayoServer) { this.connection = connection; this.rayoServer = rayoServer; } /** * Connects and authenticates into the Rayo Server. By default it will use the resource 'voxeo'. * * @param username Rayo username * @param password Rayo password * * @throws XmppException If the client is not able to connect or authenticate into the Rayo Server */ public void connect(String username, String password) throws XmppException { connect(username, password, DEFAULT_RESOURCE); } /** * Connects and authenticates into the Rayo Server. By default it will use the resource 'voxeo'. * * @param username Rayo username * @param password Rayo password * @param resource Resource that will be used in this communication * * @throws XmppException If the client is not able to connect or authenticate into the Rayo Server */ public void connect(String username, String password, String resource) throws XmppException { connect(username, password, resource, 5); } /** * Connects and authenticates into the Rayo Server. By default it will use the resource 'voxeo'. 
* * @param username Rayo username * @param password Rayo password * @param resource Resource that will be used in this communication * * @throws XmppException If the client is not able to connect or authenticate into the Rayo Server */ public void connect(String username, String password, String resource, int timeout) throws XmppException { Lock lock = connectionLock.writeLock(); lock.lock(); if (connection.isConnected()) { try { disconnect(); } catch (Exception e) { e.printStackTrace(); } } try { logger.info("Connecting Rayo client XMPP Connection"); if (!connection.isConnected()) { connection.connect(timeout); connection.login(username, password, resource, timeout); connection.addStanzaListener(new RayoMessageListener("offer") { @Override @SuppressWarnings("rawtypes") public void messageReceived(Object object) { //TODO: Stanza should have methods to fetch the JID node name, domain, etc. Stanza stanza = (Stanza)object; int at = stanza.getFrom().indexOf('@'); String callId = stanza.getFrom().substring(0, at); String domain = stanza.getFrom().substring(at+1); if (domain.contains(":")) { domain = domain.substring(0, domain.indexOf(':')); } Call call = new Call(callId, domain); callRegistry.registerCall(callId, call); } }); connection.addStanzaListener(new RayoMessageListener("end") { @Override @SuppressWarnings("rawtypes") public void messageReceived(Object object) { //TODO: Stanza should have methods to fetch the JID node name, domain, etc. 
Stanza stanza = (Stanza)object; int at = stanza.getFrom().indexOf('@'); String callId = stanza.getFrom().substring(0, at); callRegistry.unregisterCal(callId); } }); broadcastAvailability(); TimerTask pingTask = new TimerTask() { @Override public void run() { ping(); } }; pingTimer = new Timer(); pingTimer.schedule(pingTask, 5000, 30000); connection.addStanzaListener(new RayoMessageListener("ping") { @Override public void messageReceived(Object object) { IQ iq = (IQ)object; if (!iq.isError()) { // pong try { sendIQ(iq.result()); } catch (XmppException e) { e.printStackTrace(); } } } }); } else { logger.error("Trying to connect while the old XMPP connection is active. Please, disconnect first"); } logger.info("Rayo client is now connected"); } catch (XmppException xe) { logger.error("Error while trying to opean an XMPP connection"); xe.printStackTrace(); throw xe; } catch (Exception e) { logger.error("Error while trying to opean an XMPP connection"); e.printStackTrace(); throw new XmppException(e.getMessage()); } finally { lock.unlock(); } } public void setAvailable(boolean status) throws XmppException { if (status == true) { broadcastAvailability(); } else { broadcastUnavailability(); } } public void setStatus(Show status) throws XmppException { Presence presence = new Presence() .setId(UUID.randomUUID().toString()) .setFrom(connection.getUsername() + "@" + connection.getServiceName() + "/" + connection.getResource()) .setTo(rayoServer) .setShow(status); connection.send(presence); } private void broadcastAvailability() throws XmppException { Presence presence = new Presence() .setId(UUID.randomUUID().toString()) .setShow(Show.chat); connection.send(presence); presence = new Presence() .setId(UUID.randomUUID().toString()) .setFrom(connection.getUsername() + "@" + connection.getServiceName() + "/" + connection.getResource()) .setTo(rayoServer) .setShow(Show.chat); connection.send(presence); } private void broadcastUnavailability() throws XmppException { Presence 
presence = new Presence() .setId(UUID.randomUUID().toString()) .setFrom(connection.getUsername() + "@" + connection.getServiceName() + "/" + connection.getResource()) .setTo(rayoServer) .setType(Type.unavailable); connection.send(presence); presence = new Presence() .setId(UUID.randomUUID().toString()) .setType(Type.unavailable); connection.send(presence); } public void unavailable(String mixerName) throws XmppException { Presence presence = new Presence() .setId(UUID.randomUUID().toString()) .setFrom(connection.getUsername() + "@" + connection.getServiceName() + "/" + connection.getResource()) .setTo(mixerName + "@" +rayoServer) .setType(Type.unavailable); connection.send(presence); } public void available(String mixerName) throws XmppException { Presence presence = new Presence() .setId(UUID.randomUUID().toString()) .setFrom(connection.getUsername() + "@" + connection.getServiceName() + "/" + connection.getResource()) .setTo(mixerName + "@" + rayoServer) .setShow(Show.chat); connection.send(presence); } /** * Adds a callback class to listen for events on all the incoming stanzas. * * @param listener Stanza Callback. */ public void addStanzaListener(StanzaListener listener) { Lock lock = connectionLock.readLock(); lock.lock(); try { connection.addStanzaListener(listener); } finally { lock.unlock(); } } /** * Removes a stanza listener * * @param listener Stanza Callback to be removed */ public void removeStanzaListener(StanzaListener listener) { Lock lock = connectionLock.readLock(); lock.lock(); try { connection.removeStanzaListener(listener); } finally { lock.unlock(); } } /** * Adds a callback class to listen for authentication events. * * @param listener Callback. 
*/
public void addAuthenticationListener(AuthenticationListener listener) {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        connection.addAuthenticationListener(listener);
    } finally {
        lock.unlock();
    }
}

/**
 * Adds an XMPP filter.
 *
 * @param filter Filter object to be added
 */
public void addFilter(XmppObjectFilter filter) {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        connection.addFilter(filter);
    } finally {
        lock.unlock();
    }
}

/**
 * Removes an XMPP filter.
 *
 * @param filter Filter object to be removed
 */
public void removeFilter(XmppObjectFilter filter) {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        connection.removeFilter(filter);
    } finally {
        lock.unlock();
    }
}

/**
 * Disconnects this client connection from the Rayo server. Broadcasts
 * unavailability first and stops the keep-alive ping timer.
 *
 * @throws XmppException If the connection cannot be torn down cleanly
 */
public void disconnect() throws XmppException {

    Lock lock = connectionLock.writeLock();
    lock.lock();
    try {
        logger.info("Disconnecting Rayo client XMPP Connection");
        if (connection.isConnected()) {
            broadcastUnavailability();
            connection.disconnect();
        }
    } finally {
        logger.info("Rayo Client XMPP Connection has been disconnected");
        lock.unlock();
        // Stop the keep-alive pings started on connect.
        if (pingTimer != null) {
            pingTimer.cancel();
            pingTimer = null;
        }
    }
}

/**
 * <p>Waits for an Offer Event. Shortcut method to wait for an incoming call.</p>
 *
 * @return OfferEvent Offer event that has been received
 *
 * @throws XmppException If there is any problem waiting for offer event
 */
public OfferEvent waitForOffer() throws XmppException {

    return waitForOffer(null);
}

/**
 * <p>Waits for an Offer Event. Shortcut method to wait for an incoming call.</p>
 *
 * @param timeout Timeout in milliseconds, or {@code null} to wait indefinitely
 * @return OfferEvent Offer event that has been received, or {@code null} on timeout
 *
 * @throws XmppException If there is any problem waiting for offer event
 */
public OfferEvent waitForOffer(Integer timeout) throws XmppException {

    final StringBuilder callId = new StringBuilder();
    // Temporary listener that captures the offered call's id (node part of the 'from' JID).
    RayoMessageListener tempListener = new RayoMessageListener("offer") {
        @Override
        @SuppressWarnings("rawtypes")
        public void messageReceived(Object object) {
            Stanza stanza = (Stanza)object;
            callId.append(stanza.getFrom().substring(0, stanza.getFrom().indexOf('@')));
        }
    };
    addStanzaListener(tempListener);
    try {
        OfferEvent stanza = waitFor("offer", OfferEvent.class, timeout);
        // Robustness fix: waitFor returns null when the timeout elapses; previously
        // this method threw a NullPointerException in that case.
        if (stanza == null) {
            return null;
        }
        OfferEvent offer = new OfferEvent(callId.toString());
        offer.setTo(stanza.getTo());
        offer.setFrom(stanza.getFrom());
        offer.setHeaders(stanza.getHeaders());
        return offer;
    } finally {
        removeStanzaListener(tempListener);
    }
}

/**
 * <p>Waits for an Rayo message. This is a blocking call and therefore should be used carefully.
 * When invoked, the invoking thread will block until it receives the specified Rayo
 * message.</p>
 *
 * @param rayoMessage Rayo message that the invoking thread will be waiting for
 *
 * @return Object The first Rayo messaging received that matches the specified message name
 *
 * @throws XmppException If there is any problem waiting for the message
 */
public Object waitFor(String rayoMessage) throws XmppException {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        Extension extension = (Extension)connection.waitForExtension(rayoMessage);
        return extension.getObject();
    } finally {
        lock.unlock();
    }
}

/**
 * <p>Waits for an Rayo message. This is a blocking call and therefore should be used carefully.
 * When invoked, the invoking thread will block until it receives the specified Rayo
 * message.</p>
 *
 * @param rayoMessage Rayo message that the invoking thread will be waiting for
 * @param clazz Class to cast the returning object to
 *
 * @return T The first Rayo messaging received that matches the specified message name
 *
 * @throws XmppException If there is any problem waiting for the message
 */
@SuppressWarnings("unchecked")
public <T> T waitFor(String rayoMessage, Class<T> clazz) throws XmppException {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        Extension extension = (Extension)connection.waitForExtension(rayoMessage);
        return (T)extension.getObject();
    } finally {
        lock.unlock();
    }
}

/**
 * <p>Waits for an Rayo message. This is a blocking call but uses a timeout to specify
 * the amount of time that the connection will wait until the specified message is received.
 * If no message is received during the specified timeout then a <code>null</code> object
 * will be returned.</p>
 *
 * @param extensionName Rayo message that the invoking thread will be waiting for
 * @param timeout Timeout that will be used when waiting for an incoming Rayo message
 *
 * @return Object The first Rayo messaging received that matches the specified message name
 * or <code>null</code> if no message is received during the specified timeout
 *
 * @throws XmppException If there is any problem waiting for the message
 */
public Object waitFor(String extensionName, int timeout) throws XmppException {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        Extension extension = (Extension)connection.waitForExtension(extensionName, timeout);
        // Bug fix: on timeout waitForExtension returns null; return null as documented
        // instead of throwing a NullPointerException.
        return extension == null ? null : extension.getObject();
    } finally {
        lock.unlock();
    }
}

/**
 * <p>Waits for an Rayo message. This is a blocking call but uses a timeout to specify
 * the amount of time that the connection will wait until the specified message is received.
 * If no message is received during the specified timeout then a <code>null</code> object
 * will be returned.</p>
 *
 * @param extensionName Rayo message that the invoking thread will be waiting for
 * @param clazz Class to cast the returning object to
 * @param timeout Timeout that will be used when waiting for an incoming Rayo message
 *
 * @return T The first Rayo messaging received that matches the specified message name
 * or <code>null</code> if no message is received during the specified timeout
 *
 * @throws XmppException If there is any problem waiting for the message
 */
@SuppressWarnings("unchecked")
public <T> T waitFor(String extensionName, Class<T> clazz, Integer timeout) throws XmppException {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        Extension extension = (Extension)connection.waitForExtension(extensionName, timeout);
        // Bug fix: on timeout waitForExtension returns null; return null as documented
        // instead of throwing a NullPointerException.
        return extension == null ? null : (T)extension.getObject();
    } finally {
        lock.unlock();
    }
}

/**
 * Answers the call with the id specified as a parameter.
 *
 * @param callId Id of the call that will be answered
 *
 * @return IQ Resulting IQ
 *
 * @throws XmppException If there is any issue while answering the call
 */
public IQ answer(String callId) throws XmppException {

    return answer(callId, new AnswerCommand());
}

/**
 * Answers the call with the id specified as a parameter.
 *
 * @param callId Id of the call that will be answered
 * @param command Answer command
 *
 * @return IQ Resulting IQ
 *
 * @throws XmppException If there is any issue while answering the call
 */
public IQ answer(String callId, AnswerCommand command) throws XmppException {

    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(callId))
        .setChild(Extension.create(command));
    return sendIQ(iq);
}

/**
 * Accepts the call with the id specified as a parameter.
* * @param callId Id of the call that will be accepted * @return IQ Resulting IQ * @throws XmppException If there is any issue while accepting the call */ public IQ accept(String callId) throws XmppException { return accept(callId, false); } /** * Accepts the call with the id specified as a parameter. * * @param callId Id of the call that will be accepted * @param earlyMedia Whether early media is supported or not * * @return IQ Resulting IQ * @throws XmppException If there is any issue while accepting the call */ public IQ accept(String callId, boolean earlyMedia) throws XmppException { AcceptCommand command = new AcceptCommand(); command.setEarlyMedia(earlyMedia); return accept(callId, command); } /** * Accepts the call with the id specified as a parameter. * * @param callId Id of the call that will be accepted * @param command Accept command * @return IQ Resulting IQ * @throws XmppException If there is any issue while accepting the call */ public IQ accept(String callId, AcceptCommand command) throws XmppException { return command(command, callId); } /** * Rejects the latest call that this connection has received from the Rayo server * * @param callId Id of the call * @return IQ Resulting IQ * * @throws XmppException If there is any issue while rejecting the call */ public IQ reject(String callId) throws XmppException { return reject(CallRejectReason.DECLINE, callId); } /** * Rejects a call id * * @param reject Reject command * @param callId Id of the call * @return IQ Resulting IQ * * @throws XmppException If there is any issue while rejecting the call */ public IQ reject(String callId, RejectCommand reject) throws XmppException { return command(reject, callId); } /** * Rejects the call with the id specified as a parameter. 
* * @param callId Id of the call that will be accepted * @return IQ Resulting IQ * * @throws XmppException If there is any issue while rejecting the call */ public IQ reject(CallRejectReason reason, String callId) throws XmppException { RejectCommand reject = new RejectCommand(callId, reason); return command(reject, callId); } public VerbRef outputSsml(String ssml, String callId) throws XmppException { return internalOutput(new Ssml(ssml), callId); } public VerbRef output(URI uri, String callId) throws XmppException { return internalOutput(new Ssml(String.format("<audio src=\"%s\"/>",uri.toString())), callId); } public VerbRef output(String text, String callId) throws XmppException { return internalOutput(new Ssml(text), callId); } /** * Sends a 'Say' command including some SSML text * * @param ssml SSML text * @param callId Id of the call to which the say command will be sent * * @return VerbRef VerbRef instance that allows to handle the say stream * * @throws XmppException If there is any issues while sending the say command */ public VerbRef saySsml(String ssml, String callId) throws XmppException { return internalSay(new Ssml(ssml), callId); } /** * Sends a 'Say' command to Rayo that will play the specified audio file * * @param audio URI to the audio file * @param callId Id of the call * * @return VerbRef VerbRef instance that allows to handle the say stream * * @throws XmppException If there is any issues while sending the say command * @throws URISyntaxException If the specified audio file is not a valid URI */ public VerbRef sayAudio(String audio, String callId) throws XmppException, URISyntaxException { return say(new URI(audio), callId); } /** * Sends a 'Say' command to Rayo that will play the specified audio file * * @param uri URI to an audio resource that will be played * @param callId Id of the call to which the say command will be sent * @return VerbRef VerbRef instance that allows to handle the say stream * * @throws XmppException If there is any 
issues while sending the say command */ public VerbRef say(URI uri, String callId) throws XmppException { return internalSay(new Ssml(String.format("<audio src=\"%s\"/>",uri.toString())), callId); } /** * Instructs Rayo to say the specified text on the call with the specified id * * @param text Text that we want to say * @param callId Id of the call to which the say command will be sent * @return VerbRef VerbRef instance that allows to handle the say stream * * @throws XmppException If there is any issues while sending the say command */ public VerbRef say(String text, String callId) throws XmppException { return internalSay(new Ssml(text), callId); } /** * Transfers a specific call to another destination * * @param to URI where the call will be transfered * @param callId Id of the call we want to transfer * @return IQ Resulting IQ * @throws XmppException If there is any issue while transfering the call */ public IQ transfer(URI to, String callId) throws XmppException { List<URI> list = new ArrayList<URI>(); list.add(to); return transfer(null, list, callId); } /** * Transfers a specific call to another destination * * @param to URI where the call will be transfered * @param callId Id of the call we want to transfer * @return IQ Resulting IQ * @throws XmppException If there is any issue while transfering the call * @throws URISyntaxException If an invalid URI is passed as a parameter */ public IQ transfer(String to, String callId) throws XmppException, URISyntaxException { return transfer(new URI(to), callId); } public IQ transfer(List<URI> to, String callId) throws XmppException { return transfer(null, to, callId); } /** * Transfers a call to another phone speaking some text before doing the transfer. 
* * @param text Text that will be prompted to the user * @param to URI of the call destination * @param callId Id of the call that we want to transfer * @return IQ Resulting IQ * @throws XmppException If there is any issue while transfering the call */ public IQ transfer(String text, List<URI> to, String callId) throws XmppException { Transfer transfer = new Transfer(); transfer.setTimeout(new Duration(20000)); transfer.setTerminator('#'); if (text != null) { Ssml ssml = new Ssml(text); transfer.setRingbackTone(ssml); } transfer.setTo(to); return transfer(transfer, callId); } /** * Transfers a call to another phone with the specified settings * * @param transfer Model object with all the transfer settings * @param callId Id of the call that we want to transfer * @return IQ Resulting IQ * * @throws XmppException If there is any issue while transfering the call */ public IQ transfer(Transfer transfer,String callId) throws XmppException { IQ iq = new IQ(IQ.Type.set) .setFrom(buildFrom()) .setTo(buildTo(callId)) .setChild(Extension.create(transfer)); return sendIQ(iq); } public IQ hold(String callId) throws XmppException { HoldCommand hold = new HoldCommand(); return command(hold,callId); } public IQ unhold(String callId) throws XmppException { UnholdCommand unhold = new UnholdCommand(); return command(unhold,callId); } public IQ mute(String callId) throws XmppException { MuteCommand mute = new MuteCommand(); return command(mute,callId); } public IQ unmute(String callId) throws XmppException { UnmuteCommand unmute = new UnmuteCommand(); return command(unmute,callId); } /** * Calls a specific destination * * @param to URI to dial * * @throws XmppException If there is any issue while dialing */ public CallRef dial(URI to) throws XmppException { return dial(null, null, to); } /** * Sends a dial message to the specified Rayo/Gateway node * to dial a destination from the specified URI * * @param to URI that we want to dial * * @throws XmppException If there is any issue 
while transfering the call */ public CallRef dial(String destination, URI to) throws XmppException { return dial(destination, null, to); } /** * Sends a dial message to the connected node * to dial a destination from the specified URI * * @param from URI that is dialing * @param to URI that we want to dial * * @throws XmppException If there is any issue while transfering the call */ public CallRef dial(URI from, URI to) throws XmppException { return dial(null, from, to); } /** * Sends a dial message to the connected node * to dial a destination from the specified URI * * @param from URI that is dialing * @param to URI that we want to dial * @param headers Map with dial headers * * @throws XmppException If there is any issue while transfering the call */ public CallRef dial(URI from, URI to, Map<String, String> headers) throws XmppException { return dial(null, from, to, headers); } /** * Sends a dial message to a specific rayo node or gateway * to dial a destination from the specified URI * * @param String Rayo/Gateway node * @param from URI that is dialing * @param to URI that we want to dial * @param headers Map with dial headers * * @throws XmppException If there is any issue while transfering the call */ public CallRef dial(String destination, URI from, URI to) throws XmppException { return dial(destination, from, to, new HashMap<String, String>()); } /** * Sends a dial message to a specific rayo node or gateway * to dial a destination from the specified URI * * @param String Rayo/Gateway node * @param from URI that is dialing * @param to URI that we want to dial * @param headers Map with dial headers * * @throws XmppException If there is any issue while transfering the call */ public CallRef dial(String destination, URI from, URI to, Map<String, String> headers) throws XmppException { DialCommand dial = new DialCommand(); dial.setTo(to); dial.setHeaders(headers); if (from == null) { try { from = new 
URI(String.format("sip:%s:5060",InetAddress.getLocalHost().getHostAddress())); } catch (Exception e) { e.printStackTrace(); } } dial.setFrom(from); IQ iq = new IQ(IQ.Type.set) .setFrom(buildFrom()) .setTo(rayoServer) .setChild(Extension.create(dial)); CallRef ref = getCallRef(sendAndGetRef(null, iq)); if (ref == null) { throw new DialTimeoutException(); } // dials return a call id on refs, so different than other components //ref.setCallId(ref.getVerbId()); return ref; } private IQ sendAndGetRef(String callId, IQ iq) throws XmppException { Lock lock = connectionLock.readLock(); lock.lock(); try { IQ result = ((IQ)connection.sendAndWait(iq)); if (result != null) { if (result.hasChild("error")) { throw new XmppException(result.getError()); } return result; } else { return null; } } finally { lock.unlock(); } } private CallRef getCallRef(IQ iq) { return (CallRef)iq.getExtension().getObject(); } private VerbRef getVerbRef(IQ iq) { CallRef callRef = (CallRef)iq.getExtension().getObject(); return new VerbRef(iq.getFromJid().getNode(), callRef.getCallId()); } /** * Instructs Rayo to ask a question with the specified choices on the call with the given id * * @param text Text that will be prompted * @param choicesText Choices * @param callId Id of the call in which the question will be asked * @return IQ Resulting IQ * @throws XmppException If there is any issue while asking the question */ public IQ ask(String text, String choicesText, String callId) throws XmppException { Ask ask = new Ask(); Ssml ssml = new Ssml(text); ask.setPrompt(ssml); List<Choices> list = new ArrayList<Choices>(); Choices choices = new Choices(); choices.setContent(choicesText); choices.setContentType("application/grammar+voxeo"); list.add(choices); ask.setChoices(list); ask.setTerminator('#'); ask.setMode(InputMode.DTMF); ask.setTimeout(new Duration(650000)); IQ iq = new IQ(IQ.Type.set) .setFrom(buildFrom()) .setTo(buildTo(callId)) .setChild(Extension.create(ask)); return sendIQ(iq); } public 
VerbRef input(String simpleGrammar, String callId) throws XmppException { Input input = new Input(); List<Choices> choices = new ArrayList<Choices>(); Choices choice = new Choices(); choice.setContent(simpleGrammar); choice.setContentType("application/grammar+voxeo"); choices.add(choice); input.setGrammars(choices); return input(input, callId); } public VerbRef input(Input input, String callId, Grammar... grammars) throws XmppException { List<Choices> choices = new ArrayList<Choices>(); for (Grammar grammar: grammars) { Choices choice = new Choices(); choice.setContent(grammar.content); choice.setContentType(grammar.type); choices.add(choice); } input.setGrammars(choices); return input(input, callId); } public VerbRef input(Input input, String callId) throws XmppException { IQ iq = new IQ(IQ.Type.set) .setFrom(buildFrom()) .setTo(buildTo(callId)) .setChild(Extension.create(input)); return getVerbRef(sendAndGetRef(callId, iq)); } private VerbRef internalSay(Ssml item, String callId) throws XmppException { Say say = new Say(); say.setPrompt(item); IQ iq = new IQ(IQ.Type.set) .setFrom(buildFrom()) .setTo(buildTo(callId)) .setChild(Extension.create(say)); return getVerbRef(sendAndGetRef(callId, iq)); } private VerbRef internalOutput(Ssml item, String callId) throws XmppException { Output output = new Output(); output.setPrompt(item); return output(output, callId); } public VerbRef output(Output output, String callId) throws XmppException { IQ iq = new IQ(IQ.Type.set) .setFrom(buildFrom()) .setTo(buildTo(callId)) .setChild(Extension.create(output)); return getVerbRef(sendAndGetRef(callId, iq)); } /** * Pauses a verb component * * @param ref Verb component that we want to pause * @return IQ Resulting IQ */ public IQ pause(VerbRef ref) throws XmppException { ClientPauseCommand pause = new ClientPauseCommand(); IQ iq = new IQ(IQ.Type.set) .setFrom(buildFrom()) .setTo(buildTo(ref.getCallId(),ref.getVerbId())) .setChild(Extension.create(pause)); return sendIQ(iq); } /** * 
Resumes a verb component.
 *
 * @param ref Verb component that we want to resume
 * @return IQ Resulting IQ
 */
public IQ resume(VerbRef ref) throws XmppException {

    ClientResumeCommand resume = new ClientResumeCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(resume));
    return sendIQ(iq);
}

/**
 * Speeds up.
 *
 * @param ref Verb component that we want to speed up
 * @return IQ Resulting IQ
 */
public IQ speedUp(VerbRef ref) throws XmppException {

    SpeedUpCommand speedup = new SpeedUpCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(speedup));
    return sendIQ(iq);
}

/**
 * Speeds down.
 *
 * @param ref Verb component that we want to speed down
 * @return IQ Resulting IQ
 */
public IQ speedDown(VerbRef ref) throws XmppException {

    SpeedDownCommand speedDown = new SpeedDownCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(speedDown));
    return sendIQ(iq);
}

/**
 * Turn volume up.
 *
 * @param ref Verb component that we want to turn volume up
 * @return IQ Resulting IQ
 */
public IQ volumeUp(VerbRef ref) throws XmppException {

    VolumeUpCommand volumeUp = new VolumeUpCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(volumeUp));
    return sendIQ(iq);
}

/**
 * Turn volume down.
 *
 * @param ref Verb component that we want to turn volume down
 * @return IQ Resulting IQ
 */
public IQ volumeDown(VerbRef ref) throws XmppException {

    VolumeDownCommand volumeDown = new VolumeDownCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(volumeDown));
    return sendIQ(iq);
}

/**
 * Pauses a record component.
 *
 * @param ref Verb component that we want to pause
 * @return IQ Resulting IQ
 */
public IQ pauseRecord(VerbRef ref) throws XmppException {

    RecordPauseCommand pause = new RecordPauseCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(pause));
    return sendIQ(iq);
}

/**
 * Resumes a record component.
 *
 * @param ref Verb component that we want to resume
 * @return IQ Resulting IQ
 */
public IQ resumeRecord(VerbRef ref) throws XmppException {

    RecordResumeCommand resume = new RecordResumeCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(resume));
    return sendIQ(iq);
}

/**
 * Performs a seek operation on the given verb.
 *
 * @param ref Verb component that we want to seek on
 * @param command Seek command to execute
 * @return IQ Resulting IQ
 */
public IQ seek(VerbRef ref, SeekCommand command) throws XmppException {

    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(command));
    return sendIQ(iq);
}

/**
 * Stops a verb component.
 *
 * @param ref Verb component that we want to stop
 * @return IQ Resulting IQ
 */
public IQ stop(VerbRef ref) throws XmppException {

    StopCommand stop = new StopCommand();
    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(ref.getCallId(), ref.getVerbId()))
        .setChild(Extension.create(stop));
    return sendIQ(iq);
}

/**
 * Starts recording on the given call with default record settings.
 *
 * @param callId Id of the call
 * @return VerbRef Reference to the record verb
 * @throws XmppException If there is any issue while starting the recording
 */
public VerbRef record(String callId) throws XmppException {

    return record(new Record(), callId);
}

/**
 * Starts recording on the given call.
 *
 * @param record Record command with the recording settings
 * @param callId Id of the call
 * @return VerbRef Reference to the record verb
 * @throws XmppException If there is any issue while starting the recording
 */
public VerbRef record(Record record, String callId) throws XmppException {

    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(callId))
        .setChild(Extension.create(record));
    return getVerbRef(sendAndGetRef(callId, iq));
}

/**
 * Hangs up the specified call id.
 *
 * @param callId Id of the call to be hung up
 * @return IQ Resulting IQ
 */
public IQ hangup(String callId) throws XmppException {

    return hangup(callId, new HangupCommand(null));
}

/**
 * Hangs up the specified call id.
 *
 * @param callId Id of the call to be hung up
 * @param command Hangup command
 * @return IQ Resulting IQ
 */
public IQ hangup(String callId, HangupCommand command) throws XmppException {

    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(callId))
        .setChild(Extension.create(command));
    return sendIQ(iq);
}

/**
 * Unjoins the given call from a call or mixer.
 *
 * @param from Id of the call or mixer to unjoin from
 * @param type Whether 'from' is a call or a mixer
 * @param callId Id of the call
 * @return IQ Resulting IQ
 */
public IQ unjoin(String from, JoinDestinationType type, String callId) throws XmppException {

    UnjoinCommand unjoin = new UnjoinCommand();
    unjoin.setFrom(from);
    unjoin.setType(type);
    return command(unjoin, callId);
}

/**
 * Joins the given call to a call or mixer.
 *
 * @param to Id of the call or mixer to join to
 * @param media Media mode name ({@link JoinType}), or {@code null}
 * @param direction Join direction name ({@link Joinable.Direction}), or {@code null}
 * @param type Whether 'to' is a call or a mixer
 * @param callId Id of the call
 * @return IQ Resulting IQ
 */
public IQ join(String to, String media, String direction, JoinDestinationType type, String callId) throws XmppException {

    JoinCommand join = new JoinCommand();
    join.setTo(to);
    if (direction != null) {
        join.setDirection(Joinable.Direction.valueOf(direction.toUpperCase()));
    } else {
        join.setDirection(null);
    }
    if (media != null) {
        join.setMedia(JoinType.valueOf(media.toUpperCase()));
    } else {
        join.setMedia(null);
    }
    join.setType(type);
    return command(join, callId);
}

/**
 * Sends the given join command to the call.
 *
 * @param join Join command
 * @param callId Id of the call
 * @return IQ Resulting IQ
 */
public IQ join(JoinCommand join, String callId) throws XmppException {

    return command(join, callId);
}

/**
 * Sends DTMF tones to the given call.
 *
 * @param tones DTMF tone string
 * @param callId Id of the call
 * @return IQ Resulting IQ
 */
public IQ dtmf(String tones, String callId) throws XmppException {

    DtmfCommand dtmf = new DtmfCommand(tones);
    return command(dtmf, callId);
}

/**
 * Sends an arbitrary call command to the given call and waits for the response.
 *
 * @param command Call command to send
 * @param callId Id of the call
 * @return IQ Resulting IQ
 */
public IQ command(CallCommand command, String callId) throws XmppException {

    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(buildTo(callId))
        .setChild(Extension.create(command));
    return sendIQ(iq);
}

/**
 * Dials using a fully-configured dial command.
 *
 * @param command Dial command
 * @return CallRef Reference to the newly created call
 * @throws XmppException If there is any issue while dialing
 * @throws DialTimeoutException If no response arrives in time
 */
public CallRef dial(DialCommand command) throws XmppException {

    IQ iq = new IQ(IQ.Type.set)
        .setFrom(buildFrom())
        .setTo(rayoServer)
        .setChild(Extension.create(command));
    // Bug fix: sendAndGetRef returns null on timeout; calling getCallRef on a null IQ
    // threw a NullPointerException instead of the intended DialTimeoutException.
    IQ result = sendAndGetRef(null, iq);
    CallRef ref = result == null ? null : getCallRef(result);
    if (ref == null) {
        throw new DialTimeoutException();
    }
    // Dials return a call id on refs, so different than other components.
    return ref;
}

/**
 * Redirects an existing call to the given URI.
 *
 * @param uri URI for redirecting the call to
 * @param callId Id of the call to redirect
 * @return IQ Resulting IQ
 */
public IQ redirect(URI uri, String callId) throws XmppException {

    RedirectCommand redirect = new RedirectCommand();
    redirect.setTo(uri);
    return redirect(redirect, callId);
}

/**
 * Redirects an existing call.
 *
 * @param command Redirect command
 * @param callId Id of the call to redirect
 * @return IQ Resulting IQ
 */
public IQ redirect(RedirectCommand command, String callId) throws XmppException {

    return command(command, callId);
}

/**
 * Sends the IQ and blocks until the server replies.
 *
 * @param iq IQ stanza to send
 * @return IQ Response IQ (may be {@code null} — unlike sendAndGetRef, no error translation is done here)
 * @throws XmppException If the stanza cannot be sent
 */
protected IQ sendIQ(IQ iq) throws XmppException {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        return (IQ)connection.sendAndWait(iq);
    } finally {
        lock.unlock();
    }
}

// Builds this client's full JID (user@service/resource) for outgoing stanzas.
private String buildFrom() {

    return connection.getUsername() + "@" + connection.getServiceName() + "/" + connection.getResource();
}

// Builds the destination JID for a call on the Rayo server.
private String buildTo(String callId) {

    return buildTo(callId, null);
}

// Builds the destination JID for a call, optionally targeting a verb resource.
private String buildTo(String callId, String resourceId) {

    String to = callId + "@" + rayoServer;
    if (resourceId != null) {
        to = to + "/" + resourceId;
    }
    return to;
}

// Exposes the underlying XMPP connection.
public XmppConnection getXmppConnection() {

    return connection;
}

// Keep-alive: sends an XMPP ping IQ to the Rayo server if still connected.
private void ping() {

    Lock lock = connectionLock.readLock();
    lock.lock();
    try {
        if (connection.isConnected()) {
            IQ ping = new IQ(IQ.Type.get)
                .setFrom(buildFrom())
                .setTo(rayoServer)
                .setChild(new Ping());
            try {
                connection.send(ping);
            } catch (XmppException e) {
                e.printStackTrace();
            }
        }
    } finally {
        lock.unlock();
    }
}
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license. package com.intellij.ide.ui; import com.fasterxml.jackson.jr.ob.JSON; import com.intellij.ide.plugins.cl.PluginAwareClassLoader; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.ui.ImageDataByPathLoader; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.IconPathPatcher; import com.intellij.openapi.util.SystemInfoRt; import com.intellij.ui.ColorHexUtil; import com.intellij.ui.Gray; import com.intellij.util.ObjectUtils; import com.intellij.util.SVGLoader; import com.intellij.util.io.DigestUtil; import com.intellij.util.ui.JBDimension; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import javax.swing.plaf.BorderUIResource; import javax.swing.plaf.ColorUIResource; import java.awt.*; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Constructor; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.security.MessageDigest; import java.util.*; import java.util.function.Function; /** * @author Konstantin Bulenkov */ public final class UITheme { public static final String FILE_EXT_ENDING = ".theme.json"; private static final Logger LOG = Logger.getInstance(UITheme.class); private String name; private boolean dark; private String author; private String id; private String editorScheme; private Map<String, Object> ui; private @Nullable Map<String, Object> icons; private IconPathPatcher patcher; private Map<String, Object> background; private Map<String, Object> emptyFrameBackground; private 
@Nullable Map<String, Object> colors; private @Nullable Map<String, Object> iconColorsOnSelection; private ClassLoader providerClassLoader = getClass().getClassLoader(); private String editorSchemeName; private SVGLoader.SvgElementColorPatcherProvider colorPatcher; private SVGLoader.SvgElementColorPatcherProvider selectionColorPatcher; private static final String OS_MACOS_KEY = "os.mac"; private static final String OS_WINDOWS_KEY = "os.windows"; private static final String OS_LINUX_KEY = "os.linux"; private static final String OS_DEFAULT_KEY = "os.default"; private UITheme() { } public String getName() { return name; } public boolean isDark() { return dark; } public String getAuthor() { return author; } public URL getResource(String path) { if (isTempTheme()) { File file = new File(path); if (file.exists()) { try { return file.toURI().toURL(); } catch (MalformedURLException e) { LOG.warn(e); } } } return providerClassLoader.getResource(path); } public @Nullable InputStream getResourceAsStream(String path) { if (isTempTheme()) { Path file = Path.of(path); if (Files.exists(file)) { try { return Files.newInputStream(file); } catch (IOException e) { LOG.error(e); } } } return providerClassLoader.getResourceAsStream(path); } private boolean isTempTheme() { return "Temp theme".equals(id); } // it caches classes - must be not extracted to util class // .disable(JSON.Feature.PRESERVE_FIELD_ORDERING) - cannot be disabled, for unknown reason order is important // for example, button label font color for light theme is not white, but black private static final JSON JSON_READER = JSON.builder() .enable(JSON.Feature.READ_ONLY) .build(); public static @NotNull UITheme loadFromJson(@NotNull InputStream stream, @NotNull @NonNls String themeId, @Nullable ClassLoader provider, @NotNull Function<? 
super String, String> iconsMapper) throws IOException { UITheme theme = JSON_READER.beanFrom(UITheme.class, stream); theme.id = themeId; return loadFromJson(theme, provider, iconsMapper); } public static @NotNull UITheme loadFromJson(byte[] data, @NotNull @NonNls String themeId, @Nullable ClassLoader provider, @NotNull Function<? super String, String> iconsMapper) throws IOException { UITheme theme = JSON_READER.beanFrom(UITheme.class, data); theme.id = themeId; return loadFromJson(theme, provider, iconsMapper); } private static @NotNull UITheme loadFromJson(@NotNull UITheme theme, @Nullable ClassLoader provider, @NotNull Function<? super String, String> iconsMapper) throws IllegalStateException { if (provider != null) { theme.providerClassLoader = provider; } initializeNamedColors(theme); if (theme.iconColorsOnSelection != null && !theme.iconColorsOnSelection.isEmpty()) { Map<String, String> colors = new HashMap<>(theme.iconColorsOnSelection.size()); for (Map.Entry<String, Object> entry : theme.iconColorsOnSelection.entrySet()) { colors.put(entry.getKey(), entry.getValue().toString()); } Map<String, Integer> alpha = new HashMap<>(colors.size()); colors.forEach((key, value) -> alpha.put(value, 255)); theme.selectionColorPatcher = new SVGLoader.SvgElementColorPatcherProvider() { @Override public SVGLoader.@Nullable SvgElementColorPatcher forPath(@Nullable String path) { return SVGLoader.newPatcher(null, colors, alpha); } }; } if (theme.icons != null && !theme.icons.isEmpty()) { theme.patcher = new IconPathPatcher() { @Override public @Nullable String patchPath(@NotNull String path, @Nullable ClassLoader classLoader) { if (classLoader instanceof PluginAwareClassLoader) { String pluginId = ((PluginAwareClassLoader)classLoader).getPluginId().getIdString(); Object icons = theme.icons.get(pluginId); if (icons instanceof Map) { @SuppressWarnings("unchecked") Object pluginIconPath = ((Map<String, Object>)icons).get(path); if (pluginIconPath instanceof String) { return 
iconsMapper.apply((String)pluginIconPath); } } } Object value = theme.icons.get(path); if (value == null && path.charAt(0) != '/') { value = theme.icons.get('/' + path); } return value instanceof String ? iconsMapper.apply((String)value) : null; } @Override public @Nullable ClassLoader getContextClassLoader(@NotNull String path, @Nullable ClassLoader originalClassLoader) { return theme.providerClassLoader; } }; Object palette = theme.icons.get("ColorPalette"); if (palette instanceof Map) { @SuppressWarnings("rawtypes") Map colors = (Map)palette; PaletteScopeManager paletteScopeManager = new PaletteScopeManager(); for (Object o : colors.keySet()) { String colorKey = o.toString(); PaletteScope scope = paletteScopeManager.getScope(colorKey); if (scope == null) { continue; } String key = toColorString(colorKey, theme.isDark()); Object v = colors.get(colorKey); if (v instanceof String) { String value = (String)v; Object namedColor = theme.colors != null ? theme.colors.get(value) : null; if (namedColor instanceof String) { value = (String)namedColor; } String alpha = null; if (value.length() == 9) { alpha = value.substring(7); value = value.substring(0, 7); } if (ColorHexUtil.fromHex(key, null) != null && ColorHexUtil.fromHex(value, null) != null) { scope.newPalette.put(key, value); int fillTransparency = -1; if (alpha != null) { try { fillTransparency = Integer.parseInt(alpha, 16); } catch (Exception ignore) { } } if (fillTransparency != -1) { scope.alphas.put(value, fillTransparency); } } } } theme.colorPatcher = new SVGLoader.SvgElementColorPatcherProvider() { @Override public @Nullable SVGLoader.SvgElementColorPatcher forPath(@Nullable String path) { PaletteScope scope = paletteScopeManager.getScopeByPath(path); return scope == null ? 
null : SVGLoader.newPatcher(scope.digest(), scope.newPalette, scope.alphas); } }; } } return theme; } private static void initializeNamedColors(UITheme theme) { Map<String, Object> map = theme.colors; if (map == null) { return; } Set<String> namedColors = map.keySet(); for (String key : namedColors) { Object value = map.get(key); if (value instanceof String && !((String)value).startsWith("#")) { map.put(key, ObjectUtils.notNull(map.get(map.get(key)), Gray.TRANSPARENT)); } } if (theme.iconColorsOnSelection != null) { HashSet<Map.Entry<String, Object>> entries = new HashSet<>(theme.iconColorsOnSelection.entrySet()); theme.iconColorsOnSelection.clear(); for (Map.Entry<String, Object> entry : entries) { Object key = entry.getKey(); Object value = entry.getValue(); if (!key.toString().startsWith("#")) key = map.get(key); if (!value.toString().startsWith("#")) value = map.get(value); if (key.toString().startsWith("#") & value.toString().startsWith("#")) { theme.iconColorsOnSelection.put(key.toString(), value); } } } } private static String toColorString(@NotNull String key, boolean darkTheme) { if (darkTheme && colorPalette.get(key + ".Dark") != null) { key += ".Dark"; } String color = colorPalette.get(key); return color == null ? 
key.toLowerCase(Locale.ENGLISH) : color.toLowerCase(Locale.ENGLISH); } @TestOnly public static Map<String, String> getColorPalette() { return Collections.unmodifiableMap(colorPalette); } private static final @NonNls Map<String, String> colorPalette; static { colorPalette = Map.ofEntries( Map.entry("Actions.Red", "#DB5860"), Map.entry("Actions.Red.Dark", "#C75450"), Map.entry("Actions.Yellow", "#EDA200"), Map.entry("Actions.Yellow.Dark", "#F0A732"), Map.entry("Actions.Green", "#59A869"), Map.entry("Actions.Green.Dark", "#499C54"), Map.entry("Actions.Blue", "#389FD6"), Map.entry("Actions.Blue.Dark", "#3592C4"), Map.entry("Actions.Grey", "#6E6E6E"), Map.entry("Actions.Grey.Dark", "#AFB1B3"), Map.entry("Actions.GreyInline", "#7F8B91"), Map.entry("Actions.GreyInline.Dark", "#7F8B91"), Map.entry("Objects.Grey", "#9AA7B0"), Map.entry("Objects.Blue", "#40B6E0"), Map.entry("Objects.Green", "#62B543"), Map.entry("Objects.Yellow", "#F4AF3D"), Map.entry("Objects.YellowDark", "#D9A343"), Map.entry("Objects.Purple", "#B99BF8"), Map.entry("Objects.Pink", "#F98B9E"), Map.entry("Objects.Red", "#F26522"), Map.entry("Objects.RedStatus", "#E05555"), Map.entry("Objects.GreenAndroid", "#3DDC84"), Map.entry("Objects.BlackText", "#231F20"), Map.entry("Checkbox.Background.Default", "#FFFFFF"), Map.entry("Checkbox.Background.Default.Dark", "#43494A"), Map.entry("Checkbox.Background.Disabled", "#F2F2F2"), Map.entry("Checkbox.Background.Disabled.Dark", "#3C3F41"), Map.entry("Checkbox.Border.Default", "#b0b0b0"), Map.entry("Checkbox.Border.Default.Dark", "#6B6B6B"), Map.entry("Checkbox.Border.Disabled", "#BDBDBD"), Map.entry("Checkbox.Border.Disabled.Dark", "#545556"), Map.entry("Checkbox.Focus.Thin.Default", "#7B9FC7"), Map.entry("Checkbox.Focus.Thin.Default.Dark", "#466D94"), Map.entry("Checkbox.Focus.Wide", "#97C3F3"), Map.entry("Checkbox.Focus.Wide.Dark", "#3D6185"), Map.entry("Checkbox.Foreground.Disabled", "#ABABAB"), Map.entry("Checkbox.Foreground.Disabled.Dark", "#606060"), 
Map.entry("Checkbox.Background.Selected", "#4F9EE3"), Map.entry("Checkbox.Background.Selected.Dark", "#43494A"), Map.entry("Checkbox.Border.Selected", "#4B97D9"), Map.entry("Checkbox.Border.Selected.Dark", "#6B6B6B"), Map.entry("Checkbox.Foreground.Selected", "#FEFEFE"), Map.entry("Checkbox.Foreground.Selected.Dark", "#A7A7A7"), Map.entry("Checkbox.Focus.Thin.Selected", "#ACCFF7"), Map.entry("Checkbox.Focus.Thin.Selected.Dark", "#466D94"), Map.entry("Tree.iconColor", "#808080"), Map.entry("Tree.iconColor.Dark", "#AFB1B3") ); } public @NonNls String getId() { return id; } public @Nullable String getEditorScheme() { return editorScheme; } public Map<String, Object> getBackground() { return background; } public Map<String, Object> getEmptyFrameBackground() { return emptyFrameBackground; } public void applyProperties(@NotNull UIDefaults defaults) { if (ui == null) { return; } loadColorPalette(defaults); for (Map.Entry<String, Object> entry : ui.entrySet()) { apply(this, entry.getKey(), entry.getValue(), defaults); } } private void loadColorPalette(@NotNull UIDefaults defaults) { if (colors != null) { for (Map.Entry<String, Object> entry : colors.entrySet()) { Object value = entry.getValue(); if (value instanceof String) { Color color = parseColor((String)value); if (color != null) { defaults.put("ColorPalette." 
+ entry.getKey(), color); } } } } } public IconPathPatcher getPatcher() { return patcher; } public SVGLoader.SvgElementColorPatcherProvider getColorPatcher() { return colorPatcher; } public SVGLoader.SvgElementColorPatcherProvider getSelectionColorPatcher() { return selectionColorPatcher; } public @NotNull ClassLoader getProviderClassLoader() { return providerClassLoader; } private static void apply(@NotNull UITheme theme, String key, Object value, UIDefaults defaults) { if (value instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> map = (Map<String, Object>)value; if (isOSCustomization(map)) { applyOSCustomizations(theme, map, key, defaults); } else { for (Map.Entry<String, Object> o : map.entrySet()) { apply(theme, createUIKey(key, o.getKey()), o.getValue(), defaults); } } } else { String valueStr = value.toString(); Color color = null; if (theme.colors != null) { Object obj = theme.colors.get(valueStr); if (obj != null) { color = parseColor(obj.toString()); if (color != null && !key.startsWith("*")) { defaults.put(key, color); return; } } } value = color == null ? parseValue(key, valueStr) : color; if (key.startsWith("*.")) { String tail = key.substring(1); addPattern(key, value, defaults); for (Object k : defaults.keySet().toArray()) { if (k instanceof String && ((String)k).endsWith(tail)) { defaults.put(k, value); } } } else { defaults.put(key, value); } } } private static @NotNull String createUIKey(String key, String propertyName) { if ("UI".equals(propertyName)) { return key + propertyName; } else { return key + "." + propertyName; } } private static void applyOSCustomizations(@NotNull UITheme theme, Map<String, Object> map, String key, UIDefaults defaults) { String osKey = SystemInfoRt.isWindows ? OS_WINDOWS_KEY : SystemInfoRt.isMac ? OS_MACOS_KEY : SystemInfoRt.isLinux ? 
OS_LINUX_KEY : null; if (osKey != null && map.containsKey(osKey)) { apply(theme, key, map.get(osKey), defaults); } else if (map.containsKey(OS_DEFAULT_KEY)) { apply(theme, key, map.get(OS_DEFAULT_KEY), defaults); } } private static boolean isOSCustomization(Map<String, Object> map) { return map.containsKey(OS_MACOS_KEY) || map.containsKey(OS_WINDOWS_KEY) || map.containsKey(OS_LINUX_KEY) || map.containsKey(OS_DEFAULT_KEY); } @SuppressWarnings("unchecked") private static void addPattern(String key, Object value, UIDefaults defaults) { Object o = defaults.get("*"); if (!(o instanceof Map)) { o = new HashMap<String, Object>(); defaults.put("*", o); } @SuppressWarnings("rawtypes") Map map = (Map<?, ?>)o; if (key != null && key.startsWith("*.")) { map.put(key.substring(2), value); } } public static Object parseValue(String key, @NotNull String value, @NotNull ClassLoader classLoader) { switch (value) { case "null": return null; case "true": return Boolean.TRUE; case "false": return Boolean.FALSE; } if (value.endsWith(".png") || value.endsWith(".svg")) { Icon icon = ImageDataByPathLoader.findIconFromThemePath(value, classLoader); if (icon != null) { return icon; } } if (key.endsWith("Insets") || key.endsWith("padding")) { return parseInsets(value); } else if (key.endsWith("Border") || key.endsWith("border")) { try { String[] ints = value.split(","); if (ints.length == 4) { return new BorderUIResource.EmptyBorderUIResource(parseInsets(value)); } else if (ints.length == 5) { return JBUI.asUIResource(JBUI.Borders.customLine(ColorHexUtil.fromHex(ints[4]), Integer.parseInt(ints[0]), Integer.parseInt(ints[1]), Integer.parseInt(ints[2]), Integer.parseInt(ints[3]))); } Color color = ColorHexUtil.fromHexOrNull(value); if (color == null) { Class<?> aClass = classLoader.loadClass(value); Constructor<?> constructor = aClass.getDeclaredConstructor(); constructor.setAccessible(true); return constructor.newInstance(); } else { return JBUI.asUIResource(JBUI.Borders.customLine(color, 1)); 
} } catch (Exception e) { LOG.warn(e); } } else if (key.endsWith("Size")) { return parseSize(value); } else if (key.endsWith("Width") || key.endsWith("Height")) { return getInteger(value, key); } else if (key.endsWith("grayFilter")) { return parseGrayFilter(value); } else if (value.startsWith("AllIcons.")) { return IconLoader.getReflectiveIcon(value, UITheme.class.getClassLoader()); } else { Color color = parseColor(value); if (color != null) { return new ColorUIResource(color); } Integer intVal = getInteger(value, null); if (intVal != null) { return intVal; } } return value; } public static Object parseValue(String key, @NotNull String value) { ClassLoader classLoader = UIManager.getLookAndFeel().getClass().getClassLoader(); return parseValue(key, value, classLoader); } private static Insets parseInsets(@NotNull String value) { String[] numbers = value.split(","); return new JBInsets(Integer.parseInt(numbers[0]), Integer.parseInt(numbers[1]), Integer.parseInt(numbers[2]), Integer.parseInt(numbers[3])) .asUIResource(); } private static UIUtil.GrayFilter parseGrayFilter(String value) { String[] numbers = value.split(","); return new UIUtil.GrayFilter(Integer.parseInt(numbers[0]), Integer.parseInt(numbers[1]), Integer.parseInt(numbers[2])) .asUIResource(); } @SuppressWarnings("UseJBColor") private static @Nullable Color parseColor(String value) { if (value != null) { //noinspection SSBasedInspection if (value.startsWith("#")) { value = value.substring(1); } if (value.length() == 8) { Color color = ColorHexUtil.fromHex(value.substring(0, 6)); try { int alpha = Integer.parseInt(value.substring(6, 8), 16); return new ColorUIResource(new Color(color.getRed(), color.getGreen(), color.getBlue(), alpha)); } catch (Exception ignore) { } return null; } } Color color = ColorHexUtil.fromHex(value, null); return color == null ? 
null : new ColorUIResource(color); } private static Integer getInteger(String value, @Nullable String key) { try { //noinspection SSBasedInspection if (value.endsWith(".0")) { value = value.substring(0, value.length() - ".0".length()); } return Integer.parseInt(value); } catch (NumberFormatException e) { if (key != null) { LOG.warn(key + " = " + value); } return null; } } private static Dimension parseSize(@NotNull String value) { String[] numbers = value.split(","); return new JBDimension(Integer.parseInt(numbers[0]), Integer.parseInt(numbers[1])).asUIResource(); } public String getEditorSchemeName() { return editorSchemeName; } public void setEditorSchemeName(String editorSchemeName) { this.editorSchemeName = editorSchemeName; } static final class PaletteScope { final Map<String, String> newPalette = new HashMap<>(); final Map<String, Integer> alphas = new HashMap<>(); private byte[] hash; byte @NotNull [] digest() { if (hash != null) { return hash; } MessageDigest hasher = DigestUtil.sha512(); // order is significant if (!newPalette.isEmpty()) { for (Map.Entry<String, String> e : new TreeMap<>(newPalette).entrySet()) { hasher.update(e.getKey().getBytes(StandardCharsets.UTF_8)); hasher.update(e.getValue().getBytes(StandardCharsets.UTF_8)); } } if (!alphas.isEmpty()) { // order is significant for (Map.Entry<String, Integer> e : new TreeMap<>(alphas).entrySet()) { hasher.update(e.getKey().getBytes(StandardCharsets.UTF_8)); Integer value = e.getValue(); if (value != null) { int i = value.intValue(); hasher.update((byte)i); hasher.update((byte)(i >>> 8)); hasher.update((byte)(i >>> 16)); hasher.update((byte)(i >>> 24)); } } } hash = hasher.digest(); return hash; } } static final class PaletteScopeManager { final PaletteScope ui = new PaletteScope(); final PaletteScope checkBoxes = new PaletteScope(); final PaletteScope radioButtons = new PaletteScope(); final PaletteScope trees = new PaletteScope(); PaletteScope getScope(String colorKey) { if 
(colorKey.startsWith("Checkbox.")) return checkBoxes; if (colorKey.startsWith("Radio.")) return radioButtons; if (colorKey.startsWith("Tree.iconColor")) return trees; if (colorKey.startsWith("Objects.")) return ui; if (colorKey.startsWith("Actions.")) return ui; if (colorKey.startsWith("#")) return ui; LOG.warn("No color scope defined for key: " + colorKey); return null; } @Nullable PaletteScope getScopeByPath(@Nullable String path) { if (path != null && path.contains("/com/intellij/ide/ui/laf/icons/")) { String file = path.substring(path.lastIndexOf('/') + 1); if (file.equals("treeCollapsed.svg") || file.equals("treeExpanded.svg")) return trees; if (file.startsWith("check")) return checkBoxes; if (file.startsWith("radio")) return checkBoxes; //same set of colors as for checkboxes return null; } return ui; } } //<editor-fold desc="JSON deserialization methods"> @SuppressWarnings("unused") private void setName(String name) { this.name = name; } @SuppressWarnings("unused") private void setDark(boolean dark) { this.dark = dark; } @SuppressWarnings("unused") private void setAuthor(String author) { this.author = author; } @SuppressWarnings("unused") private void setUi(Map<String, Object> ui) { this.ui = ui; } @SuppressWarnings("unused") private void setIcons(@Nullable Map<String, Object> icons) { this.icons = icons; } @SuppressWarnings("unused") public void setEditorScheme(String editorScheme) { this.editorScheme = editorScheme; } public void setBackground(Map<String, Object> background) { this.background = background; } public void setIconColorsOnSelection(@Nullable Map<String, Object> iconColorsOnSelection) { this.iconColorsOnSelection = iconColorsOnSelection; } public void setEmptyFrameBackground(Map<String, Object> emptyFrameBackground) { this.emptyFrameBackground = emptyFrameBackground; } public @Nullable Map<String, Object> getColors() { return colors; } public void setColors(@Nullable Map<String, Object> colors) { this.colors = colors; } //</editor-fold> }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2019.01.11 at 02:39:34 PM EST // package schemas.docbook; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlElementRefs; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlIDREF; import javax.xml.bind.annotation.XmlMixed; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;element ref="{http://docbook.org/ns/docbook}inlinemediaobject"/> * &lt;element ref="{http://docbook.org/ns/docbook}remark"/> * &lt;element ref="{http://docbook.org/ns/docbook}superscript"/> * &lt;element ref="{http://docbook.org/ns/docbook}subscript"/> * &lt;element ref="{http://docbook.org/ns/docbook}xref"/> * &lt;element ref="{http://docbook.org/ns/docbook}link"/> * &lt;element ref="{http://docbook.org/ns/docbook}olink"/> * &lt;element ref="{http://docbook.org/ns/docbook}anchor"/> * &lt;element ref="{http://docbook.org/ns/docbook}biblioref"/> * &lt;element ref="{http://docbook.org/ns/docbook}alt"/> * &lt;element ref="{http://docbook.org/ns/docbook}annotation"/> * &lt;element ref="{http://docbook.org/ns/docbook}indexterm"/> * &lt;element ref="{http://docbook.org/ns/docbook}phrase"/> * &lt;element ref="{http://docbook.org/ns/docbook}replaceable"/> * &lt;/choice> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.attributes"/> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.linking.attributes"/> * &lt;attribute name="role" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "contractnum") public class Contractnum { @XmlElementRefs({ @XmlElementRef(name = "link", namespace = "http://docbook.org/ns/docbook", type = Link.class, required = false), @XmlElementRef(name = "phrase", namespace = "http://docbook.org/ns/docbook", type = Phrase.class, required = false), @XmlElementRef(name = "anchor", namespace = "http://docbook.org/ns/docbook", type = Anchor.class, required = false), @XmlElementRef(name = "alt", namespace = "http://docbook.org/ns/docbook", type = Alt.class, required = false), 
@XmlElementRef(name = "indexterm", namespace = "http://docbook.org/ns/docbook", type = Indexterm.class, required = false), @XmlElementRef(name = "remark", namespace = "http://docbook.org/ns/docbook", type = Remark.class, required = false), @XmlElementRef(name = "xref", namespace = "http://docbook.org/ns/docbook", type = Xref.class, required = false), @XmlElementRef(name = "inlinemediaobject", namespace = "http://docbook.org/ns/docbook", type = Inlinemediaobject.class, required = false), @XmlElementRef(name = "olink", namespace = "http://docbook.org/ns/docbook", type = Olink.class, required = false), @XmlElementRef(name = "biblioref", namespace = "http://docbook.org/ns/docbook", type = Biblioref.class, required = false), @XmlElementRef(name = "annotation", namespace = "http://docbook.org/ns/docbook", type = Annotation.class, required = false), @XmlElementRef(name = "replaceable", namespace = "http://docbook.org/ns/docbook", type = Replaceable.class, required = false), @XmlElementRef(name = "superscript", namespace = "http://docbook.org/ns/docbook", type = Superscript.class, required = false), @XmlElementRef(name = "subscript", namespace = "http://docbook.org/ns/docbook", type = Subscript.class, required = false) }) @XmlMixed protected List<Object> content; @XmlAttribute(name = "role") @XmlSchemaType(name = "anySimpleType") protected String role; @XmlAttribute(name = "id", namespace = "http://www.w3.org/XML/1998/namespace") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute(name = "version") @XmlSchemaType(name = "anySimpleType") protected String commonVersion; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String xmlLang; @XmlAttribute(name = "base", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String base; @XmlAttribute(name = "remap") @XmlSchemaType(name = 
"anySimpleType") protected String remap; @XmlAttribute(name = "xreflabel") @XmlSchemaType(name = "anySimpleType") protected String xreflabel; @XmlAttribute(name = "revisionflag") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String revisionflag; @XmlAttribute(name = "dir") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String dir; @XmlAttribute(name = "arch") @XmlSchemaType(name = "anySimpleType") protected String arch; @XmlAttribute(name = "audience") @XmlSchemaType(name = "anySimpleType") protected String audience; @XmlAttribute(name = "condition") @XmlSchemaType(name = "anySimpleType") protected String condition; @XmlAttribute(name = "conformance") @XmlSchemaType(name = "anySimpleType") protected String conformance; @XmlAttribute(name = "os") @XmlSchemaType(name = "anySimpleType") protected String os; @XmlAttribute(name = "revision") @XmlSchemaType(name = "anySimpleType") protected String commonRevision; @XmlAttribute(name = "security") @XmlSchemaType(name = "anySimpleType") protected String security; @XmlAttribute(name = "userlevel") @XmlSchemaType(name = "anySimpleType") protected String userlevel; @XmlAttribute(name = "vendor") @XmlSchemaType(name = "anySimpleType") protected String vendor; @XmlAttribute(name = "wordsize") @XmlSchemaType(name = "anySimpleType") protected String wordsize; @XmlAttribute(name = "annotations") @XmlSchemaType(name = "anySimpleType") protected String annotations; @XmlAttribute(name = "linkend") @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object linkend; @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String href; @XmlAttribute(name = "type", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkType; @XmlAttribute(name = "role", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkRole; @XmlAttribute(name = "arcrole", 
namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String arcrole; @XmlAttribute(name = "title", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkTitle; @XmlAttribute(name = "show", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String show; @XmlAttribute(name = "actuate", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String actuate; /** * Gets the value of the content property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the content property. * * <p> * For example, to add a new item, do as follows: * <pre> * getContent().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Link } * {@link Phrase } * {@link Anchor } * {@link Alt } * {@link Indexterm } * {@link Remark } * {@link Xref } * {@link Inlinemediaobject } * {@link Olink } * {@link Biblioref } * {@link Annotation } * {@link Replaceable } * {@link String } * {@link Superscript } * {@link Subscript } * * */ public List<Object> getContent() { if (content == null) { content = new ArrayList<Object>(); } return this.content; } /** * Gets the value of the role property. * * @return * possible object is * {@link String } * */ public String getRole() { return role; } /** * Sets the value of the role property. * * @param value * allowed object is * {@link String } * */ public void setRole(String value) { this.role = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. 
* * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the commonVersion property. * * @return * possible object is * {@link String } * */ public String getCommonVersion() { return commonVersion; } /** * Sets the value of the commonVersion property. * * @param value * allowed object is * {@link String } * */ public void setCommonVersion(String value) { this.commonVersion = value; } /** * Gets the value of the xmlLang property. * * @return * possible object is * {@link String } * */ public String getXmlLang() { return xmlLang; } /** * Sets the value of the xmlLang property. * * @param value * allowed object is * {@link String } * */ public void setXmlLang(String value) { this.xmlLang = value; } /** * Gets the value of the base property. * * @return * possible object is * {@link String } * */ public String getBase() { return base; } /** * Sets the value of the base property. * * @param value * allowed object is * {@link String } * */ public void setBase(String value) { this.base = value; } /** * Gets the value of the remap property. * * @return * possible object is * {@link String } * */ public String getRemap() { return remap; } /** * Sets the value of the remap property. * * @param value * allowed object is * {@link String } * */ public void setRemap(String value) { this.remap = value; } /** * Gets the value of the xreflabel property. * * @return * possible object is * {@link String } * */ public String getXreflabel() { return xreflabel; } /** * Sets the value of the xreflabel property. * * @param value * allowed object is * {@link String } * */ public void setXreflabel(String value) { this.xreflabel = value; } /** * Gets the value of the revisionflag property. * * @return * possible object is * {@link String } * */ public String getRevisionflag() { return revisionflag; } /** * Sets the value of the revisionflag property. 
* * @param value * allowed object is * {@link String } * */ public void setRevisionflag(String value) { this.revisionflag = value; } /** * Gets the value of the dir property. * * @return * possible object is * {@link String } * */ public String getDir() { return dir; } /** * Sets the value of the dir property. * * @param value * allowed object is * {@link String } * */ public void setDir(String value) { this.dir = value; } /** * Gets the value of the arch property. * * @return * possible object is * {@link String } * */ public String getArch() { return arch; } /** * Sets the value of the arch property. * * @param value * allowed object is * {@link String } * */ public void setArch(String value) { this.arch = value; } /** * Gets the value of the audience property. * * @return * possible object is * {@link String } * */ public String getAudience() { return audience; } /** * Sets the value of the audience property. * * @param value * allowed object is * {@link String } * */ public void setAudience(String value) { this.audience = value; } /** * Gets the value of the condition property. * * @return * possible object is * {@link String } * */ public String getCondition() { return condition; } /** * Sets the value of the condition property. * * @param value * allowed object is * {@link String } * */ public void setCondition(String value) { this.condition = value; } /** * Gets the value of the conformance property. * * @return * possible object is * {@link String } * */ public String getConformance() { return conformance; } /** * Sets the value of the conformance property. * * @param value * allowed object is * {@link String } * */ public void setConformance(String value) { this.conformance = value; } /** * Gets the value of the os property. * * @return * possible object is * {@link String } * */ public String getOs() { return os; } /** * Sets the value of the os property. 
* * @param value * allowed object is * {@link String } * */ public void setOs(String value) { this.os = value; } /** * Gets the value of the commonRevision property. * * @return * possible object is * {@link String } * */ public String getCommonRevision() { return commonRevision; } /** * Sets the value of the commonRevision property. * * @param value * allowed object is * {@link String } * */ public void setCommonRevision(String value) { this.commonRevision = value; } /** * Gets the value of the security property. * * @return * possible object is * {@link String } * */ public String getSecurity() { return security; } /** * Sets the value of the security property. * * @param value * allowed object is * {@link String } * */ public void setSecurity(String value) { this.security = value; } /** * Gets the value of the userlevel property. * * @return * possible object is * {@link String } * */ public String getUserlevel() { return userlevel; } /** * Sets the value of the userlevel property. * * @param value * allowed object is * {@link String } * */ public void setUserlevel(String value) { this.userlevel = value; } /** * Gets the value of the vendor property. * * @return * possible object is * {@link String } * */ public String getVendor() { return vendor; } /** * Sets the value of the vendor property. * * @param value * allowed object is * {@link String } * */ public void setVendor(String value) { this.vendor = value; } /** * Gets the value of the wordsize property. * * @return * possible object is * {@link String } * */ public String getWordsize() { return wordsize; } /** * Sets the value of the wordsize property. * * @param value * allowed object is * {@link String } * */ public void setWordsize(String value) { this.wordsize = value; } /** * Gets the value of the annotations property. * * @return * possible object is * {@link String } * */ public String getAnnotations() { return annotations; } /** * Sets the value of the annotations property. 
* * @param value * allowed object is * {@link String } * */ public void setAnnotations(String value) { this.annotations = value; } /** * Gets the value of the linkend property. * * @return * possible object is * {@link Object } * */ public Object getLinkend() { return linkend; } /** * Sets the value of the linkend property. * * @param value * allowed object is * {@link Object } * */ public void setLinkend(Object value) { this.linkend = value; } /** * Gets the value of the href property. * * @return * possible object is * {@link String } * */ public String getHref() { return href; } /** * Sets the value of the href property. * * @param value * allowed object is * {@link String } * */ public void setHref(String value) { this.href = value; } /** * Gets the value of the xlinkType property. * * @return * possible object is * {@link String } * */ public String getXlinkType() { return xlinkType; } /** * Sets the value of the xlinkType property. * * @param value * allowed object is * {@link String } * */ public void setXlinkType(String value) { this.xlinkType = value; } /** * Gets the value of the xlinkRole property. * * @return * possible object is * {@link String } * */ public String getXlinkRole() { return xlinkRole; } /** * Sets the value of the xlinkRole property. * * @param value * allowed object is * {@link String } * */ public void setXlinkRole(String value) { this.xlinkRole = value; } /** * Gets the value of the arcrole property. * * @return * possible object is * {@link String } * */ public String getArcrole() { return arcrole; } /** * Sets the value of the arcrole property. * * @param value * allowed object is * {@link String } * */ public void setArcrole(String value) { this.arcrole = value; } /** * Gets the value of the xlinkTitle property. * * @return * possible object is * {@link String } * */ public String getXlinkTitle() { return xlinkTitle; } /** * Sets the value of the xlinkTitle property. 
* * @param value * allowed object is * {@link String } * */ public void setXlinkTitle(String value) { this.xlinkTitle = value; } /** * Gets the value of the show property. * * @return * possible object is * {@link String } * */ public String getShow() { return show; } /** * Sets the value of the show property. * * @param value * allowed object is * {@link String } * */ public void setShow(String value) { this.show = value; } /** * Gets the value of the actuate property. * * @return * possible object is * {@link String } * */ public String getActuate() { return actuate; } /** * Sets the value of the actuate property. * * @param value * allowed object is * {@link String } * */ public void setActuate(String value) { this.actuate = value; } }