gt stringclasses 1 value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2002-2017 Drew Noakes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.imaging;
import com.drew.imaging.zip.ZipFileTypeDetector;
import com.drew.lang.ByteTrie;
import com.drew.lang.annotations.NotNull;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.util.zip.ZipInputStream;
import java.io.InputStream;
/**
* Examines the a file's first bytes and estimates the file's type.
*/
/**
 * Examines a file's first bytes and estimates the file's type via magic-number lookup.
 * <p>
 * Thread-safe: the lookup trie is built once in a static initializer and never mutated afterwards.
 */
public class FileTypeDetector
{
    private final static ByteTrie<FileType> _root;

    /**
     * Offsets at which a known magic number may start: 0 for most formats, 4 for
     * QuickTime/MP4-style atom files whose first four bytes hold the atom length.
     */
    private final static int[] _offsets;

    static
    {
        _root = new ByteTrie<FileType>();
        _root.setDefaultValue(FileType.Unknown);
        // Potential supported offsets
        _offsets = new int[]{0, 4};
        // https://en.wikipedia.org/wiki/List_of_file_signatures
        _root.addPath(FileType.Jpeg, new byte[]{(byte)0xff, (byte)0xd8});
        _root.addPath(FileType.Tiff, "II".getBytes(), new byte[]{0x2a, 0x00});
        _root.addPath(FileType.Tiff, "MM".getBytes(), new byte[]{0x00, 0x2a});
        _root.addPath(FileType.Psd, "8BPS".getBytes());
        _root.addPath(FileType.Png, new byte[]{(byte)0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48, 0x44, 0x52});
        _root.addPath(FileType.Bmp, "BM".getBytes()); // Standard Bitmap Windows and OS/2
        _root.addPath(FileType.Bmp, "BA".getBytes()); // OS/2 Bitmap Array
        _root.addPath(FileType.Bmp, "CI".getBytes()); // OS/2 Color Icon
        _root.addPath(FileType.Bmp, "CP".getBytes()); // OS/2 Color Pointer
        _root.addPath(FileType.Bmp, "IC".getBytes()); // OS/2 Icon
        _root.addPath(FileType.Bmp, "PT".getBytes()); // OS/2 Pointer
        _root.addPath(FileType.Gif, "GIF87a".getBytes());
        _root.addPath(FileType.Gif, "GIF89a".getBytes());
        _root.addPath(FileType.Ico, new byte[]{0x00, 0x00, 0x01, 0x00});
        _root.addPath(FileType.Pcx, new byte[]{0x0A, 0x00, 0x01}); // multiple PCX versions, explicitly listed
        _root.addPath(FileType.Pcx, new byte[]{0x0A, 0x02, 0x01});
        _root.addPath(FileType.Pcx, new byte[]{0x0A, 0x03, 0x01});
        _root.addPath(FileType.Pcx, new byte[]{0x0A, 0x05, 0x01});
        _root.addPath(FileType.Wav, "WAVE".getBytes());
        _root.addPath(FileType.Avi, "AVI ".getBytes());
        _root.addPath(FileType.Webp, "WEBP".getBytes());
        _root.addPath(FileType.Iff, "FORM".getBytes());
        _root.addPath(FileType.Riff, "RIFF".getBytes());
        _root.addPath(FileType.Pdf, "%PDF".getBytes());
        _root.addPath(FileType.Aiff, "AIFF".getBytes()); // Should be FORM....AIFF
        _root.addPath(FileType.Aiff, "AIFC".getBytes()); // Compressed form of AIFF
        _root.addPath(FileType.Arw, "II".getBytes(), new byte[]{0x2a, 0x00, 0x08, 0x00});
        _root.addPath(FileType.Crw, "II".getBytes(), new byte[]{0x1a, 0x00, 0x00, 0x00}, "HEAPCCDR".getBytes());
        _root.addPath(FileType.Cr2, "II".getBytes(), new byte[]{0x2a, 0x00, 0x10, 0x00, 0x00, 0x00, 0x43, 0x52});
        _root.addPath(FileType.Nef, "MM".getBytes(), new byte[]{0x00, 0x2a, 0x00, 0x00, 0x00, (byte)0x80, 0x00});
        _root.addPath(FileType.Orf, "IIRO".getBytes(), new byte[]{(byte)0x08, 0x00});
        _root.addPath(FileType.Orf, "MMOR".getBytes(), new byte[]{(byte)0x00, 0x00});
        _root.addPath(FileType.Orf, "IIRS".getBytes(), new byte[]{(byte)0x08, 0x00});
        _root.addPath(FileType.Raf, "FUJIFILMCCD-RAW".getBytes());
        _root.addPath(FileType.Rw2, "II".getBytes(), new byte[]{0x55, 0x00});
        _root.addPath(FileType.Sit, new byte[]{ 0x53, 0x74, 0x75, 0x66, 0x66, 0x49, 0x74, 0x20, 0x28, 0x63, 0x29, 0x31, 0x39, 0x39, 0x37, 0x2D}); // StuffIt (c)1997-
        _root.addPath(FileType.Sit, new byte[]{ 0x53, 0x49, 0x54, 0x21, 0x00 }); // SIT!);
        _root.addPath(FileType.Sitx, new byte[]{ 0x53, 0x74, 0x75, 0x66, 0x66, 0x49, 0x74, 0x21 });
        _root.addPath(FileType.Aac, new byte[]{(byte)0xFF, (byte)0xF1});
        _root.addPath(FileType.Aac, new byte[]{(byte)0xFF, (byte)0xF9});
        _root.addPath(FileType.Ram, new byte[]{0x72, 0x74, 0x73, 0x70, 0x3A, 0x2F, 0x2F});
        _root.addPath(FileType.Cfbf, new byte[]{(byte)0xD0, (byte)0xCF, 0x11, (byte)0xE0, (byte)0xA1, (byte)0xB1, 0x1A, (byte)0xE1, 0x00});
        _root.addPath(FileType.Qxp, new byte[]{0x00, 0x00, 0x49, 0x49, 0x58, 0x50, 0x52, 0x33}); // "..IIXPR3" (little-endian - intel)
        _root.addPath(FileType.Qxp, new byte[]{0x00, 0x00, 0x4D, 0x4D, 0x58, 0x50, 0x52, 0x33}); // "..MMXPR3" (big-endian - motorola)
        _root.addPath(FileType.Rtf, new byte[]{0x7B, 0x5C, 0x72, 0x74, 0x66, 0x31});
        _root.addPath(FileType.Swf, "CWS".getBytes());
        _root.addPath(FileType.Swf, "FWS".getBytes());
        _root.addPath(FileType.Swf, "ZWS".getBytes());
        _root.addPath(FileType.Asf, new byte[]{0x30, 0x26, (byte)0xB2, 0x75, (byte)0x8E, 0x66, (byte)0xCF, 0x11, (byte)0xA6, (byte)0xD9, 0x00, (byte)0xAA, 0x00, 0x62, (byte)0xCE, 0x6C});
        _root.addPath(FileType.Vob, new byte[]{0x00, 0x00, 0x01, (byte)0xBA});
        _root.addPath(FileType.Mxf, new byte[]{0x06, 0x0e, 0x2b, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0d, 0x01, 0x02, 0x01, 0x01, 0x02}); // has offset?
        _root.addPath(FileType.Flv, new byte[]{0x46, 0x4C, 0x56});
        _root.addPath(FileType.Zip, "PK".getBytes());
        _root.addPath(FileType.Indd, new byte[]{0x06, 0x06, (byte)0xED, (byte)0xF5, (byte)0xD8, 0x1D, 0x46, (byte)0xE5, (byte)0xBD, 0x31, (byte)0xEF, (byte)0xE7, (byte)0xFE, 0x74, (byte)0xB7, 0x1D});
        // Potential root atoms... typically starts with FTYP... often at 4 byte offset
        _root.addPath(FileType.Mov, new byte[]{0x6D, 0x6F, 0x6F, 0x76}); // moov
        _root.addPath(FileType.Mov, new byte[]{0x77, 0x69, 0x64, 0x65}); // wide
        _root.addPath(FileType.Mov, new byte[]{0x6D, 0x64, 0x61, 0x74}); // mdat
        _root.addPath(FileType.Mov, new byte[]{0x66, 0x72, 0x65, 0x65}); // free
        _root.addPath(FileType.Mov, "ftypqt ".getBytes());
        _root.addPath(FileType.Mp4, "ftypavc1".getBytes());
        _root.addPath(FileType.Mp4, "ftypiso2".getBytes());
        _root.addPath(FileType.Mp4, "ftypisom".getBytes());
        _root.addPath(FileType.Mp4, "ftypM4A ".getBytes());
        _root.addPath(FileType.Mp4, "ftypM4B ".getBytes());
        _root.addPath(FileType.Mp4, "ftypM4P ".getBytes());
        _root.addPath(FileType.Mp4, "ftypM4V ".getBytes());
        _root.addPath(FileType.Mp4, "ftypM4VH".getBytes());
        _root.addPath(FileType.Mp4, "ftypM4VP".getBytes());
        _root.addPath(FileType.Mp4, "ftypmmp4".getBytes());
        _root.addPath(FileType.Mp4, "ftypmp41".getBytes());
        _root.addPath(FileType.Mp4, "ftypmp42".getBytes());
        _root.addPath(FileType.Mp4, "ftypmp71".getBytes());
        _root.addPath(FileType.Mp4, "ftypMSNV".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDAS".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDSC".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDSH".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDSM".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDSP".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDSS".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDXC".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDXH".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDXM".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDXP".getBytes());
        _root.addPath(FileType.Mp4, "ftypNDXS".getBytes());
        _root.addPath(FileType.Heif, "ftypmif1".getBytes());
        _root.addPath(FileType.Heif, "ftypmsf1".getBytes());
        _root.addPath(FileType.Heif, "ftypheic".getBytes());
        _root.addPath(FileType.Heif, "ftypheix".getBytes());
        _root.addPath(FileType.Heif, "ftyphevc".getBytes());
        _root.addPath(FileType.Heif, "ftyphevx".getBytes());
        _root.addPath(FileType.Eps, "%!PS".getBytes());
        _root.addPath(FileType.AdobeEps, new byte[]{(byte)0xC5, (byte)0xD0, (byte)0xD3, (byte)0xC6});
    }

    /** Static-only utility class; not intended for instantiation. */
    private FileTypeDetector()
    {
        throw new UnsupportedOperationException("Not intended for instantiation");
    }

    /**
     * Examines the stream's bytes, starting {@code offset} bytes in, and estimates the file's type.
     * The stream position is restored before returning, even if reading fails.
     *
     * @param inputStream a stream supporting mark/reset, positioned at the start of the file
     * @param offset number of bytes to skip before reading the magic number (must be &gt;= 0)
     * @return the detected {@link FileType}, or {@link FileType#Unknown} when no signature matches
     *         (the trie's default value, so this never returns null)
     * @throws IOException if an IO error occurs or the stream ends before the magic number is read
     */
    @NotNull
    public static FileType detectFileType(@NotNull final BufferedInputStream inputStream, final int offset) throws IOException
    {
        if (!inputStream.markSupported())
            throw new IOException("Stream must support mark/reset");
        int maxByteCount = _root.getMaxDepth();
        // Include the skipped prefix in the read limit, otherwise reset() may fail
        // to restore the original position when offset > 0.
        inputStream.mark(offset + maxByteCount);
        try {
            // skip() may skip fewer bytes than requested; loop until the offset is consumed.
            long skipped = 0;
            while (skipped < offset) {
                long count = inputStream.skip(offset - skipped);
                if (count <= 0)
                    throw new IOException("Stream ended before file's magic number could be determined.");
                skipped += count;
            }
            byte[] bytes = new byte[maxByteCount];
            int bytesRead = inputStream.read(bytes);
            if (bytesRead == -1)
                throw new IOException("Stream ended before file's magic number could be determined.");
            // Unread tail bytes stay zero; the trie matches on the populated prefix.
            return _root.find(bytes);
        } finally {
            // Always rewind so the caller sees the stream at its original position.
            inputStream.reset();
        }
    }

    /**
     * Examines the file's bytes and estimates the file's type.
     * <p>
     * Requires a {@link BufferedInputStream} in order to mark and reset the stream to the position
     * at which it was provided to this method once completed.
     * <p>
     * Tries each supported signature offset in turn, and resolves container formats
     * (RIFF, IFF, ZIP) to their concrete inner type where possible.
     *
     * @throws IOException if an IO error occurred or the input stream ended unexpectedly.
     */
    @NotNull
    public static FileType detectFileType(@NotNull final BufferedInputStream inputStream) throws IOException
    {
        FileType fileType = FileType.Unknown;
        for (int offset : _offsets) {
            fileType = detectFileType(inputStream, offset);
            if (fileType.getIsContainer()) {
                fileType = handleContainer(inputStream, fileType);
            }
            if (!fileType.equals(FileType.Unknown)) {
                break;
            }
        }
        return fileType;
    }

    /**
     * Calls detectFileType at the correct offset for the container type being passed in.
     * In the case of fileTypes without magic bytes to identify with (Zip), the fileType will be
     * found within this method alone.
     *
     * @throws IOException if an IO error occurred or the input stream ended unexpectedly.
     */
    @NotNull
    public static FileType handleContainer(@NotNull final BufferedInputStream inputStream, @NotNull FileType fileType) throws IOException
    {
        switch (fileType) {
            case Riff:
            case Iff:
                // Both RIFF and IFF carry the concrete format tag at byte offset 8.
                return detectFileType(inputStream, 8);
            case Zip:
                // ZIP has no distinguishing magic beyond "PK"; inspect entries instead.
                return ZipFileTypeDetector.detectFileType(inputStream);
            case Cfbf:
            case Tiff:
            default:
                return fileType;
        }
    }
}
| |
/**
* Copyright (c) 2000-2013 Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.docs.guestbook.model.impl;
import com.liferay.docs.guestbook.model.Guestbook;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.model.CacheModel;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Date;
/**
* The cache model class for representing Guestbook in entity cache.
*
* @author Rich Sezov
* @see Guestbook
* @generated
*/
/**
 * The cache model class for representing Guestbook in entity cache.
 * <p>
 * Serializes all Guestbook fields via {@link Externalizable}; {@code null} strings are
 * stored as {@code StringPool.BLANK} and absent dates as {@code Long.MIN_VALUE}.
 *
 * @author Rich Sezov
 * @see Guestbook
 * @generated
 */
public class GuestbookCacheModel implements CacheModel<Guestbook>,
    Externalizable {

    @Override
    public String toString() {
        // 13 fields: one label plus one value per field, plus the closing brace.
        StringBundler sb = new StringBundler(27);
        sb.append("{uuid=");
        sb.append(uuid);
        sb.append(", guestbookId=");
        sb.append(guestbookId);
        sb.append(", groupId=");
        sb.append(groupId);
        sb.append(", companyId=");
        sb.append(companyId);
        sb.append(", userId=");
        sb.append(userId);
        sb.append(", userName=");
        sb.append(userName);
        sb.append(", createDate=");
        sb.append(createDate);
        sb.append(", modifiedDate=");
        sb.append(modifiedDate);
        sb.append(", name=");
        sb.append(name);
        sb.append(", status=");
        sb.append(status);
        sb.append(", statusByUserId=");
        sb.append(statusByUserId);
        sb.append(", statusByUserName=");
        sb.append(statusByUserName);
        sb.append(", statusDate=");
        sb.append(statusDate);
        sb.append("}");
        return sb.toString();
    }

    @Override
    public Guestbook toEntityModel() {
        // Materialize the cached state into a fresh entity; nulls become blanks,
        // Long.MIN_VALUE timestamps become null dates.
        GuestbookImpl model = new GuestbookImpl();
        model.setUuid(uuid == null ? StringPool.BLANK : uuid);
        model.setGuestbookId(guestbookId);
        model.setGroupId(groupId);
        model.setCompanyId(companyId);
        model.setUserId(userId);
        model.setUserName(userName == null ? StringPool.BLANK : userName);
        model.setCreateDate(createDate == Long.MIN_VALUE ? null : new Date(createDate));
        model.setModifiedDate(modifiedDate == Long.MIN_VALUE ? null : new Date(modifiedDate));
        model.setName(name == null ? StringPool.BLANK : name);
        model.setStatus(status);
        model.setStatusByUserId(statusByUserId);
        model.setStatusByUserName(statusByUserName == null ? StringPool.BLANK : statusByUserName);
        model.setStatusDate(statusDate == Long.MIN_VALUE ? null : new Date(statusDate));
        model.resetOriginalValues();
        return model;
    }

    @Override
    public void readExternal(ObjectInput objectInput) throws IOException {
        // Field order must mirror writeExternal exactly.
        uuid = objectInput.readUTF();
        guestbookId = objectInput.readLong();
        groupId = objectInput.readLong();
        companyId = objectInput.readLong();
        userId = objectInput.readLong();
        userName = objectInput.readUTF();
        createDate = objectInput.readLong();
        modifiedDate = objectInput.readLong();
        name = objectInput.readUTF();
        status = objectInput.readInt();
        statusByUserId = objectInput.readLong();
        statusByUserName = objectInput.readUTF();
        statusDate = objectInput.readLong();
    }

    @Override
    public void writeExternal(ObjectOutput objectOutput)
        throws IOException {
        // writeUTF rejects null, so null strings are persisted as the blank string.
        objectOutput.writeUTF(uuid == null ? StringPool.BLANK : uuid);
        objectOutput.writeLong(guestbookId);
        objectOutput.writeLong(groupId);
        objectOutput.writeLong(companyId);
        objectOutput.writeLong(userId);
        objectOutput.writeUTF(userName == null ? StringPool.BLANK : userName);
        objectOutput.writeLong(createDate);
        objectOutput.writeLong(modifiedDate);
        objectOutput.writeUTF(name == null ? StringPool.BLANK : name);
        objectOutput.writeInt(status);
        objectOutput.writeLong(statusByUserId);
        objectOutput.writeUTF(statusByUserName == null ? StringPool.BLANK : statusByUserName);
        objectOutput.writeLong(statusDate);
    }

    public String uuid;
    public long guestbookId;
    public long groupId;
    public long companyId;
    public long userId;
    public String userName;
    public long createDate;
    public long modifiedDate;
    public String name;
    public int status;
    public long statusByUserId;
    public String statusByUserName;
    public long statusDate;
}
| |
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.rest.api.service;
import static io.gravitee.repository.management.model.Audit.AuditProperties.THEME;
import static io.gravitee.repository.management.model.ThemeReferenceType.ENVIRONMENT;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static java.util.Optional.empty;
import static java.util.Optional.of;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import io.gravitee.repository.exceptions.TechnicalException;
import io.gravitee.repository.management.api.ThemeRepository;
import io.gravitee.repository.management.model.Theme;
import io.gravitee.rest.api.model.InlinePictureEntity;
import io.gravitee.rest.api.model.PictureEntity;
import io.gravitee.rest.api.model.UrlPictureEntity;
import io.gravitee.rest.api.model.theme.*;
import io.gravitee.rest.api.service.common.GraviteeContext;
import io.gravitee.rest.api.service.exceptions.DuplicateThemeNameException;
import io.gravitee.rest.api.service.exceptions.ThemeNotFoundException;
import io.gravitee.rest.api.service.impl.ThemeServiceImpl;
import io.gravitee.rest.api.service.impl.ThemeServiceImpl.ThemeDefinitionMapper;
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.test.util.ReflectionTestUtils;
/**
* @author Guillaume CUSNIEUX (guillaume.cusnieux at graviteesource.com)
* @author GraviteeSource Team
*/
@RunWith(MockitoJUnitRunner.class)
public class ThemeServiceTest {
private static final String THEME_ID = "default";
private static final String THEMES_PATH = "src/test/resources/themes";
@InjectMocks
private ThemeService themeService = new ThemeServiceImpl();
@Mock
private ThemeRepository themeRepository;
@Mock
private AuditService auditService;
private ThemeServiceImpl themeServiceImpl = new ThemeServiceImpl();
@Before
public void init() {
    // Point both service instances at the bundled test theme resources.
    for (Object target : new Object[] { themeService, themeServiceImpl }) {
        ReflectionTestUtils.setField(target, "themesPath", THEMES_PATH);
    }
}
@Test
public void shouldFindById() throws TechnicalException, JsonProcessingException {
    // Theme stored in the DEFAULT environment must be returned with all fields mapped.
    ThemeDefinitionMapper definitionMapper = new ThemeDefinitionMapper();
    ThemeDefinition themeDefinition = new ThemeDefinition();
    // emptyList() is type-safe, unlike the raw Collections.EMPTY_LIST constant.
    themeDefinition.setData(Collections.emptyList());
    String definition = definitionMapper.writeValueAsString(themeDefinition);
    final Theme theme = mock(Theme.class);
    when(theme.getId()).thenReturn(THEME_ID);
    when(theme.getName()).thenReturn("NAME");
    when(theme.getDefinition()).thenReturn(definition);
    when(theme.getReferenceId()).thenReturn("DEFAULT");
    when(theme.getCreatedAt()).thenReturn(new Date(1));
    when(theme.getUpdatedAt()).thenReturn(new Date(2));
    when(theme.getFavicon()).thenReturn("favicon.png");
    when(themeRepository.findById(THEME_ID)).thenReturn(of(theme));
    final ThemeEntity themeEntity = themeService.findById(THEME_ID);
    assertEquals(THEME_ID, themeEntity.getId());
    assertEquals("NAME", themeEntity.getName());
    assertEquals(definition, definitionMapper.writeValueAsString(themeEntity.getDefinition()));
    assertEquals(new Date(1), themeEntity.getCreatedAt());
    assertEquals(new Date(2), themeEntity.getUpdatedAt());
    // JUnit convention: expected value first, actual second.
    assertEquals(theme.getFavicon(), themeEntity.getFavicon());
}
@Test(expected = ThemeNotFoundException.class)
public void shouldThrowThemeNotFoundException() throws TechnicalException {
    // With no theme stored under THEME_ID, findById must raise ThemeNotFoundException.
    when(themeRepository.findById(THEME_ID)).thenReturn(empty());
    themeService.findById(THEME_ID);
}
@Test(expected = ThemeNotFoundException.class)
public void shouldThrowThemeNotFoundExceptionWhenThemeIsNotInDefaultEnv() throws TechnicalException {
    // A theme that exists but belongs to a different environment must be treated
    // as not found. Only the stubs needed to trigger the exception are kept; the
    // original assertions after findById() were unreachable dead code, since the
    // call is expected to throw.
    final Theme theme = mock(Theme.class);
    when(theme.getReferenceId()).thenReturn("NOT-DEFAULT");
    when(themeRepository.findById(THEME_ID)).thenReturn(of(theme));
    themeService.findById(THEME_ID);
}
@Test
public void shouldFindAll() throws TechnicalException, JsonProcessingException {
    ThemeDefinitionMapper definitionMapper = new ThemeDefinitionMapper();
    // Use the service's bundled default definition as the stored theme payload.
    String definition = themeServiceImpl.getDefaultDefinition();
    final Theme theme = mock(Theme.class);
    when(theme.getId()).thenReturn(THEME_ID);
    when(theme.getName()).thenReturn("NAME");
    when(theme.getDefinition()).thenReturn(definition);
    when(theme.getCreatedAt()).thenReturn(new Date(1));
    when(theme.getUpdatedAt()).thenReturn(new Date(2));
    when(themeRepository.findByReferenceIdAndReferenceType(GraviteeContext.getCurrentEnvironment(), ENVIRONMENT.name()))
        .thenReturn(singleton(theme));
    final Set<ThemeEntity> themes = themeService.findAll();
    final ThemeEntity themeEntity = themes.iterator().next();
    assertEquals(THEME_ID, themeEntity.getId());
    assertEquals("NAME", themeEntity.getName());
    // isSame() compares definitions structurally, ignoring formatting differences.
    assertTrue(definitionMapper.isSame(definition, definitionMapper.writeValueAsString(themeEntity.getDefinition())));
    assertEquals(new Date(1), themeEntity.getCreatedAt());
    assertEquals(new Date(2), themeEntity.getUpdatedAt());
}
@Test
public void shouldFindEnabled() throws TechnicalException {
    // An enabled theme in the current environment must be returned by findEnabled().
    String definition = themeServiceImpl.getDefaultDefinition();
    final Theme theme = mock(Theme.class);
    when(theme.getId()).thenReturn(THEME_ID);
    when(theme.getName()).thenReturn("NAME");
    when(theme.isEnabled()).thenReturn(true);
    when(theme.getDefinition()).thenReturn(definition);
    when(theme.getCreatedAt()).thenReturn(new Date(1));
    when(theme.getUpdatedAt()).thenReturn(new Date(2));
    when(themeRepository.findByReferenceIdAndReferenceType(GraviteeContext.getCurrentEnvironment(), ENVIRONMENT.name()))
        .thenReturn(singleton(theme));
    assertNotNull(themeService.findEnabled());
}
@Test
public void shouldGetDefaultIfNoThemeEnabled() throws TechnicalException {
    // When the only stored theme is disabled, findEnabled() must still return a
    // non-null result (presumably the default theme — behavior asserted is only non-null).
    final Theme theme = mock(Theme.class);
    when(theme.isEnabled()).thenReturn(false);
    when(themeRepository.findByReferenceIdAndReferenceType(GraviteeContext.getCurrentEnvironment(), ENVIRONMENT.name()))
        .thenReturn(singleton(theme));
    assertNotNull(themeService.findEnabled());
}
@Test
public void shouldCreate() throws TechnicalException, IOException {
    // Creating a theme must persist it with generated id/dates and emit an audit log.
    ThemeDefinitionMapper definitionMapper = new ThemeDefinitionMapper();
    ThemeDefinition themeDefinition = new ThemeDefinition();
    // emptyList() is type-safe, unlike the raw Collections.EMPTY_LIST constant.
    themeDefinition.setData(Collections.emptyList());
    String definition = definitionMapper.writeValueAsString(themeDefinition);
    final NewThemeEntity newThemeEntity = new NewThemeEntity();
    newThemeEntity.setName("NAME");
    newThemeEntity.setDefinition(themeDefinition);
    final Theme createdTheme = new Theme();
    createdTheme.setId(THEME_ID);
    createdTheme.setName("NAME");
    createdTheme.setDefinition(definition);
    createdTheme.setCreatedAt(new Date());
    createdTheme.setUpdatedAt(new Date());
    when(themeRepository.create(any())).thenReturn(createdTheme);
    final ThemeEntity themeEntity = themeService.create(newThemeEntity);
    assertNotNull(themeEntity.getId());
    assertEquals("NAME", themeEntity.getName());
    assertNotNull(themeEntity.getDefinition());
    assertEquals(0, themeEntity.getDefinition().getData().size());
    assertNotNull(themeEntity.getCreatedAt());
    assertNotNull(themeEntity.getUpdatedAt());
    // (A locally-built Theme instance that was never used has been removed.)
    verify(themeRepository, times(1))
        .create(
            argThat(
                argument -> {
                    return (
                        "NAME".equals(argument.getName()) &&
                        argument.getDefinition() != null &&
                        "DEFAULT".equals(argument.getReferenceId()) &&
                        ENVIRONMENT.name().equals(argument.getReferenceType()) &&
                        !argument.getId().isEmpty() &&
                        argument.getCreatedAt() != null &&
                        argument.getUpdatedAt() != null
                    );
                }
            )
        );
    verify(auditService, times(1))
        .createEnvironmentAuditLog(
            eq(ImmutableMap.of(THEME, THEME_ID)),
            eq(Theme.AuditEvent.THEME_CREATED),
            any(Date.class),
            isNull(),
            any()
        );
}
@Test(expected = DuplicateThemeNameException.class)
public void shouldThrowDuplicateThemeNameExceptionOnCreate() throws TechnicalException {
    // A theme named "NAME" already exists in the environment, so creating another
    // with the same name must fail.
    final Theme theme = mock(Theme.class);
    when(theme.getId()).thenReturn(THEME_ID);
    when(theme.getName()).thenReturn("NAME");
    when(theme.getDefinition()).thenReturn(themeServiceImpl.getDefaultDefinition());
    when(themeRepository.findByReferenceIdAndReferenceType(GraviteeContext.getCurrentEnvironment(), ENVIRONMENT.name()))
        .thenReturn(singleton(theme));
    final NewThemeEntity newThemeEntity = new NewThemeEntity();
    newThemeEntity.setName("NAME");
    themeService.create(newThemeEntity);
}
@Test
public void shouldUpdate() throws TechnicalException, JsonProcessingException {
    // Updating an existing theme must persist the new state and emit an audit log.
    ThemeDefinitionMapper definitionMapper = new ThemeDefinitionMapper();
    ThemeDefinition themeDefinition = new ThemeDefinition();
    // emptyList() is type-safe, unlike the raw Collections.EMPTY_LIST constant.
    themeDefinition.setData(Collections.emptyList());
    String definition = definitionMapper.writeValueAsString(themeDefinition);
    final UpdateThemeEntity updateThemeEntity = new UpdateThemeEntity();
    updateThemeEntity.setId(THEME_ID);
    updateThemeEntity.setName("NAME");
    updateThemeEntity.setDefinition(themeDefinition);
    final Theme updatedTheme = new Theme();
    updatedTheme.setId(THEME_ID);
    updatedTheme.setName("NAME");
    updatedTheme.setDefinition(definition);
    updatedTheme.setCreatedAt(new Date());
    updatedTheme.setUpdatedAt(new Date());
    when(themeRepository.update(any())).thenReturn(updatedTheme);
    when(themeRepository.findById(THEME_ID)).thenReturn(of(updatedTheme));
    final ThemeEntity themeEntity = themeService.update(updateThemeEntity);
    assertNotNull(themeEntity.getId());
    assertEquals("NAME", themeEntity.getName());
    assertEquals(definition, definitionMapper.writeValueAsString(themeEntity.getDefinition()));
    assertNotNull(themeEntity.getCreatedAt());
    assertNotNull(themeEntity.getUpdatedAt());
    // (A locally-built Theme instance that was never used has been removed.)
    verify(themeRepository, times(1))
        .update(
            argThat(
                argument ->
                    "NAME".equals(argument.getName()) &&
                    argument.getDefinition() != null &&
                    "DEFAULT".equals(argument.getReferenceId()) &&
                    ENVIRONMENT.name().equals(argument.getReferenceType()) &&
                    THEME_ID.equals(argument.getId()) &&
                    argument.getUpdatedAt() != null
            )
        );
    verify(auditService, times(1))
        .createEnvironmentAuditLog(
            eq(ImmutableMap.of(THEME, THEME_ID)),
            eq(Theme.AuditEvent.THEME_UPDATED),
            any(Date.class),
            any(),
            any()
        );
}
@Test(expected = DuplicateThemeNameException.class)
public void shouldThrowDuplicateThemeNameExceptionOnUpdate() throws TechnicalException {
    // Renaming THEME_ID to a name already used by another theme ("foobar") must fail.
    final Theme theme = mock(Theme.class);
    when(theme.getId()).thenReturn(THEME_ID);
    when(theme.getName()).thenReturn("NAME");
    when(theme.getDefinition()).thenReturn(themeServiceImpl.getDefaultDefinition());
    final Theme theme2 = mock(Theme.class);
    when(theme2.getId()).thenReturn("foobar");
    when(theme2.getName()).thenReturn("NAME");
    when(theme2.getDefinition()).thenReturn(themeServiceImpl.getDefaultDefinition());
    when(themeRepository.findById(THEME_ID)).thenReturn(of(theme));
    when(themeRepository.findByReferenceIdAndReferenceType(GraviteeContext.getCurrentEnvironment(), ENVIRONMENT.name()))
        // Diamond operator replaces the raw-typed `new HashSet(...)`.
        .thenReturn(new HashSet<>(asList(theme, theme2)));
    final UpdateThemeEntity updateThemeEntity = new UpdateThemeEntity();
    updateThemeEntity.setId(THEME_ID);
    updateThemeEntity.setName("NAME");
    themeService.update(updateThemeEntity);
}
@Test
public void shouldNotUpdate() throws TechnicalException {
    // When the theme to update does not exist, the service falls back to
    // creating it instead of updating.
    final UpdateThemeEntity updateThemeEntity = new UpdateThemeEntity();
    updateThemeEntity.setId(THEME_ID);
    when(themeRepository.findById(THEME_ID)).thenReturn(empty());
    final Theme theme = mock(Theme.class);
    when(theme.getId()).thenReturn(THEME_ID);
    when(theme.getName()).thenReturn("NAME");
    when(theme.getDefinition()).thenReturn(themeServiceImpl.getDefaultDefinition());
    when(themeRepository.create(any())).thenReturn(theme);
    themeService.update(updateThemeEntity);
    verify(themeRepository).create(any());
}
@Test
public void shouldResetToDefaultTheme() throws TechnicalException {
    // Resetting to the default theme deletes the stored override and logs a
    // THEME_RESET audit event. The original test built a Theme instance and a
    // ThemeDefinition that were never stubbed into any mock nor asserted on;
    // that dead setup has been removed.
    themeService.resetToDefaultTheme(THEME_ID);
    verify(themeRepository, times(1)).delete(THEME_ID);
    verify(auditService, times(1))
        .createEnvironmentAuditLog(
            eq(ImmutableMap.of(THEME, THEME_ID)),
            eq(Theme.AuditEvent.THEME_RESET),
            any(Date.class),
            any(),
            any()
        );
}
@Test
public void shouldDelete() throws TechnicalException {
    // Deleting an existing theme removes it from the repository and logs a
    // THEME_DELETED audit event carrying the deleted entity.
    final Theme theme = mock(Theme.class);
    when(themeRepository.findById(THEME_ID)).thenReturn(of(theme));
    themeService.delete(THEME_ID);
    verify(themeRepository, times(1)).delete(THEME_ID);
    verify(auditService, times(1))
        .createEnvironmentAuditLog(
            eq(ImmutableMap.of(THEME, THEME_ID)),
            eq(Theme.AuditEvent.THEME_DELETED),
            any(Date.class),
            isNull(),
            eq(theme)
        );
}
@Test
public void shouldLoadDefaultThemeDefinition() throws IOException {
    // The bundled default definition must parse and expose all 38 data entries.
    final String json = themeServiceImpl.getDefaultDefinition();
    final ThemeDefinition parsed = new ObjectMapper().readValue(json, ThemeDefinition.class);
    assertNotNull(parsed);
    assertNotNull(parsed.getData());
    assertEquals(38, parsed.getData().size());
}
@Test
public void shouldMergeThemeDefinition() throws IOException, TechnicalException {
    // Merging a custom definition over the base one must keep base components,
    // add custom-only values, and let custom CSS values override base ones.
    ThemeDefinitionMapper mapper = new ThemeDefinitionMapper();
    String def = themeServiceImpl.getDefinition(THEMES_PATH + "/base-definition.json");
    ThemeDefinition baseDefinition = mapper.readValue(def, ThemeDefinition.class);
    String customDef = themeServiceImpl.getDefinition(THEMES_PATH + "/custom-definition.json");
    ThemeDefinition customDefinition = mapper.readValue(customDef, ThemeDefinition.class);
    assertEquals(33, customDefinition.getData().size());
    assertNull(mapper.getThemeComponentDefinition(baseDefinition, "gv-pagination"));
    assertNotNull(mapper.getThemeComponentDefinition(customDefinition, "gv-pagination"));
    // JUnit convention throughout: expected value first, actual second
    // (the original test had these reversed).
    assertEquals(5, mapper.getThemeComponentDefinition(baseDefinition, "gv-plans").getCss().size());
    assertEquals(4, mapper.getThemeComponentDefinition(customDefinition, "gv-plans").getCss().size());
    assertEquals(2, mapper.getThemeComponentDefinition(baseDefinition, "gv-popover").getCss().size());
    assertEquals(3, mapper.getThemeComponentDefinition(customDefinition, "gv-popover").getCss().size());
    ThemeCssDefinition gvThemeColor = mapper.getThemeCssDefinition(baseDefinition, "gv-theme", "--gv-theme-color");
    assertNull(gvThemeColor.getDefaultValue());
    assertEquals(ThemeCssType.COLOR, gvThemeColor.getType());
    assertEquals("#009B5B", gvThemeColor.getValue());
    ThemeCssDefinition gvButtonFz = mapper.getThemeCssDefinition(baseDefinition, "gv-button", "--gv-button--fz");
    assertNull(gvButtonFz.getDefaultValue());
    assertEquals(ThemeCssType.LENGTH, gvButtonFz.getType());
    assertEquals("var(--gv-theme-font-size-m, 14px)", gvButtonFz.getValue());
    assertEquals("Font size", gvButtonFz.getDescription());
    ThemeDefinition mergedDefinition = mapper.merge(def, customDef);
    assertEquals(34, mergedDefinition.getData().size());
    // Custom-only components are not added by merge; base-only values survive.
    assertNull(mapper.getThemeComponentDefinition(mergedDefinition, "gv-pagination"));
    assertEquals(5, mapper.getThemeComponentDefinition(mergedDefinition, "gv-plans").getCss().size());
    assertEquals(2, mapper.getThemeComponentDefinition(mergedDefinition, "gv-popover").getCss().size());
    ThemeCssDefinition gvThemeColorMerged = mapper.getThemeCssDefinition(mergedDefinition, "gv-theme", "--gv-theme-color");
    assertNull(gvThemeColorMerged.getDefaultValue());
    assertEquals(ThemeCssType.COLOR, gvThemeColorMerged.getType());
    assertEquals("#FAFAFA", gvThemeColorMerged.getValue());
    ThemeCssDefinition gvButtonFzMerged = mapper.getThemeCssDefinition(mergedDefinition, "gv-button", "--gv-button--fz");
    assertNull(gvButtonFzMerged.getDefaultValue());
    assertEquals(ThemeCssType.LENGTH, gvButtonFzMerged.getType());
    assertEquals("200px", gvButtonFzMerged.getValue());
    assertEquals("Font size", gvButtonFzMerged.getDescription());
}
@Test
public void shouldMergeThemeDefinitionWithLegacy() throws IOException, TechnicalException {
    // Merging the current default definition with a legacy one should keep the
    // default's css variables and drop legacy-only ones (e.g. --gv-theme--c).
    ThemeDefinitionMapper mapper = new ThemeDefinitionMapper();
    String defaultJson = themeServiceImpl.getDefaultDefinition();
    ThemeDefinition defaultDefinition = mapper.readValue(defaultJson, ThemeDefinition.class);
    String legacyJson = themeServiceImpl.getDefinition(THEMES_PATH + "/legacy-definition.json");
    ThemeDefinition legacyDefinition = mapper.readValue(legacyJson, ThemeDefinition.class);

    assertEquals(38, defaultDefinition.getData().size());
    assertEquals(35, legacyDefinition.getData().size());

    ThemeDefinition mergedDefinition = mapper.merge(defaultJson, legacyJson);
    assertNotNull(mergedDefinition);
    // The merged result keeps the default definition's component count.
    assertEquals(38, mergedDefinition.getData().size());

    // Legacy-only variable survives only in the legacy definition, not in the merge.
    assertNotNull(mapper.getThemeCssDefinition(legacyDefinition, "gv-theme", "--gv-theme--c"));
    assertNull(mapper.getThemeCssDefinition(defaultDefinition, "gv-theme", "--gv-theme--c"));
    assertNull(mapper.getThemeCssDefinition(mergedDefinition, "gv-theme", "--gv-theme--c"));

    // Default-only variable is absent from legacy but present in default and merged.
    assertNull(mapper.getThemeCssDefinition(legacyDefinition, "gv-theme", "--gv-theme-color"));
    assertNotNull(mapper.getThemeCssDefinition(defaultDefinition, "gv-theme", "--gv-theme-color"));
    assertNotNull(mapper.getThemeCssDefinition(mergedDefinition, "gv-theme", "--gv-theme-color"));
}
@Test
public void shouldCompareDefinition() throws IOException, TechnicalException {
    ThemeDefinitionMapper mapper = new ThemeDefinitionMapper();
    String rawDefinition = themeServiceImpl.getDefaultDefinition();
    ThemeDefinition parsed = mapper.readDefinition(rawDefinition);
    String prettyPrinted = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(parsed);

    // Pretty-printing changes the raw text but must not change the semantic content.
    assertNotEquals(rawDefinition, prettyPrinted);
    assertTrue(mapper.isSame(rawDefinition, prettyPrinted));
    // A genuinely different definition must not compare as "same".
    assertFalse(mapper.isSame(rawDefinition, themeServiceImpl.getDefinition(THEMES_PATH + "/custom-definition.json")));
}
@Test
public void shouldCreateDefaultTheme() throws TechnicalException, IOException {
    // Calling update() on a theme that does not exist in the repository must
    // fall back to creating it, and the creation must be audited.
    ThemeDefinitionMapper definitionMapper = new ThemeDefinitionMapper();
    String definition = themeServiceImpl.getDefaultDefinition();

    final UpdateThemeEntity themeToCreate = new UpdateThemeEntity();
    themeToCreate.setId(THEME_ID);
    themeToCreate.setName("Default");
    themeToCreate.setDefinition(definitionMapper.readDefinition(definition));

    final Theme createdTheme = new Theme();
    createdTheme.setId(THEME_ID);
    createdTheme.setName("Default");
    createdTheme.setDefinition(definition);
    createdTheme.setCreatedAt(new Date());
    createdTheme.setUpdatedAt(new Date());
    when(themeRepository.create(any())).thenReturn(createdTheme);

    // NOTE(review): removed two tautological assertions that compared an
    // expression with itself (assertEquals(definition, definition) and the
    // equivalent readTree form) — they could never fail and asserted nothing.
    themeService.update(themeToCreate);

    // The repository receives a fully-populated theme equivalent to the default definition.
    verify(themeRepository, times(1))
        .create(
            argThat(
                argument -> {
                    try {
                        return (
                            "Default".equals(argument.getName()) &&
                            definitionMapper.readTree(argument.getDefinition()).equals(definitionMapper.readTree(definition)) &&
                            "DEFAULT".equals(argument.getReferenceId()) &&
                            ENVIRONMENT.name().equals(argument.getReferenceType()) &&
                            !argument.getId().isEmpty() &&
                            argument.getCreatedAt() != null &&
                            argument.getUpdatedAt() != null
                        );
                    } catch (IOException e) {
                        // Matching failure is signalled by returning false; keep the trace for diagnosis.
                        e.printStackTrace();
                    }
                    return false;
                }
            )
        );
    verify(auditService, times(1))
        .createEnvironmentAuditLog(
            eq(ImmutableMap.of(THEME, THEME_ID)),
            eq(Theme.AuditEvent.THEME_CREATED),
            any(Date.class),
            isNull(),
            any()
        );
}
@Test
public void shouldUpdateDefaultTheme() throws TechnicalException, IOException {
    // updateDefaultTheme() must rewrite an existing environment theme with the
    // merge of the current default definition and the theme's custom definition.
    ObjectMapper mapper = new ObjectMapper();
    ThemeDefinitionMapper themeDefinitionMapper = new ThemeDefinitionMapper();
    String definition = themeServiceImpl.getDefaultDefinition();

    // theme already carries the default definition; theme2 carries a custom one.
    final Theme theme = mock(Theme.class);
    when(theme.getDefinition()).thenReturn(definition);
    final Theme theme2 = mock(Theme.class);
    when(theme2.getId()).thenReturn(THEME_ID);
    when(theme2.getName()).thenReturn("NAME");
    String customDefinition = themeServiceImpl.getDefinition(THEMES_PATH + "/custom-definition.json");
    when(theme2.getDefinition()).thenReturn(customDefinition);
    when(theme2.getReferenceType()).thenReturn(ENVIRONMENT.name());
    when(theme2.getReferenceId()).thenReturn("DEFAULT");
    when(theme2.getCreatedAt()).thenReturn(new Date(1));
    when(theme2.getUpdatedAt()).thenReturn(new Date(2));
    when(themeRepository.findByReferenceIdAndReferenceType(GraviteeContext.getCurrentEnvironment(), ENVIRONMENT.name()))
        // was a raw-type `new HashSet(...)`; diamond keeps it type-safe
        .thenReturn(new HashSet<>(asList(theme, theme2)));

    String mergeDefinition = themeDefinitionMapper.writeValueAsString(themeDefinitionMapper.merge(definition, customDefinition));

    themeService.updateDefaultTheme();

    // Exactly one update is expected, and the matcher pins it to theme2 ("NAME")
    // with the merged definition.
    verify(themeRepository, times(1))
        .update(
            argThat(
                argument -> {
                    try {
                        return (
                            "NAME".equals(argument.getName()) &&
                            mapper.readTree(argument.getDefinition()).equals(mapper.readTree(mergeDefinition)) &&
                            "DEFAULT".equals(argument.getReferenceId()) &&
                            ENVIRONMENT.name().equals(argument.getReferenceType()) &&
                            !argument.getId().isEmpty() &&
                            argument.getCreatedAt() != null &&
                            argument.getUpdatedAt() != null
                        );
                    } catch (IOException e) {
                        // Matching failure is signalled by returning false; keep the trace for diagnosis.
                        e.printStackTrace();
                    }
                    return false;
                }
            )
        );
    verify(auditService, times(1))
        .createEnvironmentAuditLog(
            eq(ImmutableMap.of(THEME, THEME_ID)),
            eq(Theme.AuditEvent.THEME_UPDATED),
            any(Date.class),
            any(),
            any()
        );
}
@Test
public void shouldGetBackgroundImageUrl() throws TechnicalException {
    // A theme whose background image is an http URL is surfaced as a UrlPictureEntity.
    final Theme theme = mock(Theme.class);
    when(theme.getId()).thenReturn(THEME_ID);
    when(theme.getName()).thenReturn("NAME");
    when(theme.isEnabled()).thenReturn(true);
    when(theme.getDefinition()).thenReturn(themeServiceImpl.getDefaultDefinition());
    when(theme.getBackgroundImage()).thenReturn("http://localhost/image");
    when(themeRepository.findByReferenceIdAndReferenceType(GraviteeContext.getCurrentEnvironment(), ENVIRONMENT.name()))
        .thenReturn(singleton(theme));

    PictureEntity picture = themeService.getBackgroundImage(THEME_ID);

    assertNotNull(picture);
    assertTrue(picture instanceof UrlPictureEntity);
}
@Test
public void shouldGetBackgroundImage() throws TechnicalException {
    // The mocked theme is never returned by the repository, so no background
    // image can be resolved for the requested id.
    final Theme theme = mock(Theme.class);
    Mockito.lenient().when(theme.getReferenceType()).thenReturn(ENVIRONMENT.name());

    assertNull(themeService.getBackgroundImage(THEME_ID));
}
@Test
public void shouldGetLogo() throws TechnicalException {
    // The default logo is returned inline rather than as a URL.
    final Theme theme = mock(Theme.class, withSettings().lenient());
    Mockito.lenient().when(theme.getReferenceType()).thenReturn(ENVIRONMENT.name());
    when(theme.getReferenceId()).thenReturn("DEFAULT");
    when(theme.getLogo()).thenReturn(themeServiceImpl.getDefaultLogo());

    PictureEntity picture = themeService.getLogo(THEME_ID);

    assertNotNull(picture);
    assertTrue(picture instanceof InlinePictureEntity);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ESAllocationTestCase;
import org.elasticsearch.cluster.EmptyClusterInfoService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.gateway.TestGatewayAllocator;
import org.hamcrest.Matchers;
import java.util.stream.Collectors;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
/**
 * Tests for {@link BalancedShardsAllocator}: verifies that the index- and
 * shard-balance factor settings produce evenly spread shards across nodes,
 * that the balance settings can be applied dynamically, and that a
 * primary-shard imbalance alone does not trigger a rebalance.
 */
public class BalanceConfigurationTests extends ESAllocationTestCase {
private final Logger logger = LogManager.getLogger(BalanceConfigurationTests.class);
// TODO maybe we can randomize these numbers somehow
final int numberOfNodes = 25;
final int numberOfIndices = 12;
final int numberOfShards = 2;
final int numberOfReplicas = 2;
// Balance is asserted after initial allocation, after adding one node, and
// after removing half the nodes.
public void testIndexBalance() {
/* Tests balance over indices only */
final float indexBalance = 1.0f;
final float replicaBalance = 0.0f;
final float balanceThreshold = 1.0f;
Settings.Builder settings = Settings.builder();
settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString());
settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance);
settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance);
settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceThreshold);
AllocationService strategy = createAllocationService(settings.build(), new TestGatewayAllocator());
ClusterState clusterState = initCluster(strategy);
assertIndexBalance(clusterState.getRoutingTable(), clusterState.getRoutingNodes(), numberOfNodes, numberOfIndices,
numberOfReplicas, numberOfShards, balanceThreshold);
clusterState = addNode(clusterState, strategy);
assertIndexBalance(clusterState.getRoutingTable(), clusterState.getRoutingNodes(), numberOfNodes + 1,
numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold);
clusterState = removeNodes(clusterState, strategy);
assertIndexBalance(clusterState.getRoutingTable(), clusterState.getRoutingNodes(),
(numberOfNodes + 1) - (numberOfNodes + 1) / 2, numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold);
}
// Same three-phase check as testIndexBalance, but weighting only the shard
// (replica) balance factor.
public void testReplicaBalance() {
/* Tests balance over replicas only */
final float indexBalance = 0.0f;
final float replicaBalance = 1.0f;
final float balanceThreshold = 1.0f;
Settings.Builder settings = Settings.builder();
settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString());
settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance);
settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance);
settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceThreshold);
AllocationService strategy = createAllocationService(settings.build(), new TestGatewayAllocator());
ClusterState clusterState = initCluster(strategy);
assertReplicaBalance(clusterState.getRoutingNodes(), numberOfNodes, numberOfIndices,
numberOfReplicas, numberOfShards, balanceThreshold);
clusterState = addNode(clusterState, strategy);
assertReplicaBalance(clusterState.getRoutingNodes(), numberOfNodes + 1,
numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold);
clusterState = removeNodes(clusterState, strategy);
assertReplicaBalance(clusterState.getRoutingNodes(),
numberOfNodes + 1 - (numberOfNodes + 1) / 2, numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold);
}
/**
 * Builds a cluster of {@code numberOfIndices} new indices on
 * {@code numberOfNodes} nodes, then reroutes and starts shards until the
 * allocation reaches a steady state.
 */
private ClusterState initCluster(AllocationService strategy) {
MetaData.Builder metaDataBuilder = MetaData.builder();
RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
for (int i = 0; i < numberOfIndices; i++) {
IndexMetaData.Builder index = IndexMetaData.builder("test" + i).settings(settings(Version.CURRENT))
.numberOfShards(numberOfShards).numberOfReplicas(numberOfReplicas);
metaDataBuilder = metaDataBuilder.put(index);
}
MetaData metaData = metaDataBuilder.build();
for (ObjectCursor<IndexMetaData> cursor : metaData.indices().values()) {
routingTableBuilder.addAsNew(cursor.value);
}
RoutingTable initialRoutingTable = routingTableBuilder.build();
logger.info("start " + numberOfNodes + " nodes");
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder();
for (int i = 0; i < numberOfNodes; i++) {
nodes.add(newNode("node" + i));
}
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING
.getDefault(Settings.EMPTY)).nodes(nodes).metaData(metaData).routingTable(initialRoutingTable).build();
clusterState = strategy.reroute(clusterState, "reroute");
logger.info("restart all the primary shards, replicas will start initializing");
clusterState = startInitializingShardsAndReroute(strategy, clusterState);
logger.info("start the replica shards");
clusterState = startInitializingShardsAndReroute(strategy, clusterState);
logger.info("complete rebalancing");
return applyStartedShardsUntilNoChange(clusterState, strategy);
}
/** Adds one extra node and lets the allocator rebalance to a steady state. */
private ClusterState addNode(ClusterState clusterState, AllocationService strategy) {
logger.info("now, start 1 more node, check that rebalancing will happen because we set it to always");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
.add(newNode("node" + numberOfNodes)))
.build();
RoutingTable routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
// move initializing to started
return applyStartedShardsUntilNoChange(clusterState, strategy);
}
/**
 * Removes roughly half the nodes (node ids from (n+1)/2 up to n inclusive),
 * disassociates their shards, and re-stabilizes the allocation.
 */
private ClusterState removeNodes(ClusterState clusterState, AllocationService strategy) {
logger.info("Removing half the nodes (" + (numberOfNodes + 1) / 2 + ")");
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes());
boolean removed = false;
for (int i = (numberOfNodes + 1) / 2; i <= numberOfNodes; i++) {
nodes.remove("node" + i);
removed = true;
}
clusterState = ClusterState.builder(clusterState).nodes(nodes.build()).build();
if (removed) {
clusterState = strategy.disassociateDeadNodes(clusterState, randomBoolean(), "removed nodes");
}
logger.info("start all the primary shards, replicas will start initializing");
clusterState = startInitializingShardsAndReroute(strategy, clusterState);
logger.info("start the replica shards");
clusterState = startInitializingShardsAndReroute(strategy, clusterState);
logger.info("rebalancing");
clusterState = strategy.reroute(clusterState, "reroute");
logger.info("complete rebalancing");
return applyStartedShardsUntilNoChange(clusterState, strategy);
}
/**
 * Asserts that every node's started-shard count lies within
 * [floor(avg - threshold), ceil(avg + threshold)] of the per-node average.
 */
private void assertReplicaBalance(RoutingNodes nodes, int numberOfNodes, int numberOfIndices, int numberOfReplicas,
int numberOfShards, float threshold) {
final int unassigned = nodes.unassigned().size();
if (unassigned > 0) {
// Ensure that if there any unassigned shards, all of their replicas are unassigned as well
// (i.e. unassigned count is always [replicas] + 1 for each shard unassigned shardId)
nodes.shardsWithState(UNASSIGNED).stream().collect(
Collectors.toMap(
ShardRouting::shardId,
s -> 1,
(a, b) -> a + b
)).values().forEach(
count -> assertEquals(numberOfReplicas + 1, count.longValue())
);
}
assertEquals(numberOfNodes, nodes.size());
final int numShards = numberOfIndices * numberOfShards * (numberOfReplicas + 1) - unassigned;
final float avgNumShards = (float) (numShards) / (float) (numberOfNodes);
// NOTE(review): the outer Math.round is redundant — floor/ceil already
// produce whole values; kept byte-identical here.
final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - threshold)));
final int maxAvgNumberOfShards = Math.round(Math.round(Math.ceil(avgNumShards + threshold)));
for (RoutingNode node : nodes) {
assertThat(node.shardsWithState(STARTED).size(), Matchers.greaterThanOrEqualTo(minAvgNumberOfShards));
assertThat(node.shardsWithState(STARTED).size(), Matchers.lessThanOrEqualTo(maxAvgNumberOfShards));
}
}
/**
 * Asserts, per index, that every node holds a number of that index's started
 * shards within the threshold band around the per-node average.
 */
private void assertIndexBalance(RoutingTable routingTable, RoutingNodes nodes, int numberOfNodes, int numberOfIndices,
int numberOfReplicas, int numberOfShards, float threshold) {
final int numShards = numberOfShards * (numberOfReplicas + 1);
final float avgNumShards = (float) (numShards) / (float) (numberOfNodes);
final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - threshold)));
final int maxAvgNumberOfShards = Math.round(Math.round(Math.ceil(avgNumShards + threshold)));
for (ObjectCursor<String> index : routingTable.indicesRouting().keys()) {
for (RoutingNode node : nodes) {
assertThat(node.shardsWithState(index.value, STARTED).size(), Matchers.greaterThanOrEqualTo(minAvgNumberOfShards));
assertThat(node.shardsWithState(index.value, STARTED).size(), Matchers.lessThanOrEqualTo(maxAvgNumberOfShards));
}
}
}
// Verifies the balance settings are read at construction and can be changed
// at runtime through ClusterSettings.applySettings.
public void testPersistedSettings() {
Settings.Builder settings = Settings.builder();
settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.2);
settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.3);
settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 2.0);
ClusterSettings service = new ClusterSettings(Settings.builder().build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
BalancedShardsAllocator allocator = new BalancedShardsAllocator(settings.build(), service);
assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.2f));
assertThat(allocator.getShardBalance(), Matchers.equalTo(0.3f));
assertThat(allocator.getThreshold(), Matchers.equalTo(2.0f));
// Re-applying the same balance values (plus an unrelated setting) must leave
// the allocator unchanged.
settings = Settings.builder();
settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.2);
settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.3);
settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 2.0);
settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString());
service.applySettings(settings.build());
assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.2f));
assertThat(allocator.getShardBalance(), Matchers.equalTo(0.3f));
assertThat(allocator.getThreshold(), Matchers.equalTo(2.0f));
// New values must be picked up dynamically.
settings = Settings.builder();
settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.5);
settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.1);
settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 3.0);
service.applySettings(settings.build());
assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.5f));
assertThat(allocator.getShardBalance(), Matchers.equalTo(0.1f));
assertThat(allocator.getThreshold(), Matchers.equalTo(3.0f));
}
// Builds a deliberately primary-heavy layout via a hand-written allocator,
// then checks that switching to the real allocator does not move any shard
// (all shards stay STARTED on their nodes) despite the primary overload.
public void testNoRebalanceOnPrimaryOverload() {
Settings.Builder settings = Settings.builder();
AllocationService strategy = new AllocationService(randomAllocationDeciders(settings.build(),
new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random()),
new TestGatewayAllocator(), new ShardsAllocator() {
/*
 * // this allocator tries to rebuild this scenario where a rebalance is
 * // triggered solely by the primary overload on node [1] where a shard
 * // is rebalanced to node 0
routing_nodes:
-----node_id[0][V]
--------[test][0], node[0], [R], s[STARTED]
--------[test][4], node[0], [R], s[STARTED]
-----node_id[1][V]
--------[test][0], node[1], [P], s[STARTED]
--------[test][1], node[1], [P], s[STARTED]
--------[test][3], node[1], [R], s[STARTED]
-----node_id[2][V]
--------[test][1], node[2], [R], s[STARTED]
--------[test][2], node[2], [R], s[STARTED]
--------[test][4], node[2], [P], s[STARTED]
-----node_id[3][V]
--------[test][2], node[3], [P], s[STARTED]
--------[test][3], node[3], [P], s[STARTED]
---- unassigned
*/
public void allocate(RoutingAllocation allocation) {
RoutingNodes.UnassignedShards unassigned = allocation.routingNodes().unassigned();
ShardRouting[] drain = unassigned.drain();
ArrayUtil.timSort(drain, (a, b) -> { return a.primary() ? -1 : 1; }); // we have to allocate primaries first
// Pin each shard id to the fixed node layout from the diagram above.
for (ShardRouting sr : drain) {
switch (sr.id()) {
case 0:
if (sr.primary()) {
allocation.routingNodes().initializeShard(sr, "node1", null, -1, allocation.changes());
} else {
allocation.routingNodes().initializeShard(sr, "node0", null, -1, allocation.changes());
}
break;
case 1:
if (sr.primary()) {
allocation.routingNodes().initializeShard(sr, "node1", null, -1, allocation.changes());
} else {
allocation.routingNodes().initializeShard(sr, "node2", null, -1, allocation.changes());
}
break;
case 2:
if (sr.primary()) {
allocation.routingNodes().initializeShard(sr, "node3", null, -1, allocation.changes());
} else {
allocation.routingNodes().initializeShard(sr, "node2", null, -1, allocation.changes());
}
break;
case 3:
if (sr.primary()) {
allocation.routingNodes().initializeShard(sr, "node3", null, -1, allocation.changes());
} else {
allocation.routingNodes().initializeShard(sr, "node1", null, -1, allocation.changes());
}
break;
case 4:
if (sr.primary()) {
allocation.routingNodes().initializeShard(sr, "node2", null, -1, allocation.changes());
} else {
allocation.routingNodes().initializeShard(sr, "node0", null, -1, allocation.changes());
}
break;
}
}
}
@Override
public ShardAllocationDecision decideShardAllocation(ShardRouting shard, RoutingAllocation allocation) {
throw new UnsupportedOperationException("explain not supported");
}
}, EmptyClusterInfoService.INSTANCE);
MetaData.Builder metaDataBuilder = MetaData.builder();
RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
IndexMetaData.Builder indexMeta = IndexMetaData.builder("test")
.settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1);
metaDataBuilder = metaDataBuilder.put(indexMeta);
MetaData metaData = metaDataBuilder.build();
for (ObjectCursor<IndexMetaData> cursor : metaData.indices().values()) {
routingTableBuilder.addAsNew(cursor.value);
}
RoutingTable routingTable = routingTableBuilder.build();
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder();
for (int i = 0; i < 4; i++) {
DiscoveryNode node = newNode("node" + i);
nodes.add(node);
}
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING
.getDefault(Settings.EMPTY)).nodes(nodes).metaData(metaData).routingTable(routingTable).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
// After the scripted allocation, every shard is still initializing.
RoutingNodes routingNodes = clusterState.getRoutingNodes();
for (RoutingNode routingNode : routingNodes) {
for (ShardRouting shardRouting : routingNode) {
assertThat(shardRouting.state(), Matchers.equalTo(ShardRoutingState.INITIALIZING));
}
}
strategy = createAllocationService(settings.build(), new TestGatewayAllocator());
logger.info("use the new allocator and check if it moves shards");
routingTable = startInitializingShardsAndReroute(strategy, clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.getRoutingNodes();
for (RoutingNode routingNode : routingNodes) {
for (ShardRouting shardRouting : routingNode) {
assertThat(shardRouting.state(), Matchers.equalTo(ShardRoutingState.STARTED));
}
}
logger.info("start the replica shards");
routingTable = startInitializingShardsAndReroute(strategy, clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.getRoutingNodes();
for (RoutingNode routingNode : routingNodes) {
for (ShardRouting shardRouting : routingNode) {
assertThat(shardRouting.state(), Matchers.equalTo(ShardRoutingState.STARTED));
}
}
logger.info("rebalancing");
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.getRoutingNodes();
for (RoutingNode routingNode : routingNodes) {
for (ShardRouting shardRouting : routingNode) {
assertThat(shardRouting.state(), Matchers.equalTo(ShardRoutingState.STARTED));
}
}
}
}
| |
package org.apache.lucene.search.grouping.term;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.grouping.AbstractGroupFacetCollector;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.SentinelIntSet;
import org.apache.lucene.util.UnicodeUtil;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* An implementation of {@link AbstractGroupFacetCollector} that computes grouped facets based on the indexed terms
* from DocValues.
*
* @lucene.experimental
*/
public abstract class TermGroupFacetCollector extends AbstractGroupFacetCollector {
final List<GroupedFacetHit> groupedFacetHits;
final SentinelIntSet segmentGroupedFacetHits;
SortedDocValues groupFieldTermsIndex;
/**
 * Factory method that selects the collector implementation matching whether the
 * facet field carries multiple tokens per document.
 *
 * @param groupField The group field
 * @param facetField The facet field
 * @param facetFieldMultivalued Whether the facet field has multiple tokens per document
 * @param facetPrefix The facet prefix a facet entry should start with to be included.
 * @param initialSize The initial allocation size of the internal int set and group facet list which should roughly
 *                    match the total number of expected unique groups. Be aware that the heap usage is
 *                    4 bytes * initialSize.
 * @return <code>TermGroupFacetCollector</code> implementation
 */
public static TermGroupFacetCollector createTermGroupFacetCollector(String groupField,
                                                                    String facetField,
                                                                    boolean facetFieldMultivalued,
                                                                    BytesRef facetPrefix,
                                                                    int initialSize) {
  // Multi-valued facet fields need the SortedSetDocValues-based implementation;
  // single-valued fields can use the simpler SortedDocValues one.
  return facetFieldMultivalued
      ? new MV(groupField, facetField, facetPrefix, initialSize)
      : new SV(groupField, facetField, facetPrefix, initialSize);
}
// Sole constructor; sizes the cross-segment hit list and the per-segment
// dedup set from the caller's estimate of unique groups.
TermGroupFacetCollector(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
super(groupField, facetField, facetPrefix);
groupedFacetHits = new ArrayList<>(initialSize);
// Integer.MIN_VALUE is the empty-slot sentinel; it can never be a valid
// (groupOrd * valueCount + facetOrd) index.
segmentGroupedFacetHits = new SentinelIntSet(initialSize, Integer.MIN_VALUE);
}
@Override
public boolean needsScores() {
// Facet counting itself does not use scores, but scoring is requested anyway.
return true; // TODO, maybe we don't?
}
// Implementation for single valued facet fields.
// Implementation for single valued facet fields.
static class SV extends TermGroupFacetCollector {
private SortedDocValues facetFieldTermsIndex;
SV(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
super(groupField, facetField, facetPrefix, initialSize);
}
// Counts the doc's facet term once per (group, facet) pair within the
// current segment; duplicates are filtered via segmentGroupedFacetHits.
@Override
public void collect(int doc) throws IOException {
int facetOrd = facetFieldTermsIndex.getOrd(doc);
// Outside the facet-prefix ord window: nothing to count.
if (facetOrd < startFacetOrd || facetOrd >= endFacetOrd) {
return;
}
int groupOrd = groupFieldTermsIndex.getOrd(doc);
// Flatten (groupOrd, facetOrd) into a single int key; valueCount+1 leaves
// room for the -1 "no value" ord.
int segmentGroupedFacetsIndex = groupOrd * (facetFieldTermsIndex.getValueCount()+1) + facetOrd;
if (segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
return;
}
segmentTotalCount++;
segmentFacetCounts[facetOrd+1]++;
segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
// Record the pair by term bytes as well, so it can be re-resolved to ords
// in the next segment (ords are segment-local).
BytesRef groupKey;
if (groupOrd == -1) {
groupKey = null;
} else {
groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
}
BytesRef facetKey;
if (facetOrd == -1) {
facetKey = null;
} else {
facetKey = BytesRef.deepCopyOf(facetFieldTermsIndex.lookupOrd(facetOrd));
}
groupedFacetHits.add(new GroupedFacetHit(groupKey, facetKey));
}
// Flushes the previous segment's counts, rebinds the doc values for the new
// segment, replays earlier hits into the segment-local dedup set, and
// recomputes the facet ord window for the optional prefix.
@Override
protected void doSetNextReader(LeafReaderContext context) throws IOException {
if (segmentFacetCounts != null) {
segmentResults.add(createSegmentResult());
}
groupFieldTermsIndex = DocValues.getSorted(context.reader(), groupField);
facetFieldTermsIndex = DocValues.getSorted(context.reader(), facetField);
// 1+ to allow for the -1 "not set":
segmentFacetCounts = new int[facetFieldTermsIndex.getValueCount()+1];
segmentTotalCount = 0;
segmentGroupedFacetHits.clear();
for (GroupedFacetHit groupedFacetHit : groupedFacetHits) {
int facetOrd = groupedFacetHit.facetValue == null ? -1 : facetFieldTermsIndex.lookupTerm(groupedFacetHit.facetValue);
// Term absent from this segment: the pair cannot occur here, skip it.
if (groupedFacetHit.facetValue != null && facetOrd < 0) {
continue;
}
int groupOrd = groupedFacetHit.groupValue == null ? -1 : groupFieldTermsIndex.lookupTerm(groupedFacetHit.groupValue);
if (groupedFacetHit.groupValue != null && groupOrd < 0) {
continue;
}
int segmentGroupedFacetsIndex = groupOrd * (facetFieldTermsIndex.getValueCount()+1) + facetOrd;
segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
}
if (facetPrefix != null) {
startFacetOrd = facetFieldTermsIndex.lookupTerm(facetPrefix);
if (startFacetOrd < 0) {
// Points to the ord one higher than facetPrefix
startFacetOrd = -startFacetOrd - 1;
}
// Build an upper bound by appending a maximal term to the prefix.
BytesRefBuilder facetEndPrefix = new BytesRefBuilder();
facetEndPrefix.append(facetPrefix);
facetEndPrefix.append(UnicodeUtil.BIG_TERM);
endFacetOrd = facetFieldTermsIndex.lookupTerm(facetEndPrefix.get());
assert endFacetOrd < 0;
endFacetOrd = -endFacetOrd - 1; // Points to the ord one higher than facetEndPrefix
} else {
// No prefix: count every facet ord, including the -1 "missing" slot.
startFacetOrd = -1;
endFacetOrd = facetFieldTermsIndex.getValueCount();
}
}
@Override
protected SegmentResult createSegmentResult() throws IOException {
return new SegmentResult(segmentFacetCounts, segmentTotalCount, facetFieldTermsIndex.termsEnum(), startFacetOrd, endFacetOrd);
}
// Per-segment counts plus a TermsEnum positioned for merging; slot 0 of
// counts holds the "missing facet" tally.
private static class SegmentResult extends AbstractGroupFacetCollector.SegmentResult {
final TermsEnum tenum;
SegmentResult(int[] counts, int total, TermsEnum tenum, int startFacetOrd, int endFacetOrd) throws IOException {
super(counts, total - counts[0], counts[0], endFacetOrd+1);
this.tenum = tenum;
this.mergePos = startFacetOrd == -1 ? 1 : startFacetOrd+1;
if (mergePos < maxTermPos) {
assert tenum != null;
tenum.seekExact(startFacetOrd == -1 ? 0 : startFacetOrd);
mergeTerm = tenum.term();
}
}
@Override
protected void nextTerm() throws IOException {
mergeTerm = tenum.next();
}
}
}
// Implementation for multi valued facet fields.
static class MV extends TermGroupFacetCollector {
private SortedSetDocValues facetFieldDocTermOrds;
private TermsEnum facetOrdTermsEnum;
private int facetFieldNumTerms;
// Sole constructor; all sizing is delegated to the base class.
MV(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
super(groupField, facetField, facetPrefix, initialSize);
}
@Override
public void collect(int doc) throws IOException {
int groupOrd = groupFieldTermsIndex.getOrd(doc);
if (facetFieldNumTerms == 0) {
int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1);
if (facetPrefix != null || segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
return;
}
segmentTotalCount++;
segmentFacetCounts[facetFieldNumTerms]++;
segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
BytesRef groupKey;
if (groupOrd == -1) {
groupKey = null;
} else {
groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
}
groupedFacetHits.add(new GroupedFacetHit(groupKey, null));
return;
}
facetFieldDocTermOrds.setDocument(doc);
long ord;
boolean empty = true;
while ((ord = facetFieldDocTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
process(groupOrd, (int) ord);
empty = false;
}
if (empty) {
process(groupOrd, facetFieldNumTerms); // this facet ord is reserved for docs not containing facet field.
}
}
private void process(int groupOrd, int facetOrd) {
if (facetOrd < startFacetOrd || facetOrd >= endFacetOrd) {
return;
}
int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1) + facetOrd;
if (segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
return;
}
segmentTotalCount++;
segmentFacetCounts[facetOrd]++;
segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
BytesRef groupKey;
if (groupOrd == -1) {
groupKey = null;
} else {
groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
}
final BytesRef facetValue;
if (facetOrd == facetFieldNumTerms) {
facetValue = null;
} else {
facetValue = BytesRef.deepCopyOf(facetFieldDocTermOrds.lookupOrd(facetOrd));
}
groupedFacetHits.add(new GroupedFacetHit(groupKey, facetValue));
}
@Override
protected void doSetNextReader(LeafReaderContext context) throws IOException {
if (segmentFacetCounts != null) {
segmentResults.add(createSegmentResult());
}
groupFieldTermsIndex = DocValues.getSorted(context.reader(), groupField);
facetFieldDocTermOrds = DocValues.getSortedSet(context.reader(), facetField);
facetFieldNumTerms = (int) facetFieldDocTermOrds.getValueCount();
if (facetFieldNumTerms == 0) {
facetOrdTermsEnum = null;
} else {
facetOrdTermsEnum = facetFieldDocTermOrds.termsEnum();
}
// [facetFieldNumTerms() + 1] for all possible facet values and docs not containing facet field
segmentFacetCounts = new int[facetFieldNumTerms + 1];
segmentTotalCount = 0;
segmentGroupedFacetHits.clear();
for (GroupedFacetHit groupedFacetHit : groupedFacetHits) {
int groupOrd = groupedFacetHit.groupValue == null ? -1 : groupFieldTermsIndex.lookupTerm(groupedFacetHit.groupValue);
if (groupedFacetHit.groupValue != null && groupOrd < 0) {
continue;
}
int facetOrd;
if (groupedFacetHit.facetValue != null) {
if (facetOrdTermsEnum == null || !facetOrdTermsEnum.seekExact(groupedFacetHit.facetValue)) {
continue;
}
facetOrd = (int) facetOrdTermsEnum.ord();
} else {
facetOrd = facetFieldNumTerms;
}
// (facetFieldDocTermOrds.numTerms() + 1) for all possible facet values and docs not containing facet field
int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1) + facetOrd;
segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
}
if (facetPrefix != null) {
TermsEnum.SeekStatus seekStatus;
if (facetOrdTermsEnum != null) {
seekStatus = facetOrdTermsEnum.seekCeil(facetPrefix);
} else {
seekStatus = TermsEnum.SeekStatus.END;
}
if (seekStatus != TermsEnum.SeekStatus.END) {
startFacetOrd = (int) facetOrdTermsEnum.ord();
} else {
startFacetOrd = 0;
endFacetOrd = 0;
return;
}
BytesRefBuilder facetEndPrefix = new BytesRefBuilder();
facetEndPrefix.append(facetPrefix);
facetEndPrefix.append(UnicodeUtil.BIG_TERM);
seekStatus = facetOrdTermsEnum.seekCeil(facetEndPrefix.get());
if (seekStatus != TermsEnum.SeekStatus.END) {
endFacetOrd = (int) facetOrdTermsEnum.ord();
} else {
endFacetOrd = facetFieldNumTerms; // Don't include null...
}
} else {
startFacetOrd = 0;
endFacetOrd = facetFieldNumTerms + 1;
}
}
@Override
protected SegmentResult createSegmentResult() throws IOException {
return new SegmentResult(segmentFacetCounts, segmentTotalCount, facetFieldNumTerms, facetOrdTermsEnum, startFacetOrd, endFacetOrd);
}
private static class SegmentResult extends AbstractGroupFacetCollector.SegmentResult {
final TermsEnum tenum;
SegmentResult(int[] counts, int total, int missingCountIndex, TermsEnum tenum, int startFacetOrd, int endFacetOrd) throws IOException {
super(counts, total - counts[missingCountIndex], counts[missingCountIndex],
endFacetOrd == missingCountIndex + 1 ? missingCountIndex : endFacetOrd);
this.tenum = tenum;
this.mergePos = startFacetOrd;
if (tenum != null) {
tenum.seekExact(mergePos);
mergeTerm = tenum.term();
}
}
@Override
protected void nextTerm() throws IOException {
mergeTerm = tenum.next();
}
}
}
}
// Immutable (group, facet) value pair recorded so a hit counted in one segment can be
// re-registered (by term bytes, not ords) when the collector moves to the next segment.
class GroupedFacetHit {

    // Group value; null for documents without a group value.
    final BytesRef groupValue;
    // Facet value; null for documents without the facet field.
    final BytesRef facetValue;

    GroupedFacetHit(BytesRef groupValue, BytesRef facetValue) {
        this.groupValue = groupValue;
        this.facetValue = facetValue;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.prometheus;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import io.airlift.json.JsonCodec;
import io.trino.metadata.Metadata;
import io.trino.spi.HostAddress;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ConnectorSplit;
import io.trino.spi.connector.ConnectorSplitSource;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.Range;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.predicate.ValueSet;
import io.trino.spi.type.TypeManager;
import io.trino.spi.type.TypeOperators;
import io.trino.type.InternalTypeManager;
import org.apache.http.NameValuePair;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.temporal.TemporalAmount;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static io.airlift.json.JsonCodec.jsonCodec;
import static io.trino.metadata.MetadataManager.createTestMetadataManager;
import static io.trino.plugin.prometheus.MetadataUtil.METRIC_CODEC;
import static io.trino.plugin.prometheus.PrometheusClient.TIMESTAMP_COLUMN_TYPE;
import static io.trino.plugin.prometheus.PrometheusClock.fixedClockAt;
import static io.trino.plugin.prometheus.PrometheusSplitManager.OFFSET_MILLIS;
import static io.trino.plugin.prometheus.PrometheusSplitManager.decimalSecondString;
import static io.trino.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED;
import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
import static java.time.Instant.ofEpochMilli;
import static java.time.ZoneOffset.UTC;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.http.client.utils.URLEncodedUtils.parse;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
/**
 * Tests for {@code PrometheusSplit} and the split generation done by
 * {@code PrometheusSplitManager}: addressing, JSON round-trips, how a query range is
 * chunked into per-split instant queries, and timestamp predicate push-down.
 * Mocked Prometheus responses are simulated from {@link #promTimeValuesMock}.
 */
@Test(singleThreaded = true)
public class TestPrometheusSplit
{
    private PrometheusHttpServer prometheusHttpServer;
    private final PrometheusSplit split = new PrometheusSplit(URI.create("http://127.0.0.1/test.file"));
    private static final Metadata METADATA = createTestMetadataManager();
    private static final TypeManager TYPE_MANAGER = new InternalTypeManager(METADATA, new TypeOperators());
    // Batch size used when draining a split source; chosen larger than any expected split count.
    private static final int NUMBER_MORE_THAN_EXPECTED_NUMBER_SPLITS = 100;

    @BeforeClass
    public void setUp()
    {
        prometheusHttpServer = new PrometheusHttpServer();
    }

    @Test
    public void testAddresses()
    {
        // http split with default port
        PrometheusSplit httpSplit = new PrometheusSplit(URI.create("http://prometheus.com/prometheus"));
        assertEquals(httpSplit.getAddresses(), ImmutableList.of(HostAddress.fromString("prometheus.com")));
        assertTrue(httpSplit.isRemotelyAccessible());
        // http split with custom port
        httpSplit = new PrometheusSplit(URI.create("http://prometheus.com:8080/prometheus"));
        assertEquals(httpSplit.getAddresses(), ImmutableList.of(HostAddress.fromParts("prometheus.com", 8080)));
        assertTrue(httpSplit.isRemotelyAccessible());
        // https split with default port
        PrometheusSplit httpsSplit = new PrometheusSplit(URI.create("https://prometheus.com/prometheus"));
        assertEquals(httpsSplit.getAddresses(), ImmutableList.of(HostAddress.fromString("prometheus.com")));
        assertTrue(httpsSplit.isRemotelyAccessible());
        // https split with custom port
        httpsSplit = new PrometheusSplit(URI.create("https://prometheus.com:8443/prometheus"));
        assertEquals(httpsSplit.getAddresses(), ImmutableList.of(HostAddress.fromParts("prometheus.com", 8443)));
        assertTrue(httpsSplit.isRemotelyAccessible());
    }

    @Test
    public void testJsonRoundTrip()
    {
        // A split serialized to JSON and back must keep its URI and addresses.
        JsonCodec<PrometheusSplit> codec = jsonCodec(PrometheusSplit.class);
        String json = codec.toJson(split);
        PrometheusSplit copy = codec.fromJson(json);
        assertEquals(copy.getUri(), split.getUri());
        assertEquals(copy.getAddresses(), ImmutableList.of(HostAddress.fromString("127.0.0.1")));
        assertTrue(copy.isRemotelyAccessible());
    }

    @Test
    public void testQueryWithTableNameNeedingURLEncodeInSplits()
            throws URISyntaxException
    {
        // Table name "up now" contains a space that must be URL encoded in the split query.
        Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
        PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prom-metrics-non-standard-name.json"));
        PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TYPE_MANAGER);
        PrometheusTable table = client.getTable("default", "up now");
        PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
        ConnectorSplitSource splits = splitManager.getSplits(
                null,
                null,
                new PrometheusTableHandle("default", table.getName()),
                null,
                (DynamicFilter) null);
        PrometheusSplit split = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
        String queryInSplit = split.getUri().getQuery();
        // First split's end time: now minus the full range plus one chunk, adjusted by the
        // per-split overlap offset (20 gaps for a 21-day range in 1-day chunks).
        String timeShouldBe = decimalSecondString(now.toEpochMilli() -
                config.getMaxQueryRangeDuration().toMillis() +
                config.getQueryChunkSizeDuration().toMillis() -
                OFFSET_MILLIS * 20);
        assertEquals(queryInSplit,
                new URI("http://doesnotmatter:9090/api/v1/query?query=up+now[" + getQueryChunkSizeDurationAsPrometheusCompatibleDurationString(config) + "]" + "&time=" +
                        timeShouldBe).getQuery());
    }

    @Test
    public void testQueryDividedIntoSplitsFirstSplitHasRightTime()
            throws URISyntaxException
    {
        Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
        PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prometheus-metrics.json"));
        PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TYPE_MANAGER);
        PrometheusTable table = client.getTable("default", "up");
        PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
        ConnectorSplitSource splits = splitManager.getSplits(
                null,
                null,
                new PrometheusTableHandle("default", table.getName()),
                null,
                (DynamicFilter) null);
        PrometheusSplit split = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
        String queryInSplit = split.getUri().getQuery();
        String timeShouldBe = decimalSecondString(now.toEpochMilli() -
                config.getMaxQueryRangeDuration().toMillis() +
                config.getQueryChunkSizeDuration().toMillis() -
                OFFSET_MILLIS * 20);
        assertEquals(queryInSplit,
                new URI("http://doesnotmatter:9090/api/v1/query?query=up[" + getQueryChunkSizeDurationAsPrometheusCompatibleDurationString(config) + "]" + "&time=" +
                        timeShouldBe).getQuery());
    }

    @Test
    public void testQueryDividedIntoSplitsLastSplitHasRightTime()
            throws URISyntaxException
    {
        Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
        PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prometheus-metrics.json"));
        PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TYPE_MANAGER);
        PrometheusTable table = client.getTable("default", "up");
        PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
        ConnectorSplitSource splitsMaybe = splitManager.getSplits(
                null,
                null,
                new PrometheusTableHandle("default", table.getName()),
                null,
                (DynamicFilter) null);
        List<ConnectorSplit> splits = splitsMaybe.getNextBatch(NOT_PARTITIONED, NUMBER_MORE_THAN_EXPECTED_NUMBER_SPLITS).getNow(null).getSplits();
        int lastSplitIndex = splits.size() - 1;
        PrometheusSplit lastSplit = (PrometheusSplit) splits.get(lastSplitIndex);
        String queryInSplit = lastSplit.getUri().getQuery();
        // The last split's end time is "now" exactly.
        String timeShouldBe = decimalSecondString(now.toEpochMilli());
        URI uriAsFormed = new URI("http://doesnotmatter:9090/api/v1/query?query=up[" +
                getQueryChunkSizeDurationAsPrometheusCompatibleDurationString(config) + "]" +
                "&time=" + timeShouldBe);
        assertEquals(queryInSplit, uriAsFormed.getQuery());
    }

    @Test
    public void testQueryDividedIntoSplitsShouldHaveCorrectSpacingBetweenTimes()
    {
        Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
        PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prometheus-metrics.json"));
        PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TYPE_MANAGER);
        PrometheusTable table = client.getTable("default", "up");
        PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
        ConnectorSplitSource splits = splitManager.getSplits(
                null,
                null,
                new PrometheusTableHandle("default", table.getName()),
                null,
                (DynamicFilter) null);
        PrometheusSplit split1 = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
        Map<String, String> paramsMap1 = parse(split1.getUri(), StandardCharsets.UTF_8).stream().collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue));
        PrometheusSplit split2 = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
        Map<String, String> paramsMap2 = parse(split2.getUri(), StandardCharsets.UTF_8).stream().collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue));
        assertEquals(paramsMap1.get("query"), "up[1d]");
        assertEquals(paramsMap2.get("query"), "up[1d]");
        // Consecutive split end times must be exactly one chunk apart.
        long diff = Double.valueOf(paramsMap2.get("time")).longValue() - Double.valueOf(paramsMap1.get("time")).longValue();
        assertEquals(config.getQueryChunkSizeDuration().getValue(TimeUnit.SECONDS), diff, 0.0001);
    }

    @Test
    public void testSplitTimesCorrect()
    {
        // 3-day range in 1-day chunks => three split end times.
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(3, TimeUnit.DAYS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(1, TimeUnit.DAYS);
        Instant now = ofEpochMilli(1000000000L);
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName");
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(
                now,
                maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        List<String> expectedSplitTimes = ImmutableList.of(
                "827199.998", "913599.999", "1000000");
        assertEquals(splitTimes, expectedSplitTimes);
    }

    @Test
    public void testSplitTimesCorrectNonModuloZeroDurationToChunk()
    {
        // 3-day range with 2-day chunks does not divide evenly; still covered by two splits.
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(3, TimeUnit.DAYS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(2, TimeUnit.DAYS);
        Instant now = ofEpochMilli(1000000000L);
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName");
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(now, maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        List<String> expectedSplitTimes = ImmutableList.of(
                "827199.999", "1000000");
        assertEquals(splitTimes, expectedSplitTimes);
    }

    @Test
    public void testSplitTimesCorrectVersusMock()
    {
        // Chunked queries driven by the generated split times must recover all mock data points.
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(120, TimeUnit.SECONDS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(30, TimeUnit.SECONDS);
        Instant now = ofEpochMilli(1568638172000L);
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName");
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(now, maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        List<String> promTimesReturned = mockPrometheusResponseToChunkedQueries(queryChunkSizeDuration, splitTimes);
        assertEquals(promTimesReturned, convertMockTimesToStrings(promTimeValuesMock));
    }

    @Test
    public void testSplitTimesAreTimesNearBoundaryNotMissing()
    {
        // Same as above but with "now" one millisecond before a data point boundary.
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(120, TimeUnit.SECONDS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(30, TimeUnit.SECONDS);
        Instant now = ofEpochMilli(1568638171999L);
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName");
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(now, maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        List<String> promTimesReturned = mockPrometheusResponseToChunkedQueries(queryChunkSizeDuration, splitTimes);
        assertEquals(promTimesReturned, convertMockTimesToStrings(promTimeValuesMock));
    }

    @Test
    public void testMockPrometheusResponseShouldBeCorrectWhenUpperBoundaryAlignsWithData()
    {
        List<Double> expectedResponse = ImmutableList.of(1568638142.0, 1568638157.0, 1568638171.999);
        assertEquals(mockPrometheusResponseToQuery(new io.airlift.units.Duration(30, TimeUnit.SECONDS), "1568638171.999"), expectedResponse);
    }

    @Test
    public void testMockPrometheusResponseShouldBeCorrectWhenLowerBoundaryAlignsWithData()
    {
        List<Double> expectedResponse = ImmutableList.of(1568638142.0, 1568638157.0, 1568638171.999);
        assertEquals(mockPrometheusResponseToQuery(new io.airlift.units.Duration(30, TimeUnit.SECONDS), "1568638172."), expectedResponse);
    }

    @Test
    public void testMockPrometheusResponseShouldBeCorrectWhenLowerBoundaryLaterThanData()
    {
        List<Double> expectedResponse = ImmutableList.of(1568638157.0, 1568638171.999);
        assertEquals(mockPrometheusResponseToQuery(new io.airlift.units.Duration(30, TimeUnit.SECONDS), "1568638172.001"), expectedResponse);
    }

    @Test
    public void testMockPrometheusResponseWithSeveralChunksShouldBeCorrect()
    {
        List<String> expectedResponse = ImmutableList.of("1568638112", "1568638126.997", "1568638142", "1568638157", "1568638171.999");
        List<String> splitTimes = ImmutableList.of("1568638141.999", "1568638172.");
        assertEquals(mockPrometheusResponseToChunkedQueries(new io.airlift.units.Duration(30, TimeUnit.SECONDS), splitTimes), expectedResponse);
    }

    @Test
    public void testPredicatePushDownLowerBoundDirect()
    {
        // A >= predicate on the timestamp column becomes the lower time bound.
        Range lowRange = Range.greaterThanOrEqual(TIMESTAMP_COLUMN_TYPE, packDateTimeWithZone(1570460709643L, UTC_KEY));
        ValueSet valueSet = ValueSet.ofRanges(lowRange);
        Domain testDomain = Domain.create(valueSet, false);
        TupleDomain<ColumnHandle> testTupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(
                new PrometheusColumnHandle("timestamp", TIMESTAMP_COLUMN_TYPE, 2), testDomain));
        PrometheusPredicateTimeInfo predicateTimes = PrometheusSplitManager.determinePredicateTimes(testTupleDomain).orElseThrow();
        Instant expected = ofEpochMilli(1570460709643L);
        assertEquals(predicateTimes.getPredicateLowerTimeBound().orElseThrow(), expected);
    }

    // NOTE(review): this test is disabled and no reason is recorded — confirm intent before re-enabling.
    @Test(enabled = false)
    public void testPredicatePushDownSetsLowerBoundOnly()
    {
        long predicateLowValue = 1568638171999L - 600000L;
        Range lowRange = Range.greaterThanOrEqual(TIMESTAMP_COLUMN_TYPE, packDateTimeWithZone(predicateLowValue, UTC_KEY));
        ValueSet valueSet = ValueSet.ofRanges(lowRange);
        Domain testDomain = Domain.create(valueSet, false);
        TupleDomain<ColumnHandle> testTupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(
                new PrometheusColumnHandle("timestamp", TIMESTAMP_COLUMN_TYPE, 2), testDomain));
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName")
                .withPredicate(testTupleDomain);
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(120, TimeUnit.SECONDS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(30, TimeUnit.SECONDS);
        Instant now = ofEpochMilli(1568638171999L);
        TemporalAmount maxQueryAsTime = java.time.Duration.ofMillis(maxQueryRangeDuration.toMillis());
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(now, maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        String earliestSplit = splitTimes.get(0);
        Instant earliestSplitAsTime = ofEpochMilli(longFromDecimalSecondString(earliestSplit));
        TemporalAmount queryChunkAsTime = java.time.Duration.ofMillis(queryChunkSizeDuration.toMillis());
        Instant startOfQuery = earliestSplitAsTime.minus(queryChunkAsTime);
        // Query start should come from the predicate, not from (now - maxQueryRange).
        assertNotEquals(startOfQuery, now.minus(maxQueryAsTime).minus(java.time.Duration.ofMillis((splitTimes.size() - 1) * OFFSET_MILLIS)));
        assertEquals(startOfQuery.toEpochMilli(), ofEpochMilli(predicateLowValue).toEpochMilli() - ((splitTimes.size() - 1) * OFFSET_MILLIS));
    }

    @Test
    public void testPredicatePushDownSetsUpperBoundOnly()
    {
        // A <= predicate on the timestamp column caps the last split's end time.
        long predicateHighValue = 1568638171999L;
        Range highRange = Range.lessThanOrEqual(TIMESTAMP_COLUMN_TYPE, packDateTimeWithZone(predicateHighValue, UTC_KEY));
        ValueSet valueSet = ValueSet.ofRanges(highRange);
        Domain testDomain = Domain.create(valueSet, false);
        TupleDomain<ColumnHandle> testTupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(
                new PrometheusColumnHandle("timestamp", TIMESTAMP_COLUMN_TYPE, 2), testDomain));
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName")
                .withPredicate(testTupleDomain);
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(120, TimeUnit.SECONDS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(30, TimeUnit.SECONDS);
        Instant now = ofEpochMilli(1568638171999L + 600000L);
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(now, maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        TemporalAmount expectedMaxQueryAsTime = java.time.Duration.ofMillis(maxQueryRangeDuration.toMillis() +
                ((splitTimes.size() - 1) * OFFSET_MILLIS));
        String lastSplit = splitTimes.get(splitTimes.size() - 1);
        Instant lastSplitAsTime = ofEpochMilli(longFromDecimalSecondString(lastSplit));
        String earliestSplit = splitTimes.get(0);
        Instant earliestSplitAsTime = ofEpochMilli(longFromDecimalSecondString(earliestSplit));
        TemporalAmount queryChunkAsTime = java.time.Duration.ofMillis(queryChunkSizeDuration.toMillis());
        java.time.Duration actualMaxDuration = Duration.between(earliestSplitAsTime
                .minus(queryChunkAsTime), lastSplitAsTime);
        assertEquals(lastSplitAsTime.toEpochMilli(), 1568638171999L);
        assertEquals(actualMaxDuration, expectedMaxQueryAsTime);
    }

    @Test
    public void testPredicatePushDownSetsUpperAndLowerBound()
    {
        // Two = predicates (low and high) bound the query window on both sides.
        long predicateHighValue = 1568638171999L;
        Range highRange = Range.equal(TIMESTAMP_COLUMN_TYPE, packDateTimeWithZone(predicateHighValue, UTC_KEY));
        long predicateLowValue = 1568638171999L - 600000L;
        Range lowRange = Range.equal(TIMESTAMP_COLUMN_TYPE, packDateTimeWithZone(predicateLowValue, UTC_KEY));
        ValueSet valueSet = ValueSet.ofRanges(lowRange, highRange);
        Domain testDomain = Domain.create(valueSet, false);
        TupleDomain<ColumnHandle> testTupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(
                new PrometheusColumnHandle("timestamp", TIMESTAMP_COLUMN_TYPE, 2), testDomain));
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName")
                .withPredicate(testTupleDomain);
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(120, TimeUnit.SECONDS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(30, TimeUnit.SECONDS);
        Instant now = ofEpochMilli(1568638171999L + 1200000L);
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(now, maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        TemporalAmount expectedMaxQueryAsTime = java.time.Duration.ofMillis(new io.airlift.units.Duration(10, TimeUnit.MINUTES).toMillis() +
                ((splitTimes.size() - 1) * OFFSET_MILLIS));
        String lastSplit = splitTimes.get(splitTimes.size() - 1);
        Instant lastSplitAsTime = ofEpochMilli(longFromDecimalSecondString(lastSplit));
        String earliestSplit = splitTimes.get(0);
        Instant earliestSplitAsTime = ofEpochMilli(longFromDecimalSecondString(earliestSplit));
        TemporalAmount queryChunkAsTime = java.time.Duration.ofMillis(queryChunkSizeDuration.toMillis());
        java.time.Duration actualMaxDuration = Duration.between(earliestSplitAsTime
                .minus(queryChunkAsTime), lastSplitAsTime);
        assertEquals(lastSplitAsTime.toEpochMilli(), 1568638171999L);
        assertEquals(actualMaxDuration, expectedMaxQueryAsTime);
    }

    @Test
    public void testEmptyPredicatePredicatePushDown()
    {
        // With no predicate, the window is derived purely from "now" and maxQueryRangeDuration.
        long predicateLowValue = 1570460709643L;
        PrometheusTableHandle prometheusTableHandle = new PrometheusTableHandle("schemaName", "tableName");
        io.airlift.units.Duration maxQueryRangeDuration = new io.airlift.units.Duration(120, TimeUnit.SECONDS);
        io.airlift.units.Duration queryChunkSizeDuration = new io.airlift.units.Duration(30, TimeUnit.SECONDS);
        Instant now = ofEpochMilli(1568638171999L);
        TemporalAmount maxQueryAsTime = java.time.Duration.ofMillis(maxQueryRangeDuration.toMillis());
        List<String> splitTimes = PrometheusSplitManager.generateTimesForSplits(now, maxQueryRangeDuration, queryChunkSizeDuration, prometheusTableHandle);
        String earliestSplit = splitTimes.get(0);
        Instant earliestSplitAsTime = ofEpochMilli(longFromDecimalSecondString(earliestSplit));
        TemporalAmount queryChunkAsTime = java.time.Duration.ofMillis(queryChunkSizeDuration.toMillis());
        Instant startOfQuery = earliestSplitAsTime.minus(queryChunkAsTime);
        assertEquals(startOfQuery, now.minus(maxQueryAsTime).minus(java.time.Duration.ofMillis((splitTimes.size() - 1) * OFFSET_MILLIS)));
        assertNotEquals(startOfQuery.toEpochMilli(), ofEpochMilli(predicateLowValue).toEpochMilli());
    }

    /**
     * mock Prometheus chunked query responses (time values only)
     *
     * @param queryChunkDuration the duration value that would be used for each query, `30s` for instance
     * @param splitTimes the end times that would be used for each Prometheus instant query
     * @return the values from the Prometheus data that would be return by all the chunked queries
     */
    private static List<String> mockPrometheusResponseToChunkedQueries(io.airlift.units.Duration queryChunkDuration, List<String> splitTimes)
    {
        return Lists.reverse(splitTimes).stream()
                .map(endTime -> mockPrometheusResponseToQuery(queryChunkDuration, endTime))
                .flatMap(Collection::stream)
                .sorted()
                .map(TestPrometheusSplit::doubleToPlainString)
                .collect(Collectors.toList());
    }

    /**
     * mock Prometheus instant query
     */
    private static List<Double> mockPrometheusResponseToQuery(io.airlift.units.Duration queryChunkDuration, String endTimeStr)
    {
        // Return the mock data points falling in (endTime - duration, endTime], inclusive on both ends.
        Double endTime = Double.valueOf(endTimeStr);
        Double duration = queryChunkDuration.getValue(TimeUnit.SECONDS);
        return promTimeValuesMock.stream()
                .filter(promTimeValue -> ((endTime - duration) <= promTimeValue) && (promTimeValue <= endTime))
                .collect(Collectors.toList());
    }

    /**
     * Convert list of Double to list of String and avoid scientific notation
     */
    private static List<String> convertMockTimesToStrings(List<Double> times)
    {
        return times.stream()
                .map(TestPrometheusSplit::doubleToPlainString)
                .collect(Collectors.toList());
    }

    /**
     * Convert Double to String and avoid scientific notation
     */
    private static String doubleToPlainString(Double aDouble)
    {
        return new BigDecimal(aDouble.toString()).stripTrailingZeros().toPlainString();
    }

    /**
     * Prometheus mock data
     * The array below represents to a response to from real data:
     * $ curl "http://127.0.0.1:9090/api/v1/query?query=up[120s]&time=1568638172"
     * Just the time items from the "values" section of the response
     */
    private static final ImmutableList<Double> promTimeValuesMock = new ImmutableList.Builder<Double>()
            .add(1568638066.999)
            .add(1568638081.996)
            .add(1568638097.0)
            .add(1568638112.0)
            .add(1568638126.997)
            .add(1568638142.0)
            .add(1568638157.0)
            .add(1568638171.999)
            .build();

    // Parse a decimal-seconds string (e.g. "1568638171.999") into epoch milliseconds.
    private static long longFromDecimalSecondString(String decimalString)
    {
        return new BigDecimal(decimalString).multiply(new BigDecimal(1000L)).longValueExact();
    }

    // Render the configured chunk size as a Prometheus duration literal, e.g. "1d" or "30s".
    private static String getQueryChunkSizeDurationAsPrometheusCompatibleDurationString(PrometheusConnectorConfig config)
    {
        return config.getQueryChunkSizeDuration().roundTo(config.getQueryChunkSizeDuration().getUnit()) +
                io.airlift.units.Duration.timeUnitToString(config.getQueryChunkSizeDuration().getUnit());
    }

    // Shared connector config: 21-day query range, 1-day chunks, 30-second metadata cache.
    private static PrometheusConnectorConfig getCommonConfig(URI dataUri)
    {
        PrometheusConnectorConfig config = new PrometheusConnectorConfig();
        config.setPrometheusURI(dataUri);
        config.setMaxQueryRangeDuration(new io.airlift.units.Duration(21, DAYS));
        config.setQueryChunkSizeDuration(new io.airlift.units.Duration(1, DAYS));
        config.setCacheDuration(new io.airlift.units.Duration(30, SECONDS));
        return config;
    }
}
| |
/*
* Open Source Software published under the Apache Licence, Version 2.0.
*/
package io.github.vocabhunter.gui.controller;
import io.github.vocabhunter.analysis.core.GuiTaskHandler;
import io.github.vocabhunter.analysis.session.EnrichedSessionState;
import io.github.vocabhunter.analysis.session.FileNameTool;
import io.github.vocabhunter.analysis.session.SessionState;
import io.github.vocabhunter.gui.dialogues.*;
import io.github.vocabhunter.gui.model.MainModel;
import io.github.vocabhunter.gui.model.SessionModel;
import io.github.vocabhunter.gui.services.SessionFileService;
import io.github.vocabhunter.gui.status.GuiTask;
import io.github.vocabhunter.gui.status.StatusManager;
import javafx.stage.Stage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.Path;
import javax.inject.Inject;
import javax.inject.Singleton;
@Singleton
public class GuiFileHandler {
private static final Logger LOG = LoggerFactory.getLogger(GuiFileHandler.class);

// Primary stage, supplied once via initialise().
private Stage stage;

// Collaborators are field-injected via javax.inject.
@Inject
private FileDialogueFactory fileDialogueFactory;

@Inject
private SessionFileService sessionFileService;

@Inject
private StatusManager statusManager;

@Inject
private MainModel model;

@Inject
private SessionStateHandler sessionStateHandler;

@Inject
private GuiTaskHandler guiTaskHandler;
/**
 * Record the primary stage for later use by this handler.
 *
 * @param stage the application's primary stage
 */
public void initialise(final Stage stage) {
    this.stage = stage;
}
/**
 * Start an export, unless the status manager reports another action in progress.
 */
public void handleExport() {
    if (!statusManager.beginExport()) {
        return;
    }
    guiTaskHandler.pauseThenExecuteOnGuiThread(() -> processExport());
}
/**
 * Ask the user for an export destination and, when one is chosen, run the export
 * in the background; otherwise mark the action as complete.
 */
private void processExport() {
    Path file = chooseFile(FileDialogueType.EXPORT_SELECTION);
    if (file == null) {
        // User cancelled the dialogue.
        statusManager.completeAction();
        return;
    }
    statusManager.performAction(file);
    Path fileWithSuffix = FileNameTool.ensureExportFileHasSuffix(file);
    SessionState sessionState = sessionStateHandler.getSessionState();
    GuiTask<Boolean> task = new GuiTask<>(
        guiTaskHandler,
        statusManager,
        () -> processExport(fileWithSuffix, sessionState),
        e -> FileErrorTool.export(fileWithSuffix, e));
    guiTaskHandler.executeInBackground(task);
}
private boolean processExport(final Path file, final SessionState sessionState) {
LOG.info("Exporting to file '{}'", file);
sessionFileService.exportSelection(sessionState, file);
return true;
}
public void processOpenOrNew(final Path file) {
if (statusManager.beginOpenSession()) {
guiTaskHandler.pauseThenExecuteOnGuiThread(() -> processOpenOrNewInternal(file));
}
}
private void processOpenOrNewInternal(final Path file) {
if (unsavedChangesCheck()) {
LOG.info("Opening file '{}'", file);
GuiTask<EnrichedSessionState> task = new GuiTask<>(
guiTaskHandler,
statusManager,
() -> sessionFileService.createOrOpenSession(file),
this::finishOpen,
e -> FileErrorTool.open(file, e));
guiTaskHandler.executeInBackground(task);
} else {
statusManager.completeAction();
}
}
public void handleOpenSession() {
if (statusManager.beginOpenSession()) {
guiTaskHandler.pauseThenExecuteOnGuiThread(this::processOpenSession);
}
}
private void processOpenSession() {
Path file = checkUnsavedChangesAndChooseFile(FileDialogueType.OPEN_SESSION);
if (file == null) {
statusManager.completeAction();
} else {
statusManager.performAction(file);
LOG.info("Opening session file '{}'", file);
GuiTask<EnrichedSessionState> task = new GuiTask<>(
guiTaskHandler,
statusManager,
() -> sessionFileService.read(file),
this::finishOpen,
e -> FileErrorTool.open(file, e));
guiTaskHandler.executeInBackground(task);
}
}
public void handleNewSession() {
if (statusManager.beginNewSession()) {
guiTaskHandler.pauseThenExecuteOnGuiThread(this::processNewSession);
}
}
private void processNewSession() {
Path file = checkUnsavedChangesAndChooseFile(FileDialogueType.NEW_SESSION);
if (file == null) {
statusManager.completeAction();
} else {
statusManager.performAction(file);
LOG.info("New session from '{}'", file);
GuiTask<EnrichedSessionState> task = new GuiTask<>(
guiTaskHandler,
statusManager,
() -> sessionFileService.createNewSession(file),
this::finishOpen,
e -> FileErrorTool.open(file, e));
guiTaskHandler.executeInBackground(task);
}
}
private void finishOpen(final EnrichedSessionState enrichedState) {
SessionState state = enrichedState.getState();
SessionModel sessionModel = sessionStateHandler.addSession(state);
model.replaceSessionModel(state, sessionModel, enrichedState.getFile().orElse(null));
statusManager.replaceSession(sessionModel.getPosition(), sessionModel.getProgress());
}
public void handleSave() {
if (model.hasSessionFile()) {
if (statusManager.beginSaveSession()) {
guiTaskHandler.pauseThenExecuteOnGuiThread(this::processSave);
}
} else {
handleSaveAs();
}
}
public void handleSaveAs() {
if (statusManager.beginSaveSession()) {
guiTaskHandler.pauseThenExecuteOnGuiThread(this::processSaveAs);
}
}
private void processSaveAs() {
Path file = chooseFile(FileDialogueType.SAVE_SESSION);
if (file == null) {
statusManager.completeAction();
} else {
file = FileNameTool.ensureSessionFileHasSuffix(file);
model.setSessionFile(file);
processSave();
}
}
private void processSave() {
Path file = model.getSessionFile();
statusManager.performAction(file);
LOG.info("Saving file '{}'", file);
SessionState sessionState = sessionStateHandler.getSessionState();
GuiTask<Boolean> task = new GuiTask<>(
guiTaskHandler,
statusManager,
() -> saveFile(file, sessionState),
b -> model.setChangesSaved(true),
e -> FileErrorTool.save(file, e)
);
guiTaskHandler.executeInBackground(task);
}
private boolean saveFile(final Path file, final SessionState sessionState) {
sessionFileService.write(file, sessionState);
return true;
}
private Path checkUnsavedChangesAndChooseFile(final FileDialogueType type) {
if (unsavedChangesCheck()) {
return chooseFile(type);
} else {
return null;
}
}
private Path chooseFile(final FileDialogueType type) {
FileDialogue dialogue = fileDialogueFactory.create(type, stage);
dialogue.showChooser();
if (dialogue.isFileSelected()) {
return dialogue.getSelectedFile();
} else {
return null;
}
}
public boolean unsavedChangesCheck() {
if (model.isChangesSaved()) {
return true;
} else {
UnsavedChangesDialogue dialogue = new UnsavedChangesDialogue(model.getSessionFile());
dialogue.showDialogue();
switch (dialogue.getUserResponse()) {
case SAVE:
return saveChanges();
case DISCARD:
return true;
default:
return false;
}
}
}
private boolean saveChanges() {
if (model.hasSessionFile()) {
saveChangesInternal();
return true;
} else {
return saveChangesAs();
}
}
private boolean saveChangesAs() {
Path file = chooseFile(FileDialogueType.SAVE_SESSION);
if (file == null) {
return false;
} else {
file = FileNameTool.ensureSessionFileHasSuffix(file);
model.setSessionFile(file);
return saveChangesInternal();
}
}
private boolean saveChangesInternal() {
Path file = model.getSessionFile();
try {
LOG.info("Saving file '{}'", file);
sessionFileService.write(file, sessionStateHandler.getSessionState());
model.setChangesSaved(true);
return true;
} catch (final RuntimeException e) {
FileErrorTool.save(file, e);
return false;
}
}
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/authz/tags/sakai-10.6/authz-tool/tool/src/java/org/sakaiproject/authz/tool/PermissionsHelperAction.java $
* $Id: PermissionsHelperAction.java 306833 2014-03-05 23:53:29Z enietzel@anisakai.com $
***********************************************************************************
*
* Copyright (c) 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.authz.tool;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.AuthzPermissionException;
import org.sakaiproject.authz.api.GroupAlreadyDefinedException;
import org.sakaiproject.authz.api.GroupIdInvalidException;
import org.sakaiproject.authz.api.GroupNotDefinedException;
import org.sakaiproject.authz.api.PermissionsHelper;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.api.RoleAlreadyDefinedException;
import org.sakaiproject.authz.cover.AuthzGroupService;
import org.sakaiproject.authz.cover.FunctionManager;
import org.sakaiproject.cheftool.Context;
import org.sakaiproject.cheftool.JetspeedRunData;
import org.sakaiproject.cheftool.RunData;
import org.sakaiproject.cheftool.VelocityPortlet;
import org.sakaiproject.cheftool.VelocityPortletPaneledAction;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.cover.EntityManager;
import org.sakaiproject.event.api.SessionState;
import org.sakaiproject.site.api.Group;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.ToolException;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.tool.cover.ToolManager;
import org.sakaiproject.util.ResourceLoader;
/**
* This is a helper interface to the Permissions tool.
*/
public class PermissionsHelperAction extends VelocityPortletPaneledAction
{
    /** Our logger. */
    private static final Log M_log = LogFactory.getLog(PermissionsHelperAction.class);

    /** Resource bundle for this tool's messages. */
    private static final ResourceLoader rb = new ResourceLoader("authz-tool");

    /** Tool-session marker set when the helper has been started. */
    private static final String STARTED = "sakaiproject.permissions.started";

    /** State attributes for Permissions mode - when it is MODE_DONE the tool can process the results. */
    public static final String STATE_MODE = "pemissions.mode";

    /** State attribute for the realm id - users should set before starting. */
    public static final String STATE_REALM_ID = "permission.realmId";

    /** State attribute for the realm id - users should set before starting. */
    public static final String STATE_REALM_ROLES_ID = "permission.realmRolesId";

    /** State attribute for the description of what's being edited - users should set before starting. */
    public static final String STATE_DESCRIPTION = "permission.description";

    /** State attribute for the lock/ability string prefix to be presented / edited - users should set before starting. */
    public static final String STATE_PREFIX = "permission.prefix";

    /** State attributes for storing the realm being edited. */
    private static final String STATE_REALM_EDIT = "permission.realm";

    /** State attributes for storing the current selected realm being edited. */
    private static final String STATE_VIEW_REALM_EDIT = "permission.view.realm";

    /** State attributes for storing the abilities, filtered by the prefix. */
    private static final String STATE_ABILITIES = "permission.abilities";

    /** State attribute for storing the roles to display. */
    private static final String STATE_ROLES = "permission.roles";

    /** State attribute for storing the abilities of each role for this resource. */
    private static final String STATE_ROLE_ABILITIES = "permission.rolesAbilities";

    /** State attribute for permission description */
    public static final String STATE_PERMISSION_DESCRIPTIONS = "permission.descriptions";

    /** Modes. */
    public static final String MODE_MAIN = "main";

    /** vm files for each mode. TODO: path too hard coded */
    private static final String TEMPLATE_MAIN = "helper/chef_permissions";

    /** State attribute: whether the invoking tool is group aware. */
    private static final String STATE_GROUP_AWARE = "state_group_aware";

    /**
     * Intercept dispatch: once the helper is done (mode cleared after STARTED was set),
     * redirect back to the caller's "helper done" URL instead of rendering a panel.
     */
    protected void toolModeDispatch(String methodBase, String methodExt, HttpServletRequest req, HttpServletResponse res)
            throws ToolException
    {
        SessionState sstate = getState(req);
        ToolSession toolSession = SessionManager.getCurrentToolSession();

        String mode = (String) sstate.getAttribute(STATE_MODE);
        Object started = toolSession.getAttribute(STARTED);

        if (mode == null && started != null)
        {
            toolSession.removeAttribute(STARTED);
            Tool tool = ToolManager.getCurrentTool();

            String url = (String) SessionManager.getCurrentToolSession().getAttribute(tool.getId() + Tool.HELPER_DONE_URL);

            SessionManager.getCurrentToolSession().removeAttribute(tool.getId() + Tool.HELPER_DONE_URL);

            try
            {
                res.sendRedirect(url);
            }
            catch (IOException e)
            {
                // BUG FIX: was "Log.warn("chef", this + " : ", e)" - a static call on the
                // commons-logging Log *interface*, which has no such method; use this class's logger.
                M_log.warn(this + " : ", e);
            }

            return;
        }

        super.toolModeDispatch(methodBase, methodExt, req, res);
    }

    /**
     * Allow extension classes to control which build method gets called for this panel
     * @param panel
     * @return
     */
    protected String panelMethodName(String panel)
    {
        // we are always calling buildMainPanelContext
        return "buildMainPanelContext";
    }

    /**
     * Default is to use when Portal starts up
     */
    public String buildMainPanelContext(VelocityPortlet portlet, Context context, RunData rundata, SessionState sstate)
    {
        String mode = (String) sstate.getAttribute(STATE_MODE);

        if (mode == null)
        {
            initHelper(portlet, context, rundata, sstate);
        }

        String template = buildHelperContext(portlet, context, rundata, sstate);
        if (template == null)
        {
            addAlert(sstate, rb.getString("java.alert.prbset"));
        }
        else
        {
            return template;
        }

        return null;
    }

    /**
     * Copy the caller's setup (prefix, target ref, description, ...) from the tool session
     * into helper state and switch into MODE_MAIN.
     */
    protected void initHelper(VelocityPortlet portlet, Context context, RunData rundata, SessionState state)
    {
        ToolSession toolSession = SessionManager.getCurrentToolSession();

        String prefix = (String) toolSession.getAttribute(PermissionsHelper.PREFIX);
        String targetRef = (String) toolSession.getAttribute(PermissionsHelper.TARGET_REF);
        String description = (String) toolSession.getAttribute(PermissionsHelper.DESCRIPTION);
        String rolesRef = (String) toolSession.getAttribute(PermissionsHelper.ROLES_REF);
        if (rolesRef == null) rolesRef = targetRef;

        toolSession.setAttribute(STARTED, Boolean.valueOf(true));

        // setup for editing the permissions of the site for this tool, using the roles of this site, too
        state.setAttribute(STATE_REALM_ID, targetRef);

        // use the roles from this ref's AuthzGroup
        state.setAttribute(STATE_REALM_ROLES_ID, rolesRef);

        // ... with this description
        state.setAttribute(STATE_DESCRIPTION, description);

        // ... showing only locks that are prefixed with this
        state.setAttribute(STATE_PREFIX, prefix);

        // ... set the ResourceLoader object
        state.setAttribute(STATE_PERMISSION_DESCRIPTIONS, toolSession.getAttribute(PermissionsHelper.PERMISSION_DESCRIPTION));

        // start the helper
        state.setAttribute(STATE_MODE, MODE_MAIN);

        state.setAttribute(STATE_GROUP_AWARE, toolSession.getAttribute("groupAware"));
    }

    /**
     * build the context.
     *
     * @return The name of the template to use.
     */
    public static String buildHelperContext(VelocityPortlet portlet, Context context, RunData rundata, SessionState state)
    {
        // in state is the realm id
        context.put("thelp", rb);
        String realmId = (String) state.getAttribute(STATE_REALM_ID);

        // in state is the realm to use for roles - if not, use realmId
        String realmRolesId = (String) state.getAttribute(STATE_REALM_ROLES_ID);
        context.put("viewRealmId", realmRolesId);

        // get the realm locked for editing
        AuthzGroup edit = (AuthzGroup) state.getAttribute(STATE_REALM_EDIT);
        if (edit == null)
        {
            if (AuthzGroupService.allowUpdate(realmId))
            {
                try
                {
                    edit = AuthzGroupService.getAuthzGroup(realmId);
                    state.setAttribute(STATE_REALM_EDIT, edit);
                }
                catch (GroupNotDefinedException e)
                {
                    try
                    {
                        // we can create the realm
                        edit = AuthzGroupService.addAuthzGroup(realmId);
                        state.setAttribute(STATE_REALM_EDIT, edit);
                    }
                    catch (GroupIdInvalidException ee)
                    {
                        M_log.warn("PermissionsAction.buildHelperContext: addRealm: " + ee);
                        cleanupState(state);
                        return null;
                    }
                    catch (GroupAlreadyDefinedException ee)
                    {
                        M_log.warn("PermissionsAction.buildHelperContext: addRealm: " + ee);
                        cleanupState(state);
                        return null;
                    }
                    catch (AuthzPermissionException ee)
                    {
                        M_log.warn("PermissionsAction.buildHelperContext: addRealm: " + ee);
                        cleanupState(state);
                        return null;
                    }
                }
            }

            // no permission
            else
            {
                M_log.warn("PermissionsAction.buildHelperContext: no permission: " + realmId);
                cleanupState(state);
                return null;
            }
        }

        AuthzGroup viewEdit = null;
        // check whether the current realm id is of site group type
        if (realmId.indexOf(SiteService.REFERENCE_ROOT) != -1)
        {
            String siteId = realmId.replaceAll(SiteService.REFERENCE_ROOT + "/", "");
            context.put("siteRef", realmId);

            if (state.getAttribute(STATE_GROUP_AWARE) != null && ((Boolean) state.getAttribute(STATE_GROUP_AWARE)).booleanValue())
            {
                // only show groups for group-aware tools
                try
                {
                    Site site = SiteService.getSite(siteId);
                    Collection groups = site.getGroups();
                    if (groups != null && !groups.isEmpty())
                    {
                        Iterator iGroups = groups.iterator();
                        for(; iGroups.hasNext();)
                        {
                            Group group = (Group) iGroups.next();
                            // need to either have realm update permission on the group level or better at the site level
                            if (!AuthzGroupService.allowUpdate(group.getReference()))
                            {
                                iGroups.remove();
                            }
                        }
                        context.put("groups", groups);
                    }
                }
                catch (Exception siteException)
                {
                    M_log.warn("PermissionsAction.buildHelperContext: getsite of realm id = " + realmId + siteException);
                }
            }

            // get the realm locked for editing
            viewEdit = (AuthzGroup) state.getAttribute(STATE_VIEW_REALM_EDIT);
            if (viewEdit == null)
            {
                if (AuthzGroupService.allowUpdate(realmRolesId) || AuthzGroupService.allowUpdate(SiteService.siteReference(siteId)))
                {
                    try
                    {
                        viewEdit = AuthzGroupService.getAuthzGroup(realmRolesId);
                        state.setAttribute(STATE_VIEW_REALM_EDIT, viewEdit);
                    }
                    catch (GroupNotDefinedException e)
                    {
                        M_log.warn("PermissionsAction.buildHelperContext: getRealm with id= " + realmRolesId + " : " + e);
                        cleanupState(state);
                        return null;
                    }
                }

                // no permission
                else
                {
                    M_log.warn("PermissionsAction.buildHelperContext: no permission: " + realmId);
                    cleanupState(state);
                    return null;
                }
            }
        }

        // in state is the prefix for abilities to present
        String prefix = (String) state.getAttribute(STATE_PREFIX);

        // in state is the list of abilities we will present
        List functions = (List) state.getAttribute(STATE_ABILITIES);
        if (functions == null)
        {
            // get all functions prefixed with our prefix
            functions = FunctionManager.getRegisteredFunctions(prefix);
        }

        if (functions != null && !functions.isEmpty())
        {
            List<String> nFunctions = new Vector<String>();
            if (!realmRolesId.equals(realmId))
            {
                // editing groups within site, need to filter out those permissions only applicable to site level
                for (Iterator iFunctions = functions.iterator(); iFunctions.hasNext();)
                {
                    String function = (String) iFunctions.next();
                    if (function.indexOf("all.groups") == -1)
                    {
                        nFunctions.add(function);
                    }
                }
            }
            else
            {
                nFunctions.addAll(functions);
            }
            state.setAttribute(STATE_ABILITIES, nFunctions);
            context.put("abilities", nFunctions);

            // get function description from passed in HashMap
            // output permission descriptions
            Map<String, String> functionDescriptions = (Map<String, String>) state.getAttribute(STATE_PERMISSION_DESCRIPTIONS);
            if (functionDescriptions != null)
            {
                Set keySet = functionDescriptions.keySet();
                for(Object function : functions)
                {
                    String desc = (String) function;
                    String descKey = PermissionsHelper.PREFIX_PERMISSION_DESCRIPTION + function;
                    if (keySet.contains(descKey))
                    {
                        // use function description
                        desc = (String) functionDescriptions.get(descKey);
                    }

                    functionDescriptions.put((String) function, desc);
                }
                context.put("functionDescriptions", functionDescriptions);
            }
        }

        // in state is the description of the edit
        String description = (String) state.getAttribute(STATE_DESCRIPTION);

        // the list of roles
        List roles = (List) state.getAttribute(STATE_ROLES);
        if (roles == null)
        {
            // get the roles from the edit, unless another is specified
            AuthzGroup roleRealm = viewEdit != null ? viewEdit : edit;
            if (realmRolesId != null)
            {
                try
                {
                    roleRealm = AuthzGroupService.getAuthzGroup(realmRolesId);
                }
                catch (Exception e)
                {
                    M_log.warn("PermissionsHelperAction.buildHelperContext: getRolesRealm: " + realmRolesId + " : " + e);
                }
            }
            roles = new Vector();
            roles.addAll(roleRealm.getRoles());
            Collections.sort(roles);
            state.setAttribute(STATE_ROLES, roles);
        }

        // the abilities not including this realm for each role
        Map rolesAbilities = (Map) state.getAttribute(STATE_ROLE_ABILITIES);
        if (rolesAbilities == null)
        {
            rolesAbilities = new Hashtable();
            state.setAttribute(STATE_ROLE_ABILITIES, rolesAbilities);

            // get this resource's role Realms,those that refine the role definitions, but not it's own
            Reference ref = EntityManager.newReference(viewEdit != null ? viewEdit.getId() : edit.getId());
            Collection realms = ref.getAuthzGroups();
            realms.remove(ref.getReference());

            for (Iterator iRoles = roles.iterator(); iRoles.hasNext();)
            {
                Role role = (Role) iRoles.next();
                Set locks = AuthzGroupService.getAllowedFunctions(role.getId(), realms);
                rolesAbilities.put(role.getId(), locks);
            }
        }

        context.put("realm", viewEdit != null ? viewEdit : edit);
        context.put("prefix", prefix);
        context.put("description", description);
        if (!roles.isEmpty())
        {
            context.put("roles", roles);
        }
        context.put("rolesAbilities", rolesAbilities);

        // make sure observers are disabled
        VelocityPortletPaneledAction.disableObservers(state);

        return TEMPLATE_MAIN;
    }

    /**
     * Remove the state variables used internally, on the way out.
     */
    private static void cleanupState(SessionState state)
    {
        state.removeAttribute(STATE_REALM_ID);
        state.removeAttribute(STATE_REALM_ROLES_ID);
        state.removeAttribute(STATE_REALM_EDIT);
        state.removeAttribute(STATE_VIEW_REALM_EDIT);
        state.removeAttribute(STATE_PREFIX);
        state.removeAttribute(STATE_ABILITIES);
        state.removeAttribute(STATE_DESCRIPTION);
        state.removeAttribute(STATE_ROLES);
        state.removeAttribute(STATE_ROLE_ABILITIES);
        state.removeAttribute(STATE_PERMISSION_DESCRIPTIONS);
        state.removeAttribute(STATE_MODE);
        state.removeAttribute(VelocityPortletPaneledAction.STATE_HELPER);
        state.removeAttribute(STATE_GROUP_AWARE);

        // re-enable observers
        VelocityPortletPaneledAction.enableObservers(state);
    }

    /**
     * to show different permission settings based on user selection of authz group
     * @param data
     */
    public void doView_permission_option(RunData data)
    {
        String viewAuthzId = data.getParameters().getString("authzGroupSelection");
        SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());

        // reset attributes so the next build re-reads them for the selected authz group
        state.setAttribute(STATE_REALM_ROLES_ID, viewAuthzId);
        state.removeAttribute(STATE_VIEW_REALM_EDIT);
        state.removeAttribute(STATE_ABILITIES);
        state.removeAttribute(STATE_ROLES);
        state.removeAttribute(STATE_ROLE_ABILITIES);
    }

    /**
     * Handle the eventSubmit_doSave command to save the edited permissions.
     */
    public void doSave(RunData data)
    {
        SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());

        // CSRF mitigation: only accept form submissions via POST
        if (!"POST".equals(data.getRequest().getMethod())) {
            M_log.warn("PermissionsAction.doSave: user did not submit with a POST! IP=" + data.getRequest().getRemoteAddr());
            return;
        }

        // only save the view realm's roles
        AuthzGroup edit = (AuthzGroup) state.getAttribute(STATE_VIEW_REALM_EDIT);
        if (edit == null)
        {
            edit = (AuthzGroup) state.getAttribute(STATE_REALM_EDIT);
        }

        if (edit != null)
        {
            // read the form, updating the edit
            readForm(data, edit, state);

            // commit the change
            try
            {
                AuthzGroupService.save(edit);
            }
            catch (GroupNotDefinedException e)
            {
                addAlert(state, rb.getFormattedMessage("alert_sitegroupnotdefined", new Object[]{edit.getReference()}));
            }
            catch (AuthzPermissionException e)
            {
                addAlert(state, rb.getFormattedMessage("alert_permission", new Object[]{edit.getReference()}));
            }
        }

        // clean up state
        cleanupState(state);
    }

    /**
     * Handle the eventSubmit_doCancel command to abort the edits.
     */
    public void doCancel(RunData data)
    {
        SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());

        // clean up state
        cleanupState(state);
    }

    /**
     * Read the permissions form.
     */
    private void readForm(RunData data, AuthzGroup edit, SessionState state)
    {
        List abilities = (List) state.getAttribute(STATE_ABILITIES);
        List roles = (List) state.getAttribute(STATE_ROLES);

        // look for each role's ability field
        for (Iterator iRoles = roles.iterator(); iRoles.hasNext();)
        {
            Role role = (Role) iRoles.next();

            for (Iterator iLocks = abilities.iterator(); iLocks.hasNext();)
            {
                String lock = (String) iLocks.next();

                boolean checked = data.getParameters().getBoolean(role.getId() + lock);
                if (checked)
                {
                    // we have an ability! Make sure there's a role
                    Role myRole = edit.getRole(role.getId());
                    if (myRole == null)
                    {
                        try
                        {
                            myRole = edit.addRole(role.getId());
                        }
                        catch (RoleAlreadyDefinedException e)
                        {
                            M_log.warn("PermissionsAction.readForm: addRole after getRole null: " + role.getId() + " : " + e);
                        }
                    }
                    if (myRole != null) {
                        myRole.allowFunction(lock);
                    }
                }
                else
                {
                    // if we do have this role, make sure there's not this lock
                    Role myRole = edit.getRole(role.getId());
                    if (myRole != null)
                    {
                        myRole.disallowFunction(lock);
                    }
                }
            }
        }
    }
}
| |
package technology.mainthread.apps.moment.background.service;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.IBinder;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import okio.Okio;
import rx.Observer;
import rx.Subscription;
import rx.subscriptions.Subscriptions;
import technology.mainthread.apps.moment.MomentApp;
import technology.mainthread.apps.moment.background.ConnectivityHelper;
import technology.mainthread.apps.moment.common.data.vo.Moment;
import technology.mainthread.apps.moment.common.rx.RxSchedulerHelper;
import technology.mainthread.apps.moment.data.db.MomentTable;
import technology.mainthread.apps.moment.data.db.SyncMoment;
import technology.mainthread.apps.moment.data.rx.api.RxMomentApi;
import timber.log.Timber;
import static technology.mainthread.apps.moment.background.receiver.ConnectivityBroadcastReceiver.enableNetworkChangeReceiver;
import static technology.mainthread.apps.moment.common.data.vo.MomentType.DRAWING;
// TODO: convert to sync intent service
public class SenderService extends Service {

    /** Commands accepted via {@link #onStartCommand}. */
    public @interface StartCommand {
        int START = 1;
        int SEND = 2;
    }

    private static final String PARAM_START_COMMAND = "param_start_command";
    private static final String PARAM_RECIPIENT = "param_recipient";
    private static final String PARAM_DRAWING = "param_drawing";

    @Inject
    RxMomentApi rxMomentApi;
    @Inject
    @SyncMoment
    MomentTable momentTable;
    @Inject
    ConnectivityHelper connectivityHelper;

    /** Database id of the moment currently being uploaded; 0 when idle. */
    private int currentMomentId;
    private Subscription momentSubscription = Subscriptions.empty();

    /** Intent that drains the queue of pending moments. */
    public static Intent getSenderServiceStartIntent(Context context) {
        Intent intent = new Intent(context, SenderService.class);
        intent.putExtra(PARAM_START_COMMAND, StartCommand.START);
        return intent;
    }

    /** Intent that queues a new drawing for the given recipients and then starts sending. */
    public static Intent getSenderServiceSendIntent(Context context, long[] recipients, byte[] drawing) {
        Intent intent = new Intent(context, SenderService.class);
        intent.putExtra(PARAM_START_COMMAND, StartCommand.SEND);
        intent.putExtra(PARAM_RECIPIENT, recipients);
        intent.putExtra(PARAM_DRAWING, drawing);
        return intent;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        Timber.d("onCreate");
        MomentApp.get(this).inject(this);
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (intent != null) {
            int command = intent.getIntExtra(PARAM_START_COMMAND, 0);
            Timber.d("onStartCommand with command: %s", command);
            switch (command) {
                case StartCommand.START:
                    sendNextInLine();
                    break;
                case StartCommand.SEND:
                    long[] recipient = intent.getLongArrayExtra(PARAM_RECIPIENT);
                    byte[] drawing = intent.getByteArrayExtra(PARAM_DRAWING);
                    queueMoment(convertToList(recipient), drawing);
                    break;
                default:
                    break;
            }
        }
        return START_NOT_STICKY;
    }

    @Override
    public void onDestroy() {
        momentSubscription.unsubscribe();
        super.onDestroy();
    }

    @Override
    public IBinder onBind(Intent intent) {
        // Do not allow binding
        return null;
    }

    /** Persist the drawing to private storage, record it in the queue table and start sending. */
    private void queueMoment(List<Long> recipients, byte[] drawing) {
        if (!recipients.isEmpty() && drawing != null) {
            String fileName = String.valueOf(System.currentTimeMillis());
            // try-with-resources closes the stream even when write() throws
            // (previously the stream leaked on the exception path)
            try (FileOutputStream outputStream = openFileOutput(fileName, Context.MODE_PRIVATE)) {
                outputStream.write(drawing);
            } catch (Exception e) {
                Timber.e(e, "Cannot save drawing");
            }
            momentTable.add(Moment.builder()
                    .recipients(recipients)
                    .fileName(fileName)
                    .momentType(DRAWING)
                    .build());
            sendNextInLine();
        }
    }

    /** Send the next queued moment, or stop the service when the queue is empty. */
    private void sendNextInLine() {
        Timber.d("sendNextInLine");
        if (currentMomentId != 0) {
            // cancel if already sending
            return;
        }
        Moment moment = momentTable.getNextInLine();
        if (moment != null) {
            currentMomentId = moment.getId();
            sendMoment(moment);
        } else {
            stopSelf();
        }
    }

    private void sendMoment(Moment moment) {
        Timber.d("sending moment");
        Bitmap drawing = null;
        // try-with-resources closes the stream even when decodeStream() throws
        // (previously the stream leaked on the exception path)
        try (FileInputStream inputStream = openFileInput(moment.getFileName())) {
            drawing = BitmapFactory.decodeStream(inputStream);
        } catch (Exception e) {
            Timber.e(e, "Cannot load drawing");
        }
        if (drawing != null) {
            momentSubscription = rxMomentApi.send(moment.getRecipients(), drawing)
                    .compose(RxSchedulerHelper.<Void>applySchedulers())
                    .subscribe(new Observer<Void>() {
                        @Override
                        public void onCompleted() {
                        }

                        @Override
                        public void onError(Throwable e) {
                            if (!connectivityHelper.isConnected()) {
                                // retry automatically once connectivity returns
                                enableNetworkChangeReceiver(SenderService.this, true);
                            }
                            stopSelf();
                        }

                        @Override
                        public void onNext(Void aVoid) {
                            removeMoment();
                            sendNextInLine();
                        }
                    });
        } else {
            // drawing file unreadable - drop the moment so the queue does not stall
            momentTable.delete(moment.getId());
            sendNextInLine();
        }
    }

    /** Delete the sent moment's file and database row, and mark the service idle. */
    private void removeMoment() {
        Timber.d("removeMoment - currentMomentId: %d", currentMomentId);
        if (currentMomentId != 0) {
            Moment moment = momentTable.get(currentMomentId);
            deleteFile(moment.getFileName());
            momentTable.delete(currentMomentId);
            currentMomentId = 0;
        }
    }

    /** Box a primitive long[] into a List&lt;Long&gt;. */
    private List<Long> convertToList(long[] longArray) {
        List<Long> list = new ArrayList<>(longArray.length);
        for (long value : longArray) {
            list.add(value);
        }
        return list;
    }
}
| |
// This file is part of JavaSMT,
// an API wrapper for a collection of SMT solvers:
// https://github.com/sosy-lab/java-smt
//
// SPDX-FileCopyrightText: 2020 Dirk Beyer <https://www.sosy-lab.org>
//
// SPDX-License-Identifier: Apache-2.0
package org.sosy_lab.java_smt.test;
import static com.google.common.collect.ImmutableList.toImmutableList;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.util.Arrays;
import java.util.List;
import java.util.function.Supplier;
import org.junit.AssumptionViolatedException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.sosy_lab.common.configuration.ConfigurationBuilder;
import org.sosy_lab.java_smt.SolverContextFactory.Solvers;
import org.sosy_lab.java_smt.api.BooleanFormula;
import org.sosy_lab.java_smt.api.FormulaType;
import org.sosy_lab.java_smt.api.NumeralFormula;
import org.sosy_lab.java_smt.api.NumeralFormulaManager;
import org.sosy_lab.java_smt.api.SolverException;
import org.sosy_lab.java_smt.basicimpl.AbstractNumeralFormulaManager.NonLinearArithmetic;
@RunWith(Parameterized.class)
public class NonLinearArithmeticTest<T extends NumeralFormula> extends SolverBasedTest0 {
// Boolector, CVC4, SMTInterpol and MathSAT5 do not fully support non-linear arithmetic
// (though SMTInterpol and MathSAT5 support some parts)
static final ImmutableSet<Solvers> SOLVER_WITHOUT_NONLINEAR_ARITHMETIC =
    ImmutableSet.of(
        Solvers.SMTINTERPOL, Solvers.MATHSAT5, Solvers.BOOLECTOR, Solvers.CVC4, Solvers.YICES2);

/** Parameter sets: every solver, crossed with both numeral types and every non-linear mode. */
@Parameters(name = "{0} {1} {2}")
public static Iterable<Object[]> getAllSolvers() {
  List<List<Object>> combinations =
      Lists.cartesianProduct(
          Arrays.asList(Solvers.values()),
          ImmutableList.of(FormulaType.IntegerType, FormulaType.RationalType),
          Arrays.asList(NonLinearArithmetic.values()));
  return combinations.stream().map(List::toArray).collect(toImmutableList());
}
// The solver under test, injected by the Parameterized runner.
@Parameter(0)
public Solvers solver;

// Tell the base class which solver context to create for this parameterisation.
@Override
protected Solvers solverToUse() {
return solver;
}
// Integer or rational arithmetic, injected by the Parameterized runner.
@Parameter(1)
public FormulaType<?> formulaType;

// The numeral manager matching formulaType; selected before each test.
private NumeralFormulaManager<T, T> nmgr;

/** Pick the integer or rational formula manager according to {@code formulaType}. */
@SuppressWarnings("unchecked")
@Before
public void chooseNumeralFormulaManager() {
  if (formulaType.isIntegerType()) {
    requireIntegers();
    nmgr = (NumeralFormulaManager<T, T>) imgr;
    return;
  }
  if (formulaType.isRationalType()) {
    requireRationals();
    nmgr = (NumeralFormulaManager<T, T>) rmgr;
    return;
  }
  throw new AssertionError();
}
// How non-linear operations are handled (use / approximate), injected by the runner.
@Parameter(2)
public NonLinearArithmetic nonLinearArithmetic;

/** Propagate the non-linear arithmetic mode into the solver configuration. */
@Override
protected ConfigurationBuilder createTestConfigBuilder() {
  ConfigurationBuilder builder =
      super.createTestConfigBuilder()
          .setOption("solver.nonLinearArithmetic", nonLinearArithmetic.name());
  return builder;
}
/**
 * Evaluate {@code supplier}; if it throws {@link UnsupportedOperationException} and that is
 * expected (USE mode on a solver without non-linear support), skip the test instead of failing.
 */
private T handleExpectedException(Supplier<T> supplier) {
  try {
    return supplier.get();
  } catch (UnsupportedOperationException e) {
    boolean exceptionWasExpected =
        nonLinearArithmetic == NonLinearArithmetic.USE
            && SOLVER_WITHOUT_NONLINEAR_ARITHMETIC.contains(solver);
    if (!exceptionWasExpected) {
      throw e;
    }
    throw new AssumptionViolatedException(
        "Expected UnsupportedOperationException was thrown correctly");
  }
}
/**
 * Check {@code f}: it must be UNSAT when non-linear arithmetic is really used (USE mode, or
 * fallback mode on a capable solver); with a linear approximation it is expected to be SAT.
 */
private void assertExpectedUnsatifiabilityForNonLinearArithmetic(BooleanFormula f)
    throws SolverException, InterruptedException {
  boolean solverHandlesNonLinear = !SOLVER_WITHOUT_NONLINEAR_ARITHMETIC.contains(solver);
  boolean expectUnsat;
  switch (nonLinearArithmetic) {
    case USE:
      expectUnsat = true;
      break;
    case APPROXIMATE_FALLBACK:
      expectUnsat = solverHandlesNonLinear;
      break;
    default:
      expectUnsat = false;
      break;
  }
  if (expectUnsat) {
    assertThatFormula(f).isUnsatisfiable();
  } else {
    assertThatFormula(f).isSatisfiable();
  }
}
/** Multiplication by constants is linear and must be satisfiable everywhere. */
@Test
public void testLinearMultiplication() throws SolverException, InterruptedException {
  T a = nmgr.makeVariable("a");
  T two = nmgr.makeNumber(2);
  T three = nmgr.makeNumber(3);
  T five = nmgr.makeNumber(5);
  BooleanFormula f =
      bmgr.and(
          nmgr.equal(a, nmgr.multiply(two, three)),
          nmgr.equal(nmgr.makeNumber(2 * 3 * 5), nmgr.multiply(a, five)),
          nmgr.equal(nmgr.makeNumber(2 * 3 * 5), nmgr.multiply(five, a)));
  assertThatFormula(f).isSatisfiable();
}
@Test
public void testLinearMultiplicationUnsatisfiable() throws SolverException, InterruptedException {
T a = nmgr.makeVariable("a");
BooleanFormula f =
bmgr.and(
nmgr.equal(a, nmgr.multiply(nmgr.makeNumber(2), nmgr.makeNumber(3))),
bmgr.xor(
nmgr.equal(nmgr.makeNumber(2 * 3 * 5), nmgr.multiply(a, nmgr.makeNumber(5))),
nmgr.equal(nmgr.makeNumber(2 * 3 * 5), nmgr.multiply(nmgr.makeNumber(5), a))));
assertThatFormula(f).isUnsatisfiable();
}
@Test
public void testMultiplicationOfVariables() throws SolverException, InterruptedException {
T a = nmgr.makeVariable("a");
T b = nmgr.makeVariable("b");
T c = nmgr.makeVariable("c");
BooleanFormula f =
bmgr.and(
nmgr.equal(c, handleExpectedException(() -> nmgr.multiply(a, b))),
nmgr.equal(c, nmgr.makeNumber(2 * 3)));
assertThatFormula(f).isSatisfiable();
}
@Test
public void testMultiplicationOfVariablesUnsatisfiable()
throws SolverException, InterruptedException {
T a = nmgr.makeVariable("a");
T b = nmgr.makeVariable("b");
T c = nmgr.makeVariable("c");
BooleanFormula f =
bmgr.and(
nmgr.equal(handleExpectedException(() -> nmgr.multiply(a, b)), c),
nmgr.equal(a, nmgr.makeNumber(3)),
nmgr.equal(b, nmgr.makeNumber(5)),
bmgr.not(nmgr.equal(c, nmgr.makeNumber(15))));
if (solver == Solvers.MATHSAT5
&& nonLinearArithmetic != NonLinearArithmetic.APPROXIMATE_ALWAYS) {
// MathSAT supports non-linear multiplication
assertThatFormula(f).isUnsatisfiable();
} else {
assertExpectedUnsatifiabilityForNonLinearArithmetic(f);
}
}
@Test
public void testDivisionByConstant() throws SolverException, InterruptedException {
T a = nmgr.makeVariable("a");
BooleanFormula f =
bmgr.and(
nmgr.equal(nmgr.makeNumber(2 * 3), a),
nmgr.equal(nmgr.divide(a, nmgr.makeNumber(3)), nmgr.makeNumber(2)),
nmgr.equal(nmgr.divide(a, nmgr.makeNumber(2)), nmgr.makeNumber(3)));
assertThatFormula(f).isSatisfiable();
}
@Test
public void testDivisionByConstantUnsatisfiable() throws SolverException, InterruptedException {
T a = nmgr.makeVariable("a");
BooleanFormula f =
bmgr.and(
nmgr.equal(a, nmgr.makeNumber(2 * 3)),
bmgr.xor(
nmgr.equal(nmgr.divide(a, nmgr.makeNumber(3)), nmgr.makeNumber(2)),
nmgr.equal(nmgr.divide(a, nmgr.makeNumber(2)), nmgr.makeNumber(3))));
if (formulaType.equals(FormulaType.IntegerType)
&& nonLinearArithmetic == NonLinearArithmetic.APPROXIMATE_ALWAYS) {
// Integer division is always non-linear due to rounding rules
assertThatFormula(f).isSatisfiable();
} else {
assertThatFormula(f).isUnsatisfiable();
}
}
@Test
public void testDivision() throws SolverException, InterruptedException {
T a = nmgr.makeVariable("a");
// (a == 2) && (3 == 6 / a)
BooleanFormula f =
bmgr.and(
nmgr.equal(a, nmgr.makeNumber(2)),
nmgr.equal(
nmgr.makeNumber(3),
handleExpectedException(() -> nmgr.divide(nmgr.makeNumber(2 * 3), a))));
assertThatFormula(f).isSatisfiable();
}
@Test
public void testDivisionUnsatisfiable() throws SolverException, InterruptedException {
T a = nmgr.makeVariable("a");
BooleanFormula f =
bmgr.and(
bmgr.not(nmgr.equal(a, nmgr.makeNumber(2))),
bmgr.not(nmgr.equal(a, nmgr.makeNumber(0))), // some solver produce model a=0 otherwise
nmgr.equal(
nmgr.makeNumber(3),
handleExpectedException(() -> nmgr.divide(nmgr.makeNumber(2 * 3), a))));
if (ImmutableSet.of(Solvers.MATHSAT5, Solvers.CVC4).contains(solver)
&& nonLinearArithmetic != NonLinearArithmetic.APPROXIMATE_ALWAYS) {
// some solvers support non-linear multiplication (partially)
assertThatFormula(f).isUnsatisfiable();
} else {
assertExpectedUnsatifiabilityForNonLinearArithmetic(f);
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.fieldstats;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
/**
*/
/**
 * Integration tests for the field stats API: verifies min/max aggregation per
 * field, the cluster/indices response levels, and error handling for invalid
 * level options and incompatible field types across indices.
 */
public class FieldStatsIntegrationTests extends ElasticsearchIntegrationTest {

    /**
     * Indexes random values for every supported field type and checks that the
     * merged field stats report exactly the minimum and maximum values indexed.
     */
    public void testRandom() throws Exception {
        // Bugfix: the original mapping listed "double", "type=double" twice;
        // the duplicate pair has been removed.
        assertAcked(prepareCreate("test").addMapping(
                "test", "string", "type=string", "date", "type=date", "double", "type=double",
                "float", "type=float", "long", "type=long", "integer", "type=integer", "short", "type=short", "byte", "type=byte"
        ));
        // Track extremes of the generated values; start from the opposite bound
        // so the first sample always replaces the initial value.
        byte minByte = Byte.MAX_VALUE;
        byte maxByte = Byte.MIN_VALUE;
        short minShort = Short.MAX_VALUE;
        short maxShort = Short.MIN_VALUE;
        int minInt = Integer.MAX_VALUE;
        int maxInt = Integer.MIN_VALUE;
        long minLong = Long.MAX_VALUE;
        long maxLong = Long.MIN_VALUE;
        float minFloat = Float.MAX_VALUE;
        float maxFloat = Float.MIN_VALUE;
        double minDouble = Double.MAX_VALUE;
        double maxDouble = Double.MIN_VALUE;
        // Highest Unicode code point, so any generated string compares lower.
        String minString = new String(Character.toChars(1114111));
        String maxString = "0";
        int numDocs = scaledRandomIntBetween(128, 1024);
        List<IndexRequestBuilder> request = new ArrayList<>(numDocs);
        for (int doc = 0; doc < numDocs; doc++) {
            byte b = randomByte();
            minByte = (byte) Math.min(minByte, b);
            maxByte = (byte) Math.max(maxByte, b);
            short s = randomShort();
            minShort = (short) Math.min(minShort, s);
            maxShort = (short) Math.max(maxShort, s);
            int i = randomInt();
            minInt = Math.min(minInt, i);
            maxInt = Math.max(maxInt, i);
            long l = randomLong();
            minLong = Math.min(minLong, l);
            maxLong = Math.max(maxLong, l);
            float f = randomFloat();
            minFloat = Math.min(minFloat, f);
            maxFloat = Math.max(maxFloat, f);
            double d = randomDouble();
            minDouble = Math.min(minDouble, d);
            maxDouble = Math.max(maxDouble, d);
            String str = randomRealisticUnicodeOfLength(3);
            if (str.compareTo(minString) < 0) {
                minString = str;
            }
            if (str.compareTo(maxString) > 0) {
                maxString = str;
            }
            request.add(client().prepareIndex("test", "test", Integer.toString(doc))
                    .setSource("byte", b, "short", s, "integer", i, "long", l, "float", f, "double", d, "string", str)
            );
        }
        indexRandom(true, false, request);

        FieldStatsResponse response = client().prepareFieldStats().setFields("byte", "short", "integer", "long", "float", "double", "string").get();
        assertAllSuccessful(response);

        // Every document carries every field, so density must be 100%.
        for (FieldStats stats : response.getAllFieldStats().values()) {
            assertThat(stats.getMaxDoc(), equalTo((long) numDocs));
            assertThat(stats.getDocCount(), equalTo((long) numDocs));
            assertThat(stats.getDensity(), equalTo(100));
        }

        assertThat(response.getAllFieldStats().get("byte").getMinValue(), equalTo(Byte.toString(minByte)));
        assertThat(response.getAllFieldStats().get("byte").getMaxValue(), equalTo(Byte.toString(maxByte)));
        assertThat(response.getAllFieldStats().get("short").getMinValue(), equalTo(Short.toString(minShort)));
        assertThat(response.getAllFieldStats().get("short").getMaxValue(), equalTo(Short.toString(maxShort)));
        assertThat(response.getAllFieldStats().get("integer").getMinValue(), equalTo(Integer.toString(minInt)));
        assertThat(response.getAllFieldStats().get("integer").getMaxValue(), equalTo(Integer.toString(maxInt)));
        assertThat(response.getAllFieldStats().get("long").getMinValue(), equalTo(Long.toString(minLong)));
        assertThat(response.getAllFieldStats().get("long").getMaxValue(), equalTo(Long.toString(maxLong)));
        assertThat(response.getAllFieldStats().get("float").getMinValue(), equalTo(Float.toString(minFloat)));
        assertThat(response.getAllFieldStats().get("float").getMaxValue(), equalTo(Float.toString(maxFloat)));
        assertThat(response.getAllFieldStats().get("double").getMinValue(), equalTo(Double.toString(minDouble)));
        assertThat(response.getAllFieldStats().get("double").getMaxValue(), equalTo(Double.toString(maxDouble)));
        // NOTE(review): minString/maxString are computed but never asserted;
        // the formatting of string field stats should be confirmed before
        // adding assertions for them.
    }

    /**
     * Verifies the three response levels: the default and "cluster" levels merge
     * stats of all indices under the "_all" key, while "indices" reports each
     * index separately; an unknown level must fail request validation.
     */
    public void testFieldStatsIndexLevel() throws Exception {
        assertAcked(prepareCreate("test1").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test2").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test3").addMapping(
                "test", "value", "type=long"
        ));
        indexRange("test1", -10, 100);
        indexRange("test2", 101, 200);
        indexRange("test3", 201, 300);

        // default:
        FieldStatsResponse response = client().prepareFieldStats().setFields("value").get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(Long.toString(-10)));
        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(Long.toString(300)));
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(Long.toString(-10)));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(Long.toString(300)));

        // Level: cluster
        response = client().prepareFieldStats().setFields("value").setLevel("cluster").get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(Long.toString(-10)));
        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(Long.toString(300)));
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(Long.toString(-10)));
        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(Long.toString(300)));

        // Level: indices
        response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
        assertAllSuccessful(response);
        assertThat(response.getAllFieldStats(), nullValue());
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(3));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(Long.toString(-10)));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(Long.toString(100)));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(Long.toString(101)));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(Long.toString(200)));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(Long.toString(201)));
        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(Long.toString(300)));

        // Illegal level option:
        try {
            client().prepareFieldStats().setFields("value").setLevel("illegal").get();
            fail();
        } catch (ActionRequestValidationException e) {
            assertThat(e.getMessage(), equalTo("Validation Failed: 1: invalid level option [illegal];"));
        }
    }

    /**
     * A field mapped as long in one index and string in another cannot be merged
     * cluster-wide, but per-index stats must still succeed.
     */
    public void testIncompatibleFieldTypes() {
        assertAcked(prepareCreate("test1").addMapping(
                "test", "value", "type=long"
        ));
        assertAcked(prepareCreate("test2").addMapping(
                "test", "value", "type=string"
        ));
        // Use uppercase 'L' long literals; lowercase 'l' is easily mistaken for '1'.
        client().prepareIndex("test1", "test").setSource("value", 1L).get();
        client().prepareIndex("test1", "test").setSource("value", 2L).get();
        client().prepareIndex("test2", "test").setSource("value", "a").get();
        client().prepareIndex("test2", "test").setSource("value", "b").get();
        refresh();

        try {
            client().prepareFieldStats().setFields("value").get();
            fail();
        } catch (ElasticsearchIllegalStateException e){
            assertThat(e.getMessage(), containsString("trying to merge the field stats of field [value]"));
        }

        FieldStatsResponse response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
        assertAllSuccessful(response);
        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(Long.toString(1)));
        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(Long.toString(2)));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("a"));
        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo("b"));
    }

    /**
     * Indexes one document per value of the inclusive range [from, to] into the
     * given index and refreshes so the documents are searchable.
     */
    private void indexRange(String index, long from, long to) throws ExecutionException, InterruptedException {
        List<IndexRequestBuilder> requests = new ArrayList<>();
        for (long value = from; value <= to; value++) {
            requests.add(client().prepareIndex(index, "test").setSource("value", value));
        }
        indexRandom(true, false, requests);
    }
}
| |
package edu.kit.ipd.sdq.kamp4aps.core;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.EList;
import edu.kit.ipd.sdq.kamp.architecture.ArchitectureModelLookup;
import edu.kit.ipd.sdq.kamp4aps.model.DeploymentContext.ComponentCorrelation;
import edu.kit.ipd.sdq.kamp4aps.model.DeploymentContext.VariableMapping;
import edu.kit.ipd.sdq.kamp4aps.model.KAMP4aPSModificationmarks.ChangePropagationDueToHardwareChange;
import edu.kit.ipd.sdq.kamp4aps.model.KAMP4aPSModificationmarks.ModifyComponent;
import edu.kit.ipd.sdq.kamp4aps.model.KAMP4aPSModificationmarks.ModifyInterface;
import edu.kit.ipd.sdq.kamp4aps.model.KAMP4aPSModificationmarks.ModifyModule;
import edu.kit.ipd.sdq.kamp4aps.model.KAMP4aPSModificationmarks.ModifyStructure;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.Plant;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.BusComponents.BusBox;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.BusComponents.BusCable;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.BusComponents.BusMaster;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.BusComponents.BusSlave;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.Component;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.InterfaceRepository.Interface;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.InterfaceRepository.SignalInterface;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.ModuleRepository.Module;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.StructureRepository.Structure;
import edu.kit.ipd.sdq.kamp4aps.model.basic.Entity;
import edu.kit.ipd.sdq.kamp4aps.model.basic.Identifier;
import edu.kit.ipd.sdq.kamp4iec.model.IECRepository.GlobalVariable;
/**
* This class represents a part of the change rules implementation
* that where extracted of the scenarios described in the link below.
* These rules are searching for the affected structural elements
* which were defined in the \texttt{xPPU} metamodel.
* Each look up method is the manifestation of a single element of the mentioned
* metamodel.
*
* The internal public class \texttt{BusComponentsParams} serves as data exchange
* class to avoid large parameter lists.
*
* @author Sandro Koch
 * @see <a href="https://sdqweb.ipd.kit.edu/publications/pdfs/koch2017a.pdf">koch2017a</a>
*
*/
public class APSArchitectureModelLookup extends ArchitectureModelLookup {
	// Shared accumulator for the bus-modification fixed-point computation in
	// lookUpChangesBasedOnBusModification and its helpers.
	// NOTE(review): static mutable state — concurrent look-ups would interfere.
	private static BusComponentsParams bcParams;
/* ########################################################################################################################
* # STRUCTURE LOOKUP SECTION ###########################################################################################
* ########################################################################################################################
*/
	/**
	 * Looks up the parent plant of every initially marked structure.
	 *
	 * @param initialMarkedStructures the structure modifications whose parent plants are resolved
	 * @return the parent plants, one entry per marked structure
	 */
public static List<Plant> lookUpParentsOfStructures(Collection<ModifyStructure<Structure>> initialMarkedStructures){
List<Plant> results = new ArrayList<Plant>();
for(ModifyStructure<Structure> modifyStructure : initialMarkedStructures){
results.add(modifyStructure.getAffectedElement().getParentPlant());
}
return results;
}
	/**
	 * Looks up the modules directly contained in the given structures.
	 *
	 * @param initialMarkedStructures the structures whose modules are resolved
	 * @return a map from each structure to its contained modules
	 */
public static Map<Structure, Set<Module>> lookUpModulesOfStructures(Collection<Structure> initialMarkedStructures){
Map<Structure, Set<Module>> results = new HashMap<Structure, Set<Module>>();
for(Structure modifyStructure : initialMarkedStructures){
for(Module module : modifyStructure.getModules()){
if(results.get(modifyStructure) == null)
results.put(modifyStructure, new HashSet<Module>());
results.get(modifyStructure).add(module);
}
}
return results;
}
	/**
	 * Looks up the components directly contained in the given structures.
	 *
	 * @param initialMarkedStructures the structures whose components are resolved
	 * @return a map from each structure to its contained components
	 */
public static Map<Structure, Set<Component>> lookUpComponentsOfStructures(Collection<Structure> initialMarkedStructures){
Map<Structure, Set<Component>> results = new HashMap<Structure, Set<Component>>();
for(Structure modifyStructure : initialMarkedStructures){
for(Component component : modifyStructure.getComponents()){
if(results.get(modifyStructure) == null)
results.put(modifyStructure, new HashSet<Component>());
results.get(modifyStructure).add(component);
}
}
return results;
}
/* #####################################################################################################################
* # MODULE LOOKUP SECTION ###########################################################################################
* #####################################################################################################################
*/
/**
* Returns a map with modules and their parent structure.
* For reverse LookUp.
* @param initialMarkedModules
* @param changePropagationDueToHardwareChange
* @return
*/
public static Map<Module, Structure> lookUpParentStructuresOfModules(Collection<ModifyModule<Module>> initialMarkedModules,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Module, Structure> results = new HashMap<Module, Structure>();
for(ModifyModule<Module> modifyModule : initialMarkedModules){
if(modifyModule.getAffectedElement().getParentEntity() != null &&
modifyModule.getAffectedElement().getParentEntity() instanceof Structure)
results.put(modifyModule.getAffectedElement(), (Structure) modifyModule.getAffectedElement().getParentEntity());
}
List<ModifyModule<Module>> modulesToModify = changePropagationDueToHardwareChange.getModuleModifications();
for(ModifyModule<Module> moduleToModify : modulesToModify){
Module affectedModule = moduleToModify.getAffectedElement();
Entity parent = affectedModule.getParentEntity();
if(parent instanceof Structure){
results.put(affectedModule, (Structure) parent);
}
}
return results;
}
/**
* Returns a map with modules and their parent module.
* For reverse LookUp.
* @param initialMarkedModules
* @param changePropagationDueToHardwareChange
* @return
*/
public static Map<Module, Module> lookUpParentModulesOfModules(Collection<ModifyModule<Module>> initialMarkedModules,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Module, Module> results = new HashMap<Module, Module>();
for(ModifyModule<Module> modifyModule : initialMarkedModules){
if(modifyModule.getAffectedElement().getParentEntity() != null &&
modifyModule.getAffectedElement().getParentEntity() instanceof Module)
results.put(modifyModule.getAffectedElement(), (Module) modifyModule.getAffectedElement().getParentEntity());
}
List<ModifyModule<Module>> modulesToModify = changePropagationDueToHardwareChange.getModuleModifications();
for(ModifyModule<Module> moduleToModify : modulesToModify){
Module affectedModule = moduleToModify.getAffectedElement();
Entity parent = affectedModule.getParentEntity();
if(parent instanceof Module){
results.put(affectedModule, (Module) parent);
}
}
return results;
}
/**
*
* @param initialMarkedModules
* @param changePropagationDueToHardwareChange
* @return
*/
public static Map<Module, Set<Module>> lookUpModulesOfModules(Collection<? extends Module> initialMarkedModules,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Module, Set<Module>> results = new HashMap<Module, Set<Module>>();
for(Module modifyModule: initialMarkedModules){
for(Module module : modifyModule.getModules()){
if(results.get(modifyModule) == null)
results.put(modifyModule, new HashSet<Module>());
results.get(modifyModule).add(module);
}
}
List<ModifyModule<Module>> modulesToModify = changePropagationDueToHardwareChange.getModuleModifications();
for(ModifyModule<Module> moduleToModify : modulesToModify){
Module affectedModule = moduleToModify.getAffectedElement();
for(Module module : affectedModule.getModules()){
if(results.get(affectedModule) == null)
results.put(affectedModule, new HashSet<Module>());
results.get(affectedModule).add(module);
}
}
return results;
}
/**
*
* @param initialMarkedModules
* @param changePropagationDueToHardwareChange
* @return
*/
public static Map<Module, Set<Component>> lookUpComponentsOfModules(Collection<? extends Module> initialMarkedModules,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Module, Set<Component>> results = new HashMap<Module, Set<Component>>();
for(Module modifyModule: initialMarkedModules){
for(Component component : modifyModule.getComponents()){
if(results.get(modifyModule) == null)
results.put(modifyModule, new HashSet<Component>());
results.get(modifyModule).add(component);
}
}
List<ModifyModule<Module>> modulesToModify = changePropagationDueToHardwareChange.getModuleModifications();
for(ModifyModule<Module> moduleToModify : modulesToModify){
Module affectedModule = moduleToModify.getAffectedElement();
for(Component component : affectedModule.getComponents()){
if(results.get(affectedModule) == null)
results.put(affectedModule, new HashSet<Component>());
results.get(affectedModule).add(component);
}
}
return results;
}
/**
*
* @param initialMarkedModules
* @param changePropagationDueToHardwareChange
* @return
*/
public static Map<Module, Set<Interface>> lookUpInterfacesOfModules(Collection<? extends Module> initialMarkedModules,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Module, Set<Interface>> results = new HashMap<Module, Set<Interface>>();
for(Module modifyModule: initialMarkedModules){
for(Interface interfac : modifyModule.getInterfaces()){
if(results.get(modifyModule) == null)
results.put(modifyModule, new HashSet<Interface>());
results.get(modifyModule).add(interfac);
}
}
List<ModifyModule<Module>> modulesToModify = changePropagationDueToHardwareChange.getModuleModifications();
for(ModifyModule<Module> moduleToModify : modulesToModify){
Module affectedModule = moduleToModify.getAffectedElement();
for(Interface interfac : affectedModule.getInterfaces()){
if(results.get(affectedModule) == null)
results.put(affectedModule, new HashSet<Interface>());
results.get(affectedModule).add(interfac);
}
}
return results;
}
/* ########################################################################################################################
* # COMPONENT LOOKUP SECTION ###########################################################################################
* ########################################################################################################################
*/
/**
*
* @param version
* @param initialMarkedComponents
* @return
*/
public static Map<Component, Structure> lookUpParentStructuresOfComponents(Collection<Component> initialMarkedComponents,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Component, Structure> results = new HashMap<Component, Structure>();
for(Component modifyComponent : initialMarkedComponents){
if(modifyComponent.getParent() != null)
results.put(modifyComponent, (Structure) modifyComponent.getParent());
}
List<ModifyComponent<Component>> componentsToModify = changePropagationDueToHardwareChange.getComponentModifications();
for(ModifyComponent<Component> componentToModify : componentsToModify){
Component affectedComponent = componentToModify.getAffectedElement();
Structure parent = affectedComponent.getParent();
if(parent != null){
results.put(affectedComponent, parent);
}
}
return results;
}
/**
*
* @param version
* @param initialMarkedComponents
* @return
*/
public static Map<Component, Module> lookUpParentModulesOfComponents(Collection<Component> initialMarkedComponents,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Component, Module> results = new HashMap<Component, Module>();
for(Component modifyComponent : initialMarkedComponents){
if(modifyComponent.getParentModule() instanceof Module)
results.put(modifyComponent, (Module)modifyComponent.getParentModule());
}
List<ModifyComponent<Component>> componentsToModify = changePropagationDueToHardwareChange.getComponentModifications();
for(ModifyComponent<Component> componentToModify : componentsToModify){
Component affectedComponent = componentToModify.getAffectedElement();
Module parent = affectedComponent.getParentModule();
if(parent != null){
results.put(affectedComponent, parent);
}
}
return results;
}
/**
*
* @param version
* @param initialMarkedComponents
* @return
*/
public static Map<Component, Set<Interface>> lookUpInterfacesOfComponents(Collection<? extends Component> initialMarkedComponents,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Component, Set<Interface>> results = new HashMap<Component, Set<Interface>>();
for(Component modifyComponent : initialMarkedComponents){
for(Interface interfac : modifyComponent.getConnectedInterfaces()){
if(results.get(modifyComponent) == null)
results.put(modifyComponent, new HashSet<Interface>());
results.get(modifyComponent).add(interfac);
}
}
List<ModifyComponent<Component>> componentsToModify = changePropagationDueToHardwareChange.getComponentModifications();
for(ModifyComponent<Component> componentToModify : componentsToModify){
Component affectedComponent = componentToModify.getAffectedElement();
for(Interface interfac : affectedComponent.getConnectedInterfaces()){
if(results.get(affectedComponent) == null)
results.put(affectedComponent, new HashSet<Interface>());
results.get(affectedComponent).add(interfac);
}
}
return results;
}
/* ########################################################################################################################
* # INTERFACE LOOKUP SECTION ###########################################################################################
* ########################################################################################################################
*/
/**
*
* @param initialMarkedInterfaces
* @return
*/
public static Map<Interface, Set<Module>> lookUpParentModulesOfInterfaces(
Collection<? extends Interface> initialMarkedInterfaces,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Interface, Set<Module>> results = new HashMap<Interface, Set<Module>>();
for(Interface modifyInterface : initialMarkedInterfaces){
for(Identifier parent : modifyInterface.getParentElement()){
if(parent instanceof Module){
if(results.get(modifyInterface) == null)
results.put(modifyInterface, new HashSet<Module>());
results.get(modifyInterface).add((Module) parent);
}
}
}
List<ModifyInterface<Interface>> interfacesToModify = changePropagationDueToHardwareChange.getInterfaceModifications();
for(ModifyInterface<Interface> modifyInterface : interfacesToModify){
for(Identifier parent : modifyInterface.getAffectedElement().getParentElement()){
if(parent instanceof Module){
if(results.get(modifyInterface.getAffectedElement()) == null)
results.put(modifyInterface.getAffectedElement(), new HashSet<Module>());
results.get(modifyInterface.getAffectedElement()).add((Module) parent);
}
}
}
return results;
}
/**
*
* @param initialMarkedInterfaces
* @return
*/
public static Map<Interface, Set<Component>> lookUpParentComponentsOfInterfaces(
Collection<? extends Interface> initialMarkedInterfaces,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Interface, Set<Component>> results = new HashMap<Interface, Set<Component>>();
for(Interface modifyInterface : initialMarkedInterfaces){
for(Identifier parent : modifyInterface.getParentElement()){
if(parent instanceof Component){
if(results.get(modifyInterface) == null)
results.put(modifyInterface, new HashSet<Component>());
results.get(modifyInterface).add((Component) parent);
}
}
}
for(ModifyInterface<Interface> modifyInterface : changePropagationDueToHardwareChange.getInterfaceModifications()){
for(Identifier parent : modifyInterface.getAffectedElement().getParentElement()){
if(parent instanceof Component){
if(results.get(modifyInterface.getAffectedElement()) == null)
results.put(modifyInterface.getAffectedElement(), new HashSet<Component>());
results.get(modifyInterface.getAffectedElement()).add((Component) parent);
}
}
}
return results;
}
/**
*
* @param initialMarkedInterfaces
* @return
*/
public static Map<Interface, Set<GlobalVariable>> lookUpGlobalVariablesOfInterfaces(
APSArchitectureVersion version,
Collection<? extends Interface> initialMarkedInterfaces,
ChangePropagationDueToHardwareChange changePropagationDueToHardwareChange){
Map<Interface, Set<GlobalVariable>> results = new HashMap<Interface, Set<GlobalVariable>>();
for(Interface modifyInterface : initialMarkedInterfaces){
if(results.get(modifyInterface) == null)
results.put(modifyInterface, new HashSet<GlobalVariable>());
for(ComponentCorrelation correlation : version.getDeploymentContextRepository().getComponentCorrelation()) {
for(VariableMapping mapping : correlation.getVariableMapping()) {
if(modifyInterface.getId().equals(mapping.getInterfaceDeclaration().getId())) {
results.get(modifyInterface).add(mapping.getProgramVariable());
}
}
}
}
return results;
}
//#########################################################################################################################################
//#########################################################################################################################################
//#########################################################################################################################################
public static Map<Component, Set<ModifyInterface<Interface>>> lookUpChangesBasedOnSignalInterfaces(APSArchitectureVersion version,
Collection<ModifyInterface<Interface>> initialMarkedInterfaces){
Map<Component, Set<ModifyInterface<Interface>>> results = new HashMap<Component, Set<ModifyInterface<Interface>>>();
for(ModifyInterface<Interface> modifyInterface : initialMarkedInterfaces){
for(Component component : version.getAPSPlant().getComponentRepository().getAllComponentsInPlant()){
if(modifyInterface.getAffectedElement() instanceof Interface){
for(Interface componentInterface : component.getConnectedInterfaces()){
if(componentInterface.getId() == modifyInterface.getAffectedElement().getId()){
if(results.get(component) == null)
results.put(component, new HashSet<ModifyInterface<Interface>>());
results.get(component).add(modifyInterface);
}
}
}
}
}
return results;
}
/**
 * Computes, starting from the given bus boxes, the closure of bus components
 * (masters, slaves, cables) affected by a bus modification.
 *
 * The while loop is a fixed-point iteration: each pass re-derives the interfaces
 * of everything marked so far and may mark further components. The helper methods
 * set {@code bcParams.hasChanged} whenever they mark something new; the loop stops
 * once a full pass marks nothing.
 *
 * NOTE(review): the result accumulates in the static {@code bcParams} field shared
 * with the helper methods, so this method is not safe for concurrent invocation —
 * confirm it is only ever called single-threaded.
 *
 * @param version the architecture version whose plant is inspected
 * @param initialMarkedBusBoxes the bus boxes initially marked as modified
 * @return the populated parameter object holding all affected bus components
 */
public static BusComponentsParams lookUpChangesBasedOnBusModification(APSArchitectureVersion version,
Collection<BusBox> initialMarkedBusBoxes){
EList<Component> allComponents = version.getAPSPlant().getComponentRepository().getAllComponentsInPlant();
bcParams = initBusComponentParams(initialMarkedBusBoxes);
setAllBusComponentsInParams(allComponents);
while(bcParams.hasChanged){
bcParams.hasChanged = false;
// Interfaces of everything marked so far, re-computed on every pass.
EList<Interface> interfacesOfBusBoxes = getAllInterfacesOfMarkedBusBoxes();
EList<Interface> interfacesOfBusMasters = getAllInterfacesOfMarkedBusMasters();
EList<Interface> interfacesOfBusSlaves = getAllInterfacesOfMarkedBusSlaves();
EList<Interface> interfacesOfBusCables = getAllInterfacesOfMarkedBusCables();
addAllBusMastersThatAreConnectedToTheBusCables(interfacesOfBusCables);
addAllBusSlavessThatAreConnectedToTheBusCables(interfacesOfBusCables);
// Despite the name, these calls MARK cables connected to the given interfaces.
removeAllBusCablesThatAreNotConnectedToBusComponent(interfacesOfBusBoxes);
removeAllBusCablesThatAreNotConnectedToBusComponent(interfacesOfBusMasters);
removeAllBusCablesThatAreNotConnectedToBusComponent(interfacesOfBusSlaves);
// NOTE(review): these two calls already ran above with the same argument;
// confirm the duplication is intended (it is harmless but redundant-looking).
addAllBusMastersThatAreConnectedToTheBusCables(interfacesOfBusCables);
addAllBusSlavessThatAreConnectedToTheBusCables(interfacesOfBusCables);
}
// Finally record which signal plugs caused each marked cable to be affected.
updateBusCableCausingsByBusMasters();
updateBusCableCausingsByBusSlaves();
updateBusCableCausingsByBusBoxes();
return bcParams;
}
/**
 * For every marked bus cable, records which of its signal plugs appear among the
 * causing elements of the already-marked bus boxes.
 */
private static void updateBusCableCausingsByBusBoxes() {
    for (BusBox box : bcParams.causingElementsOfBusBox.keySet()) {
        Set<Identifier> boxCauses = bcParams.causingElementsOfBusBox.get(box);
        for (BusCable cable : bcParams.busCablesToChange) {
            Set<Identifier> cableCauses = bcParams.causingElementsOfBusCable.get(cable);
            if (cableCauses == null) {
                cableCauses = new HashSet<Identifier>();
                bcParams.causingElementsOfBusCable.put(cable, cableCauses);
            }
            // At most one plug is recorded per cable and box (else-if, as before).
            if (boxCauses.contains(cable.getSignalPlug1())) {
                cableCauses.add((Identifier) cable.getSignalPlug1());
            } else if (boxCauses.contains(cable.getSignalPlug2())) {
                cableCauses.add((Identifier) cable.getSignalPlug2());
            }
        }
    }
}
/**
 * For every marked bus cable, records which of its signal plugs appear among the
 * causing elements of the already-marked bus slaves.
 */
private static void updateBusCableCausingsByBusSlaves() {
    for (BusSlave slave : bcParams.causingElementsOfBusSlave.keySet()) {
        Set<Identifier> slaveCauses = bcParams.causingElementsOfBusSlave.get(slave);
        for (BusCable cable : bcParams.busCablesToChange) {
            Set<Identifier> cableCauses = bcParams.causingElementsOfBusCable.get(cable);
            if (cableCauses == null) {
                cableCauses = new HashSet<Identifier>();
                bcParams.causingElementsOfBusCable.put(cable, cableCauses);
            }
            // At most one plug is recorded per cable and slave (else-if, as before).
            if (slaveCauses.contains(cable.getSignalPlug1())) {
                cableCauses.add((Identifier) cable.getSignalPlug1());
            } else if (slaveCauses.contains(cable.getSignalPlug2())) {
                cableCauses.add((Identifier) cable.getSignalPlug2());
            }
        }
    }
}
/**
 * For every marked bus cable, records which of its signal plugs appear among the
 * causing elements of the already-marked bus masters.
 */
private static void updateBusCableCausingsByBusMasters() {
    for (BusMaster master : bcParams.causingElementsOfBusMaster.keySet()) {
        Set<Identifier> masterCauses = bcParams.causingElementsOfBusMaster.get(master);
        for (BusCable cable : bcParams.busCablesToChange) {
            Set<Identifier> cableCauses = bcParams.causingElementsOfBusCable.get(cable);
            if (cableCauses == null) {
                cableCauses = new HashSet<Identifier>();
                bcParams.causingElementsOfBusCable.put(cable, cableCauses);
            }
            // At most one plug is recorded per cable and master (else-if, as before).
            if (masterCauses.contains(cable.getSignalPlug1())) {
                cableCauses.add((Identifier) cable.getSignalPlug1());
            } else if (masterCauses.contains(cable.getSignalPlug2())) {
                cableCauses.add((Identifier) cable.getSignalPlug2());
            }
        }
    }
}
/**
 * Marks every bus slave whose master- or slave-side signal interface is connected
 * to one of the given bus-cable interfaces. Newly marked slaves set
 * {@code bcParams.hasChanged} so the caller's fixed-point loop keeps iterating.
 *
 * @param interfacesOfBusCables interfaces of all currently marked bus cables
 */
private static void addAllBusSlavessThatAreConnectedToTheBusCables(EList<Interface> interfacesOfBusCables) {
    EList<BusSlave> slavesToAdd = new BasicEList<BusSlave>();
    for (BusSlave bs : bcParams.allBusSlaves) {
        for (Interface bcInterface : interfacesOfBusCables) {
            // Bug fix: ids were compared with '==' (reference identity), which misses
            // logically equal ids; equals() matches the comparison style used elsewhere.
            if (bs.getSignalinterface_master() != null
                    && bcInterface.getId().equals(bs.getSignalinterface_master().getId())) {
                updateSlavesToAdd(slavesToAdd, bs, bcInterface);
            }
            if (bs.getSignalinterface_slave() != null
                    && bcInterface.getId().equals(bs.getSignalinterface_slave().getId())) {
                updateSlavesToAdd(slavesToAdd, bs, bcInterface);
            }
        }
    }
    for (BusSlave bs : slavesToAdd) {
        if (!bcParams.busSlavesToChange.contains(bs)) {
            bcParams.busSlavesToChange.add(bs);
            bcParams.hasChanged = true;
        }
    }
}
/**
 * Queues a bus slave for marking and records the cable interface that caused it.
 *
 * @param slavesToAdd accumulator of slaves to mark after the scan
 * @param bs the affected bus slave
 * @param bcInterface the bus-cable interface that triggered the match
 */
private static void updateSlavesToAdd(EList<BusSlave> slavesToAdd, BusSlave bs, Interface bcInterface) {
    slavesToAdd.add(bs);
    Set<Identifier> causes = bcParams.causingElementsOfBusSlave.get(bs);
    if (causes == null) {
        causes = new HashSet<Identifier>();
        bcParams.causingElementsOfBusSlave.put(bs, causes);
    }
    causes.add((Identifier) bcInterface);
}
/**
 * Marks every bus master whose signal or controller interface is connected to one
 * of the given bus-cable interfaces. Newly marked masters set
 * {@code bcParams.hasChanged} so the caller's fixed-point loop keeps iterating.
 *
 * @param interfacesOfBusCables interfaces of all currently marked bus cables
 */
private static void addAllBusMastersThatAreConnectedToTheBusCables(EList<Interface> interfacesOfBusCables) {
    EList<BusMaster> mastersToAdd = new BasicEList<BusMaster>();
    for (BusMaster bm : bcParams.allBusMasters) {
        for (Interface bcInterface : interfacesOfBusCables) {
            for (Interface si : bm.getSignalinterfaces()) {
                if (bcInterface.getId().equals(si.getId())) {
                    updateMastersToAdd(mastersToAdd, bm, bcInterface);
                }
            }
            // Bug fixes: (1) guard against a missing controller interface — the slave
            // variant already null-checks its interfaces; (2) compare ids with equals()
            // instead of '==' (reference identity), matching the loop above.
            if (bm.getSignalinterface_controller() != null
                    && bm.getSignalinterface_controller().getId().equals(bcInterface.getId())) {
                updateMastersToAdd(mastersToAdd, bm, bcInterface);
            }
        }
    }
    for (BusMaster bm : mastersToAdd) {
        if (!bcParams.busMastersToChange.contains(bm)) {
            bcParams.busMastersToChange.add(bm);
            bcParams.hasChanged = true;
        }
    }
}
/**
 * Queues a bus master for marking and records the cable interface that caused it.
 *
 * @param mastersToAdd accumulator of masters to mark after the scan
 * @param bm the affected bus master
 * @param bcInterface the bus-cable interface that triggered the match
 */
private static void updateMastersToAdd(EList<BusMaster> mastersToAdd, BusMaster bm, Interface bcInterface) {
    mastersToAdd.add(bm);
    Set<Identifier> causes = bcParams.causingElementsOfBusMaster.get(bm);
    if (causes == null) {
        causes = new HashSet<Identifier>();
        bcParams.causingElementsOfBusMaster.put(bm, causes);
    }
    causes.add((Identifier) bcInterface);
}
/**
 * Fills the {@code bcParams.all*} registries by scanning the given plant
 * components once per bus component kind.
 *
 * @param plantComponents all components of the plant
 */
private static void setAllBusComponentsInParams(EList<Component> plantComponents) {
    getAllBusBoxesInTheSystem(plantComponents);
    getAllBusMastersInTheSystem(plantComponents);
    getAllBusSlavesInTheSystem(plantComponents);
    getAllBusCablesInTheSystem(plantComponents);
}
/**
 * Creates a fresh {@link BusComponentsParams} holder: the initially marked bus
 * boxes seed the change set, every other collection starts empty, and
 * {@code hasChanged} is true so the caller's fixed-point loop runs at least once.
 *
 * @param initialMarkedBusBoxes the bus boxes initially marked as modified
 * @return the initialized parameter object
 */
private static BusComponentsParams initBusComponentParams(Collection<BusBox> initialMarkedBusBoxes) {
    BusComponentsParams params = new BusComponentsParams();
    // Change sets — only the bus boxes are seeded from the caller.
    params.busBoxesToChange = new HashSet<BusBox>(initialMarkedBusBoxes);
    params.busMastersToChange = new HashSet<BusMaster>();
    params.busSlavesToChange = new HashSet<BusSlave>();
    params.busCablesToChange = new HashSet<BusCable>();
    // Registries of every bus component in the plant (filled by setAllBusComponentsInParams).
    params.allBusBoxes = new HashSet<BusBox>();
    params.allBusMasters = new HashSet<BusMaster>();
    params.allBusSlaves = new HashSet<BusSlave>();
    params.allBusCables = new HashSet<BusCable>();
    // Per-component records of which identifiers caused the marking.
    params.causingElementsOfBusBox = new HashMap<BusBox, Set<Identifier>>();
    params.causingElementsOfBusMaster = new HashMap<BusMaster, Set<Identifier>>();
    params.causingElementsOfBusSlave = new HashMap<BusSlave, Set<Identifier>>();
    params.causingElementsOfBusCable = new HashMap<BusCable, Set<Identifier>>();
    params.hasChanged = true;
    return params;
}
/**
 * Marks every bus cable whose signal plug matches one of the given component
 * interfaces. NOTE(review): despite its name this method removes nothing — it
 * only ADDS connected cables to {@code bcParams.busCablesToChange}; the name is
 * kept to preserve the interface.
 *
 * @param interfacesOfBusComponent interfaces of the marked bus components (entries may be null)
 */
private static void removeAllBusCablesThatAreNotConnectedToBusComponent(EList<Interface> interfacesOfBusComponent) {
    EList<BusCable> cablesToAdd = new BasicEList<BusCable>();
    for (BusCable bc : bcParams.allBusCables) {
        for (Interface si : interfacesOfBusComponent) {
            if (si == null) {
                continue;
            }
            // Bug fix: ids were compared with '==' (reference identity), which misses
            // logically equal ids; equals() matches the style used elsewhere in this class.
            if (si.getId().equals(bc.getSignalPlug1().getId())
                    || si.getId().equals(bc.getSignalPlug2().getId())) {
                cablesToAdd.add(bc);
            }
        }
    }
    for (BusCable bc : cablesToAdd) {
        if (!bcParams.busCablesToChange.contains(bc)) {
            bcParams.busCablesToChange.add(bc);
            bcParams.hasChanged = true;
        }
    }
}
/** Collects every {@code BusBox} from the given components into {@code bcParams.allBusBoxes}. */
private static void getAllBusBoxesInTheSystem(EList<Component> allComponents) {
    for (Component candidate : allComponents) {
        if (!(candidate instanceof BusBox)) {
            continue;
        }
        bcParams.allBusBoxes.add((BusBox) candidate);
    }
}
/** Collects every {@code BusMaster} from the given components into {@code bcParams.allBusMasters}. */
private static void getAllBusMastersInTheSystem(EList<Component> allComponents) {
    for (Component candidate : allComponents) {
        if (!(candidate instanceof BusMaster)) {
            continue;
        }
        bcParams.allBusMasters.add((BusMaster) candidate);
    }
}
/** Collects every {@code BusSlave} from the given components into {@code bcParams.allBusSlaves}. */
private static void getAllBusSlavesInTheSystem(EList<Component> allComponents) {
    for (Component candidate : allComponents) {
        if (!(candidate instanceof BusSlave)) {
            continue;
        }
        bcParams.allBusSlaves.add((BusSlave) candidate);
    }
}
/** Collects every {@code BusCable} from the given components into {@code bcParams.allBusCables}. */
private static void getAllBusCablesInTheSystem(EList<Component> allComponents) {
    for (Component candidate : allComponents) {
        if (!(candidate instanceof BusCable)) {
            continue;
        }
        bcParams.allBusCables.add((BusCable) candidate);
    }
}
/**
 * Gathers the power supply, box, master, and signal interfaces of every bus box
 * currently marked for change. Entries may be null if an interface is unset.
 *
 * @return the interfaces of all marked bus boxes
 */
private static EList<Interface> getAllInterfacesOfMarkedBusBoxes() {
    EList<Interface> result = new BasicEList<Interface>();
    for (BusBox box : bcParams.busBoxesToChange) {
        result.add(box.getPowersupply());
        result.add(box.getSignalinterface_box());
        result.add(box.getSignalinterface_master());
        result.addAll(box.getSignalinterfaces());
    }
    return result;
}
/**
 * Gathers the controller and signal interfaces of every bus master currently
 * marked for change.
 *
 * @return the interfaces of all marked bus masters
 */
private static EList<Interface> getAllInterfacesOfMarkedBusMasters() {
    EList<Interface> result = new BasicEList<Interface>();
    for (BusMaster master : bcParams.busMastersToChange) {
        result.add(master.getSignalinterface_controller());
        result.addAll(master.getSignalinterfaces());
    }
    return result;
}
/**
 * Gathers the master-side, slave-side, and connected interfaces of every bus
 * slave currently marked for change.
 *
 * @return the interfaces of all marked bus slaves
 */
private static EList<Interface> getAllInterfacesOfMarkedBusSlaves() {
    EList<Interface> result = new BasicEList<Interface>();
    for (BusSlave slave : bcParams.busSlavesToChange) {
        result.add(slave.getSignalinterface_master());
        result.add(slave.getSignalinterface_slave());
        result.addAll(slave.getConnectedInterfaces());
    }
    return result;
}
/**
 * Gathers both signal plugs of every bus cable currently marked for change.
 *
 * @return the plug interfaces of all marked bus cables
 */
private static EList<Interface> getAllInterfacesOfMarkedBusCables() {
    EList<Interface> result = new BasicEList<Interface>();
    for (BusCable cable : bcParams.busCablesToChange) {
        result.add(cable.getSignalPlug1());
        result.add(cable.getSignalPlug2());
    }
    return result;
}
/**
 * Data exchange class used to avoid long parameter lists. It stores all bus
 * components of the system, the bus components identified as affected by a
 * change, and the causing entities. The look up methods of the parent class
 * {@code ArchitectureModelLookup} fill the parameters of the class.
 *
 * @author Sandro Koch
 *
 */
public static class BusComponentsParams{
// Bus components identified as affected by the change.
public Set<BusBox> busBoxesToChange;
public Set<BusMaster> busMastersToChange;
public Set<BusSlave> busSlavesToChange;
public Set<BusCable> busCablesToChange;
// All bus components present in the plant.
public Set<BusBox> allBusBoxes;
public Set<BusMaster> allBusMasters;
public Set<BusSlave> allBusSlaves;
public Set<BusCable> allBusCables;
// For each affected component, the identifiers that caused its marking.
public Map<BusBox, Set<Identifier>> causingElementsOfBusBox;
public Map<BusMaster, Set<Identifier>> causingElementsOfBusMaster;
public Map<BusSlave, Set<Identifier>> causingElementsOfBusSlave;
public Map<BusCable, Set<Identifier>> causingElementsOfBusCable;
// Set whenever a look-up pass marks something new; drives the fixed-point loop.
public boolean hasChanged;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import com.google.common.base.Joiner;
import org.apache.cassandra.schema.ColumnMetadata;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.cql3.Term.Raw;
import org.apache.cassandra.cql3.restrictions.Restriction;
import org.apache.cassandra.cql3.restrictions.TokenRestriction;
import org.apache.cassandra.cql3.statements.Bound;
import org.apache.cassandra.exceptions.InvalidRequestException;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkContainsNoDuplicates;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkContainsOnly;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkTrue;
import static org.apache.cassandra.cql3.statements.RequestValidations.invalidRequest;
/**
* A relation using the token function.
* Examples:
* <ul>
* <li>SELECT ... WHERE token(a) > token(1)</li>
* <li>SELECT ... WHERE token(a, b) > token(1, 3)</li>
* </ul>
*/
public final class TokenRelation extends Relation
{
    /** The partition key columns wrapped by the token() call. */
    private final List<ColumnIdentifier> entities;

    /** The raw term the token expression is compared against. */
    private final Term.Raw value;

    public TokenRelation(List<ColumnIdentifier> entities, Operator type, Term.Raw value)
    {
        this.relationType = type;
        this.entities = entities;
        this.value = value;
    }

    @Override
    public boolean onToken()
    {
        return true;
    }

    public Term.Raw getValue()
    {
        return value;
    }

    public List<? extends Term.Raw> getInValues()
    {
        // Token relations never carry IN values.
        return null;
    }

    @Override
    protected Restriction newEQRestriction(TableMetadata table, VariableSpecifications boundNames)
    {
        List<ColumnMetadata> pkColumns = getColumnDefinitions(table);
        Term term = toTerm(toReceivers(table, pkColumns), value, table.keyspace, boundNames);
        return new TokenRestriction.EQRestriction(table, pkColumns, term);
    }

    @Override
    protected Restriction newINRestriction(TableMetadata table, VariableSpecifications boundNames)
    {
        throw invalidRequest("%s cannot be used with the token function", operator());
    }

    @Override
    protected Restriction newSliceRestriction(TableMetadata table,
                                              VariableSpecifications boundNames,
                                              Bound bound,
                                              boolean inclusive)
    {
        List<ColumnMetadata> pkColumns = getColumnDefinitions(table);
        Term term = toTerm(toReceivers(table, pkColumns), value, table.keyspace, boundNames);
        return new TokenRestriction.SliceRestriction(table, pkColumns, bound, inclusive, term);
    }

    @Override
    protected Restriction newContainsRestriction(TableMetadata table, VariableSpecifications boundNames, boolean isKey)
    {
        throw invalidRequest("%s cannot be used with the token function", operator());
    }

    @Override
    protected Restriction newIsNotRestriction(TableMetadata table, VariableSpecifications boundNames)
    {
        throw invalidRequest("%s cannot be used with the token function", operator());
    }

    @Override
    protected Restriction newLikeRestriction(TableMetadata table, VariableSpecifications boundNames, Operator operator)
    {
        throw invalidRequest("%s cannot be used with the token function", operator);
    }

    @Override
    protected Term toTerm(List<? extends ColumnSpecification> receivers,
                          Raw raw,
                          String keyspace,
                          VariableSpecifications boundNames) throws InvalidRequestException
    {
        // A token relation has exactly one receiver: the synthetic token column.
        Term prepared = raw.prepare(keyspace, receivers.get(0));
        prepared.collectMarkerSpecification(boundNames);
        return prepared;
    }

    @Override
    public Relation renameIdentifier(ColumnIdentifier from, ColumnIdentifier to)
    {
        if (!entities.contains(from))
            return this;

        List<ColumnIdentifier> renamed = new ArrayList<>(entities.size());
        for (ColumnIdentifier entity : entities)
            renamed.add(entity.equals(from) ? to : entity);
        return new TokenRelation(renamed, operator(), value);
    }

    @Override
    public String toCQLString()
    {
        return String.format("token%s %s %s", Tuples.tupleToString(entities, ColumnIdentifier::toCQLString), relationType, value);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(relationType, entities, value);
    }

    @Override
    public boolean equals(Object o)
    {
        if (o == this)
            return true;

        if (!(o instanceof TokenRelation))
            return false;

        TokenRelation other = (TokenRelation) o;
        return relationType.equals(other.relationType)
            && entities.equals(other.entities)
            && value.equals(other.value);
    }

    /**
     * Resolves the column metadata for every entity inside the token() call.
     *
     * @param table the table metadata
     * @return the column definitions, in the order the entities were written
     * @throws InvalidRequestException if an entity cannot be resolved
     */
    private List<ColumnMetadata> getColumnDefinitions(TableMetadata table)
    {
        return entities.stream()
                       .map(table::getExistingColumn)
                       .collect(Collectors.toList());
    }

    /**
     * Computes the single receiver for this relation: a synthetic column typed by
     * the partitioner's token validator. The token() arguments must be exactly the
     * partition key columns, in declaration order.
     *
     * @param table the table meta data
     * @param columnDefs the resolved token() argument columns
     * @return a singleton list holding the synthetic "partition key token" column
     * @throws InvalidRequestException if the relation is invalid
     */
    private static List<? extends ColumnSpecification> toReceivers(TableMetadata table,
                                                                   List<ColumnMetadata> columnDefs)
                                                                   throws InvalidRequestException
    {
        if (!columnDefs.equals(table.partitionKeyColumns()))
        {
            checkTrue(columnDefs.containsAll(table.partitionKeyColumns()),
                      "The token() function must be applied to all partition key components or none of them");

            checkContainsNoDuplicates(columnDefs, "The token() function contains duplicate partition key components");

            checkContainsOnly(columnDefs, table.partitionKeyColumns(), "The token() function must contains only partition key components");

            throw invalidRequest("The token function arguments must be in the partition key order: %s",
                                 Joiner.on(", ").join(ColumnMetadata.toIdentifiers(table.partitionKeyColumns())));
        }

        ColumnMetadata firstColumn = columnDefs.get(0);
        return Collections.singletonList(new ColumnSpecification(firstColumn.ksName,
                                                                 firstColumn.cfName,
                                                                 new ColumnIdentifier("partition key token", true),
                                                                 table.partitioner.getTokenValidator()));
    }
}
| |
/*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2013 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.images.impl;
import com.adobe.acs.commons.dam.RenditionPatternPicker;
import com.adobe.acs.commons.images.ImageTransformer;
import com.adobe.acs.commons.images.NamedImageTransformer;
import com.adobe.acs.commons.util.PathInfoUtil;
import com.day.cq.commons.jcr.JcrConstants;
import com.day.cq.dam.api.Asset;
import com.day.cq.dam.api.Rendition;
import com.day.cq.dam.commons.util.DamUtil;
import com.day.cq.wcm.api.NameConstants;
import com.day.cq.wcm.api.Page;
import com.day.cq.wcm.api.PageManager;
import com.day.cq.wcm.foundation.Image;
import com.day.image.Layer;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.References;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ValueMap;
import org.apache.sling.api.servlets.OptingServlet;
import org.apache.sling.api.servlets.SlingSafeMethodsServlet;
import org.apache.sling.api.wrappers.ValueMapDecorator;
import org.apache.sling.commons.mime.MimeTypeService;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.imageio.ImageIO;
import javax.jcr.RepositoryException;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@SuppressWarnings("serial")
@Component(
label = "ACS AEM Commons - Named Transform Image Servlet",
description = "Transform images programatically by applying a named transform to the requested Image.",
metatype = true
)
@Properties({
@Property(
label = "Resource Types",
description = "Resource Types and Node Types to bind this servlet to.",
name = "sling.servlet.resourceTypes",
value = { "nt/file", "nt/resource", "dam/Asset", "cq/Page", "cq/PageContent", "nt/unstructured",
"foundation/components/image", "foundation/components/parbase", "foundation/components/page" },
propertyPrivate = false
),
@Property(
label = "Extension",
description = "",
name = "sling.servlet.extensions",
value = { "transform" },
propertyPrivate = true
),
@Property(
name = "sling.servlet.methods",
value = { "GET" },
propertyPrivate = true
)
})
@References({
@Reference(
name = "namedImageTransformers",
referenceInterface = NamedImageTransformer.class,
policy = ReferencePolicy.DYNAMIC,
cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE
),
@Reference(
name = "imageTransformers",
referenceInterface = ImageTransformer.class,
policy = ReferencePolicy.DYNAMIC,
cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE
)
})
@Service(Servlet.class)
public class NamedTransformImageServlet extends SlingSafeMethodsServlet implements OptingServlet {
private static final Logger log = LoggerFactory.getLogger(NamedTransformImageServlet.class);
@Reference
private MimeTypeService mimeTypeService;
private static final ValueMap EMPTY_PARAMS = new ValueMapDecorator(new LinkedHashMap<String, Object>());
private static final Pattern LAST_SUFFIX_PATTERN = Pattern.compile("(image|img)\\.(.+)");
private static final String MIME_TYPE_PNG = "image/png";
private static final String TYPE_QUALITY = "quality";
private Map<String, NamedImageTransformer> namedImageTransformers =
new ConcurrentHashMap<String, NamedImageTransformer>();
private Map<String, ImageTransformer> imageTransformers = new ConcurrentHashMap<String, ImageTransformer>();
/* Asset Rendition Pattern Picker */
private static final String DEFAULT_ASSET_RENDITION_PICKER_REGEX = "cq5dam\\.web\\.(.*)";
@Property(label = "Asset Rendition Picker Regex",
description = "Regex to select the Rendition to transform when directly transforming a DAM Asset."
+ " [ Default: cq5dam.web.(.*) ]",
value = DEFAULT_ASSET_RENDITION_PICKER_REGEX)
private static final String PROP_ASSET_RENDITION_PICKER_REGEX = "prop.asset-rendition-picker-regex";
private static RenditionPatternPicker renditionPatternPicker =
new RenditionPatternPicker(Pattern.compile(DEFAULT_ASSET_RENDITION_PICKER_REGEX));
/**
 * Accepts a request only when all of the following hold:
 * - the request is non-null and has a non-blank suffix
 * - the first suffix segment names a registered named transform
 * - the last suffix segment matches the image file name pattern
 *
 * @param request SlingRequest object
 * @return true if the Servlet should handle the request
 */
@Override
public final boolean accepts(SlingHttpServletRequest request) {
    if (request == null) {
        return false;
    }

    final String suffix = request.getRequestPathInfo().getSuffix();
    if (StringUtils.isBlank(suffix)) {
        return false;
    }

    // First suffix segment must name a registered transform.
    if (!this.namedImageTransformers.containsKey(PathInfoUtil.getFirstSuffixSegment(request))) {
        return false;
    }

    // Last suffix segment must look like an image file name.
    return LAST_SUFFIX_PATTERN.matcher(PathInfoUtil.getLastSuffixSegment(request)).matches();
}
/**
 * Renders the requested image after applying the named transforms encoded in the
 * request suffix. Responds 404 when the image layer cannot be created.
 *
 * @param request the request naming the image and its transforms
 * @param response receives the transformed image bytes
 * @throws ServletException on servlet failure
 * @throws IOException on write failure
 */
@Override
protected final void doGet(final SlingHttpServletRequest request, final SlingHttpServletResponse response) throws
        ServletException, IOException {
    // Resolve the named transforms from the suffix and merge their params.
    final List<NamedImageTransformer> namedTransformers = getNamedImageTransformers(request);
    final ValueMap transformsWithParams = getImageTransformersWithParams(namedTransformers);

    final Image image = resolveImage(request);
    final String mimeType = getMimeType(request, image);

    Layer layer = getLayer(image);
    if (layer == null) {
        response.setStatus(SlingHttpServletResponse.SC_NOT_FOUND);
        return;
    }

    layer = transform(layer, transformsWithParams);

    // Quality is a "synthetic" transform handled outside the layer pipeline.
    final double quality = getQuality(mimeType, transformsWithParams.get(TYPE_QUALITY, EMPTY_PARAMS));

    response.setContentType(mimeType);
    layer.write(mimeType, quality, response.getOutputStream());
    response.flushBuffer();
}
/**
 * Execute the ImageTransformers as specified by the Request's suffix segments against the Image layer.
 *
 * @param layer the Image layer
 * @param imageTransformersWithParams the transforms and their params
 * @return the transformed Image layer
 */
protected final Layer transform(Layer layer, final ValueMap imageTransformersWithParams) {
    for (final String type : imageTransformersWithParams.keySet()) {
        if (StringUtils.equals(TYPE_QUALITY, type)) {
            // Do not process the "quality" transform in the usual manner;
            // it is applied separately via getQuality().
            continue;
        }
        final ImageTransformer imageTransformer = this.imageTransformers.get(type);
        if (imageTransformer == null) {
            // Bug fix: the placeholder previously had no argument, so the log line
            // printed a literal "{}" instead of the missing transformer type.
            log.warn("Skipping transform. Missing ImageTransformer for type: {}", type);
            continue;
        }
        final ValueMap transformParams = imageTransformersWithParams.get(type, EMPTY_PARAMS);
        if (transformParams != null) {
            layer = imageTransformer.transform(layer, transformParams);
        }
    }
    return layer;
}
/**
 * Extracts the ordered list of NamedImageTransformers named by the request's
 * suffix segments. The final segment (the image name) and, if present, a numeric
 * segment just before it (presumably a cache-key — confirm against callers) are
 * ignored; unknown transform names are skipped.
 *
 * @param request the SlingHttpServletRequest object
 * @return the transformers named by the suffix, in suffix order
 */
protected final List<NamedImageTransformer> getNamedImageTransformers(final SlingHttpServletRequest request) {
    final List<NamedImageTransformer> transformers = new ArrayList<NamedImageTransformer>();

    String[] segments = PathInfoUtil.getSuffixSegments(request);
    if (segments.length < 2) {
        log.warn("Named Transform Image Servlet requires at least one named transform");
        return transformers;
    }

    // Drop the last segment; the length check above guarantees 2+ segments exist.
    int endIndex = segments.length - 1;
    if (StringUtils.isNumeric(PathInfoUtil.getSuffixSegment(request, segments.length - 2))) {
        endIndex--;
    }
    segments = (String[]) ArrayUtils.subarray(segments, 0, endIndex);

    for (final String name : segments) {
        final NamedImageTransformer namedTransformer = this.namedImageTransformers.get(name);
        if (namedTransformer != null) {
            transformers.add(namedTransformer);
        }
    }
    return transformers;
}
/**
 * Merges the transform params of the given named transformers into one ValueMap;
 * later transformers overwrite earlier ones on key collision.
 *
 * @param selectedNamedImageTransformers the named transformers to combine
 * @return the combined image transformer params
 */
protected final ValueMap getImageTransformersWithParams(
        final List<NamedImageTransformer> selectedNamedImageTransformers) {
    final ValueMap combined = new ValueMapDecorator(new LinkedHashMap<String, Object>());
    for (final NamedImageTransformer namedTransformer : selectedNamedImageTransformers) {
        combined.putAll(namedTransformer.getImageTransforms());
    }
    return combined;
}
/**
 * Determines which repository resource backs the image to render, based on the
 * requested resource's type. Resolution order: DAM asset (picked rendition or
 * original) → rendition/file/resource nodes → containing page → the resource itself.
 *
 * NOTE(review): adaptTo(PageManager.class) can return null, in which case
 * getContainingPage would throw an NPE — confirm this cannot happen for the
 * resource types this servlet is bound to.
 *
 * @param request the SlingRequest Obj
 * @return the Image object configured w the info of where the image to render is stored in CRX
 */
protected final Image resolveImage(final SlingHttpServletRequest request) {
final Resource resource = request.getResource();
final ResourceResolver resourceResolver = request.getResourceResolver();
final PageManager pageManager = resourceResolver.adaptTo(PageManager.class);
final Page page = pageManager.getContainingPage(resource);
if (DamUtil.isAsset(resource)) {
// For assets, pick the configured rendition if it exists
// If rendition does not exist, use original
final Asset asset = DamUtil.resolveToAsset(resource);
Rendition rendition = asset.getRendition(renditionPatternPicker);
if (rendition == null) {
log.warn("Could not find rendition [ {} ] for [ {} ]", renditionPatternPicker.toString(),
resource.getPath());
rendition = asset.getOriginal();
}
final Resource renditionResource = request.getResourceResolver().getResource(rendition.getPath());
final Image image = new Image(resource);
// Point the Image at the chosen rendition rather than the asset node itself.
image.set(Image.PN_REFERENCE, renditionResource.getPath());
return image;
} else if (DamUtil.isRendition(resource)
|| resourceResolver.isResourceType(resource, JcrConstants.NT_FILE)
|| resourceResolver.isResourceType(resource, JcrConstants.NT_RESOURCE)) {
// For renditions; use the requested rendition
final Image image = new Image(resource);
image.set(Image.PN_REFERENCE, resource.getPath());
return image;
} else if (page != null) {
if (resourceResolver.isResourceType(resource, NameConstants.NT_PAGE)
|| StringUtils.equals(resource.getPath(), page.getContentResource().getPath())) {
// Is a Page or Page's Content Resource; use the Page's image resource
return new Image(page.getContentResource(), "image");
} else {
return new Image(resource);
}
}
// Fallback: treat the resource itself as the image.
return new Image(resource);
}
/**
 * Determines the mimeType to serve the image with.
 * - The last suffix segment's extension is looked up first
 * - if the segment ends in .orig or .original, the underlying resource's mimeType is used
 * - the extension's mimeType is used only when ImageIO can actually write it
 * - otherwise the resource's mimeType is used, defaulting to image/png on error
 *
 * @param request the request whose suffix names the desired extension
 * @param image the image to get the mimeType for
 * @return the string representation of the image's mimeType
 */
private String getMimeType(final SlingHttpServletRequest request, final Image image) {
    final String lastSuffix = PathInfoUtil.getLastSuffixSegment(request);
    final String candidate = mimeTypeService.getMimeType(lastSuffix);

    final boolean wantsOriginal = StringUtils.endsWithIgnoreCase(lastSuffix, ".orig")
            || StringUtils.endsWithIgnoreCase(lastSuffix, ".original");

    if (!wantsOriginal && ImageIO.getImageWritersByMIMEType(candidate).hasNext()) {
        return candidate;
    }

    try {
        return image.getMimeType();
    } catch (final RepositoryException e) {
        return MIME_TYPE_PNG;
    }
}
/**
 * Gets the Image layer, applying the image's crop and rotate settings.
 *
 * @param image The Image to get the layer from
 * @return the image's Layer, or null if it could not be created
 * @throws IOException if reading the image data fails
 */
private Layer getLayer(final Image image) throws IOException {
    Layer layer = null;
    try {
        layer = image.getLayer(false, false, false);
    } catch (RepositoryException ex) {
        // Bug fix: include the exception so the repository failure is diagnosable
        // (the cause was previously dropped).
        log.error("Could not create layer", ex);
    }
    if (layer == null) {
        log.error("Could not create layer - layer is null;");
    } else {
        image.crop(layer);
        image.rotate(layer);
    }
    return layer;
}
/**
 * Computes the quality based on the "synthetic" Image Quality transform params.
 *
 * Image Quality is not a layer manipulation, so it is handled here instead of in
 * transform(). If no "quality" key is present in transforms, the default of 82 is
 * used (the AEM product default).
 *
 * @param mimeType the desired image mimeType
 * @param transforms the map of image transform params
 * @return the quality as a fraction of 1 (or of 255 for GIFs)
 */
protected final double getQuality(final String mimeType, final ValueMap transforms) {
    final String key = "quality";
    final int defaultQuality = 82;
    final int maxQuality = 100;
    final int minQuality = 0;
    final int maxQualityGIF = 255;
    final double oneHundred = 100D;

    log.debug("Transforming with [ quality ]");

    double quality = transforms.get(key, defaultQuality);

    // Out-of-range values fall back to the product default.
    if (quality < minQuality || quality > maxQuality) {
        quality = defaultQuality;
    }

    // Normalize 0..100 to a 0..1 fraction.
    quality /= oneHundred;

    // GIF quality is expressed on a 0..255 scale.
    if (StringUtils.equals("image/gif", mimeType)) {
        quality *= maxQualityGIF;
    }
    return quality;
}
/**
 * OSGi activation: builds the RenditionPatternPicker from the configured
 * regex, falling back to the default pattern when the configured one is
 * invalid.
 *
 * @param properties the component's configuration properties
 * @throws Exception if even the default pattern cannot be compiled
 */
@Activate
protected final void activate(final Map<String, String> properties) throws Exception {
    final String regex = PropertiesUtil.toString(properties.get(PROP_ASSET_RENDITION_PICKER_REGEX),
            DEFAULT_ASSET_RENDITION_PICKER_REGEX);
    try {
        renditionPatternPicker = new RenditionPatternPicker(regex);
        log.info("Asset Rendition Pattern Picker: {}", regex);
    } catch (Exception ex) {
        // Pass ex as the last argument so SLF4J records the stack trace;
        // previously the cause of the bad regex was silently dropped.
        log.error("Error creating RenditionPatternPicker with regex [ {} ], defaulting to [ {} ]", regex,
                DEFAULT_ASSET_RENDITION_PICKER_REGEX, ex);
        renditionPatternPicker = new RenditionPatternPicker(DEFAULT_ASSET_RENDITION_PICKER_REGEX);
    }
}
/**
 * OSGi bind method: registers a NamedImageTransformer under its configured
 * transformer name. Services without a name property are ignored.
 */
protected final void bindNamedImageTransformers(final NamedImageTransformer service,
                                                final Map<Object, Object> props) {
    final String name = PropertiesUtil.toString(props.get(NamedImageTransformer.PROP_NAME), null);
    if (name == null) {
        return;
    }
    this.namedImageTransformers.put(name, service);
}
/**
 * OSGi unbind method: removes a previously bound NamedImageTransformer by its
 * configured transformer name. Services without a name property are ignored.
 */
protected final void unbindNamedImageTransformers(final NamedImageTransformer service,
                                                  final Map<Object, Object> props) {
    final String name = PropertiesUtil.toString(props.get(NamedImageTransformer.PROP_NAME), null);
    if (name == null) {
        return;
    }
    this.namedImageTransformers.remove(name);
}
/**
 * OSGi bind method: registers an ImageTransformer under its configured type.
 * Services without a type property are ignored.
 */
protected final void bindImageTransformers(final ImageTransformer service, final Map<Object, Object> props) {
    final String transformerType = PropertiesUtil.toString(props.get(ImageTransformer.PROP_TYPE), null);
    if (transformerType == null) {
        return;
    }
    imageTransformers.put(transformerType, service);
}
/**
 * OSGi unbind method: removes a previously bound ImageTransformer by its
 * configured type. Services without a type property are ignored.
 */
protected final void unbindImageTransformers(final ImageTransformer service, final Map<Object, Object> props) {
    final String transformerType = PropertiesUtil.toString(props.get(ImageTransformer.PROP_TYPE), null);
    if (transformerType == null) {
        return;
    }
    imageTransformers.remove(transformerType);
}
}
| |
/*
* Copyright 2009 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CachedBlock;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.KeyValueSkipListSet;
import org.apache.hadoop.hbase.regionserver.MemStore;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.experimental.categories.Category;
/**
 * Testing the sizing that HeapSize offers and compares to the size given by
 * ClassSize.
 */
@Category(SmallTests.class)
public class TestHeapSize extends TestCase {
  static final Log LOG = LogFactory.getLog(TestHeapSize.class);
  // List of classes implementing HeapSize
  // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey
  // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey

  /**
   * Fails if {@code expected != actual}. Before failing, re-runs the estimate
   * with debug output so the test log shows the field-by-field breakdown.
   *
   * @param cl       the class whose size was estimated (used for the debug dump)
   * @param expected the size computed by ClassSize.estimateBase
   * @param actual   the hard-coded or reported heap size being verified
   */
  private static void assertSize(final Class<?> cl, final long expected, final long actual) {
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }
  }

  /**
   * Estimates the base size of {@code cl} and checks it against {@code actual}.
   */
  private static void assertBaseSize(final Class<?> cl, final long actual) {
    assertSize(cl, ClassSize.estimateBase(cl, false), actual);
  }

  /**
   * Test our hard-coded sizing of native java objects
   */
  public void testNativeSizes() throws IOException {
    assertBaseSize(ArrayList.class, ClassSize.ARRAYLIST);
    assertBaseSize(ByteBuffer.class, ClassSize.BYTE_BUFFER);
    assertBaseSize(Integer.class, ClassSize.INTEGER);
    // Map.Entry is not verified here: the interface is public but the
    // implementations are not, which makes it hard to size via ClassSize.
    assertBaseSize(Object.class, ClassSize.OBJECT);
    assertBaseSize(TreeMap.class, ClassSize.TREEMAP);
    assertBaseSize(String.class, ClassSize.STRING);
    assertBaseSize(ConcurrentHashMap.class, ClassSize.CONCURRENT_HASHMAP);
    assertBaseSize(ConcurrentSkipListMap.class, ClassSize.CONCURRENT_SKIPLISTMAP);
    assertBaseSize(ReentrantReadWriteLock.class, ClassSize.REENTRANT_LOCK);
    assertBaseSize(AtomicLong.class, ClassSize.ATOMIC_LONG);
    assertBaseSize(AtomicInteger.class, ClassSize.ATOMIC_INTEGER);
    assertBaseSize(AtomicBoolean.class, ClassSize.ATOMIC_BOOLEAN);
    assertBaseSize(CopyOnWriteArraySet.class, ClassSize.COPYONWRITE_ARRAYSET);
    assertBaseSize(CopyOnWriteArrayList.class, ClassSize.COPYONWRITE_ARRAYLIST);
    assertBaseSize(TimeRangeTracker.class, ClassSize.TIMERANGE_TRACKER);
    assertBaseSize(KeyValueSkipListSet.class, ClassSize.KEYVALUE_SKIPLIST_SET);
  }

  /**
   * Testing the classes that implements HeapSize and are a part of 0.20.
   * Some are not tested here for example BlockIndex which is tested in
   * TestHFile since it is a non public class
   * @throws IOException
   */
  public void testSizes() throws IOException {
    // KeyValue
    assertBaseSize(KeyValue.class, new KeyValue().heapSize());

    // Put: the TreeMap it owns is not included in estimateBase's calculation.
    assertSize(Put.class,
        ClassSize.estimateBase(Put.class, false) + ClassSize.TREEMAP,
        new Put(Bytes.toBytes("")).heapSize());

    // LruBlockCache fixed overhead.
    assertBaseSize(LruBlockCache.class, LruBlockCache.CACHE_FIXED_OVERHEAD);

    // CachedBlock fixed overhead. We really need "deep" sizing but ClassSize
    // does not do this, so approximate by adding the String and ByteBuffer
    // base sizes. Perhaps we should do all these more in this style....
    long expected = ClassSize.estimateBase(CachedBlock.class, false)
        + ClassSize.estimateBase(String.class, false)
        + ClassSize.estimateBase(ByteBuffer.class, false);
    long actual = CachedBlock.PER_BLOCK_OVERHEAD;
    if (expected != actual) {
      ClassSize.estimateBase(CachedBlock.class, true);
      ClassSize.estimateBase(String.class, true);
      ClassSize.estimateBase(ByteBuffer.class, true);
      assertEquals(expected, actual);
    }

    // MemStore overhead.
    assertBaseSize(MemStore.class, MemStore.FIXED_OVERHEAD);

    // MemStore deep overhead: the base plus the structures MemStore owns (one
    // AtomicLong, and two each of KeyValueSkipListSet, ConcurrentSkipListMap
    // and TimeRangeTracker).
    expected = ClassSize.estimateBase(MemStore.class, false)
        + ClassSize.estimateBase(AtomicLong.class, false)
        + (2 * ClassSize.estimateBase(KeyValueSkipListSet.class, false))
        + (2 * ClassSize.estimateBase(ConcurrentSkipListMap.class, false))
        + (2 * ClassSize.estimateBase(TimeRangeTracker.class, false));
    actual = MemStore.DEEP_OVERHEAD;
    if (expected != actual) {
      ClassSize.estimateBase(MemStore.class, true);
      ClassSize.estimateBase(AtomicLong.class, true);
      ClassSize.estimateBase(KeyValueSkipListSet.class, true);
      ClassSize.estimateBase(ConcurrentSkipListMap.class, true);
      ClassSize.estimateBase(TimeRangeTracker.class, true);
      assertEquals(expected, actual);
    }

    // SchemaConfigured reports its own heap size; compare it directly against
    // the (debug-dumped) estimate.
    LOG.debug("Heap size for: " + SchemaConfigured.class.getName());
    SchemaConfigured sc = new SchemaConfigured(null, "myTable", "myCF");
    assertEquals(ClassSize.estimateBase(SchemaConfigured.class, true),
        sc.heapSize());

    // Store overhead.
    assertBaseSize(Store.class, Store.FIXED_OVERHEAD);

    // Region overhead.
    assertBaseSize(HRegion.class, HRegion.FIXED_OVERHEAD);

    // Block cache key overhead. Passing zero length file name, because
    // estimateBase does not handle deep overhead.
    assertBaseSize(BlockCacheKey.class, new BlockCacheKey("", 0).heapSize());

    // Currently NOT testing Deep Overheads of many of these classes.
    // Deep overheads cover a vast majority of stuff, but will not be 100%
    // accurate because it's unclear when we're referencing stuff that's already
    // accounted for. But we have satisfied our two core requirements.
    // Sizing is quite accurate now, and our tests will throw errors if
    // any of these classes are modified without updating overhead sizes.
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
| |
/*
*
* Autopsy Forensic Browser
*
* Copyright 2012-2014 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
* Project Contact/Architect: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.recentactivity;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
/**
 * Recent activity image ingest module.
 *
 * Runs a fixed set of Extract implementations (web browsers, recent
 * documents, search-engine queries, registry) against a data source and
 * posts summary messages to the ingest inbox.
 */
public final class RAImageIngestModule implements DataSourceIngestModule {

    private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName());
    // All extracters, in the order they will be run.
    private final List<Extract> extracters = new ArrayList<>();
    // Browser extracters only; used to build the per-browser results summary.
    private final List<Extract> browserExtracters = new ArrayList<>();
    private IngestServices services = IngestServices.getInstance();
    private IngestJobContext context;
    // Accumulates per-extracter failure notices. NOTE(review): appended to but
    // never read anywhere in this class - confirm whether it is still needed.
    private StringBuilder subCompleted = new StringBuilder();

    RAImageIngestModule() {
    }

    /**
     * Creates and initializes the extracters for this ingest job.
     * Order matters: the search-engine analyzer must run after the browser
     * modules, and the registry extracter runs last because it is slowest.
     */
    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        this.context = context;

        Extract registry = new ExtractRegistry();
        Extract iexplore = new ExtractIE();
        Extract recentDocuments = new RecentDocumentsByLnk();
        Extract chrome = new Chrome();
        Extract firefox = new Firefox();
        Extract SEUQA = new SearchEngineURLQueryAnalyzer();

        extracters.add(chrome);
        extracters.add(firefox);
        extracters.add(iexplore);
        extracters.add(recentDocuments);
        extracters.add(SEUQA); // this needs to run after the web browser modules
        extracters.add(registry); // this runs last because it is slowest

        browserExtracters.add(chrome);
        browserExtracters.add(firefox);
        browserExtracters.add(iexplore);

        for (Extract extracter : extracters) {
            extracter.init();
        }
    }

    /**
     * Runs each extracter in turn against the data source, reporting progress
     * and collecting errors, then posts a status message and a per-browser
     * results summary to the ingest inbox.
     *
     * @param dataSource the data source (image) being ingested
     * @param progressBar progress reporter; switched to determinate mode here
     * @return always ProcessResult.OK; individual extracter failures are
     *         reported via inbox messages rather than a failing result
     */
    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        services.postMessage(IngestMessage.createMessage(MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(),
                NbBundle.getMessage(this.getClass(),
                        "RAImageIngestModule.process.started",
                        dataSource.getName())));

        progressBar.switchToDeterminate(extracters.size());
        ArrayList<String> errors = new ArrayList<>();

        for (int i = 0; i < extracters.size(); i++) {
            Extract extracter = extracters.get(i);
            // Stop early if the user cancelled the ingest job.
            if (context.dataSourceIngestIsCancelled()) {
                logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName()); //NON-NLS
                break;
            }

            progressBar.progress(extracter.getName(), i);

            try {
                extracter.process(dataSource, context);
            } catch (Exception ex) {
                // A failing extracter must not abort the remaining ones.
                logger.log(Level.SEVERE, "Exception occurred in " + extracter.getName(), ex); //NON-NLS
                subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModFailed",
                        extracter.getName()));
                errors.add(
                        NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModErrs", RecentActivityExtracterModuleFactory.getModuleName()));
            }
            progressBar.progress(i + 1);
            errors.addAll(extracter.getErrorMessages());
        }

        // create the final message for inbox
        StringBuilder errorMessage = new StringBuilder();
        String errorMsgSubject;
        MessageType msgLevel = MessageType.INFO;
        if (errors.isEmpty() == false) {
            msgLevel = MessageType.ERROR;
            errorMessage.append(
                    NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsg.errsEncountered"));
            // NOTE(review): no opening <ul> is appended here; presumably it is
            // part of the errsEncountered bundle message - confirm.
            for (String msg : errors) {
                errorMessage.append("<li>").append(msg).append("</li>\n"); //NON-NLS
            }
            errorMessage.append("</ul>\n"); //NON-NLS

            if (errors.size() == 1) {
                errorMsgSubject = NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsgSub.oneErr");
            } else {
                errorMsgSubject = NbBundle.getMessage(this.getClass(),
                        "RAImageIngestModule.process.errMsgSub.nErrs", errors.size());
            }
        } else {
            errorMessage.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsg.noErrs"));
            errorMsgSubject = NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsgSub.noErrs");
        }
        final IngestMessage msg = IngestMessage.createMessage(msgLevel, RecentActivityExtracterModuleFactory.getModuleName(),
                NbBundle.getMessage(this.getClass(),
                        "RAImageIngestModule.process.ingestMsg.finished",
                        dataSource.getName(), errorMsgSubject),
                errorMessage.toString());
        services.postMessage(msg);

        // Build the "data found / not found" summary for the browser modules.
        StringBuilder historyMsg = new StringBuilder();
        historyMsg.append(
                NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.title", dataSource.getName()));
        for (Extract module : browserExtracters) {
            historyMsg.append("<li>").append(module.getName()); //NON-NLS
            historyMsg.append(": ").append((module.foundData()) ? NbBundle
                    .getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.found") : NbBundle
                    .getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.notFnd"));
            historyMsg.append("</li>"); //NON-NLS
        }
        historyMsg.append("</ul>"); //NON-NLS
        final IngestMessage inboxMsg = IngestMessage.createMessage(MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(),
                NbBundle.getMessage(this.getClass(),
                        "RAImageIngestModule.process.ingestMsg.results",
                        dataSource.getName()),
                historyMsg.toString());
        services.postMessage(inboxMsg);

        if (context.dataSourceIngestIsCancelled()) {
            return ProcessResult.OK;
        }

        // Give every extracter a chance to finish up, even if one fails.
        for (int i = 0; i < extracters.size(); i++) {
            Extract extracter = extracters.get(i);
            try {
                extracter.complete();
            } catch (Exception ex) {
                logger.log(Level.SEVERE, "Exception occurred when completing " + extracter.getName(), ex); //NON-NLS
                subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.complete.errMsg.failed",
                        extracter.getName()));
            }
        }

        return ProcessResult.OK;
    }

    /**
     * Get the temp path for a specific sub-module in recent activity. Will
     * create the dir if it doesn't exist.
     *
     * @param a_case Case that directory is for
     * @param mod Module name that will be used for a sub folder in the temp
     * folder to prevent name collisions
     *
     * @return Path to directory
     */
    protected static String getRATempPath(Case a_case, String mod) {
        String tmpDir = a_case.getTempDirectory() + File.separator + "RecentActivity" + File.separator + mod; //NON-NLS
        File dir = new File(tmpDir);
        if (dir.exists() == false) {
            dir.mkdirs();
        }
        return tmpDir;
    }

    /**
     * Get the output path for a specific sub-module in recent activity. Will
     * create the dir if it doesn't exist.
     *
     * @param a_case Case that directory is for
     * @param mod Module name that will be used for a sub folder in the temp
     * folder to prevent name collisions
     *
     * @return Path to directory
     */
    protected static String getRAOutputPath(Case a_case, String mod) {
        String tmpDir = a_case.getModulesOutputDirAbsPath() + File.separator + "RecentActivity" + File.separator + mod; //NON-NLS
        File dir = new File(tmpDir);
        if (dir.exists() == false) {
            dir.mkdirs();
        }
        return tmpDir;
    }
}
| |
/*
* Copyright (c) 2011 Denis Tulskiy
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* version 3 along with this work. If not, see <http://www.gnu.org/licenses/>.
*/
package mysh.tulskiy.keymaster.osx;
import com.sun.jna.Pointer;
import com.sun.jna.platform.mac.Carbon;
import com.sun.jna.ptr.PointerByReference;
import mysh.tulskiy.keymaster.common.HotKey;
import mysh.tulskiy.keymaster.common.HotKeyListener;
import mysh.tulskiy.keymaster.common.MediaKey;
import mysh.tulskiy.keymaster.common.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import static com.sun.jna.platform.mac.Carbon.*;
/**
 * Global hotkey provider for Mac OS X backed by the Carbon
 * RegisterEventHotKey API. All Carbon calls are made from a single daemon
 * handler thread; callers hand work to it through {@code registerQueue} and
 * the {@code reset}/{@code listen} flags, all guarded by {@code lock}.
 *
 * Author: Denis Tulskiy
 * Date: 6/17/11
 */
public class CarbonProvider extends Provider {
    private static final Logger LOGGER = LoggerFactory.getLogger(CarbonProvider.class);
    // Carbon event-kind code for "hotkey pressed".
    private static final int kEventHotKeyPressed = 5;

    // Four-character Carbon type codes packed big-endian into ints.
    private static final int kEventClassKeyboard = OS_TYPE("keyb");
    private static final int typeEventHotKeyID = OS_TYPE("hkid");
    private static final int kEventParamDirectObject = OS_TYPE("----");
    // Sequence for hotkey ids handed to Carbon; also the key into hotKeys.
    private static int idSeq = 1;

    // Registered hotkeys by id. Concurrent because the Carbon event callback
    // reads it while other threads register/unregister.
    private Map<Integer, OSXHotKey> hotKeys = new ConcurrentHashMap<>();
    // Pending registrations, drained by the handler thread. Guarded by lock.
    private Queue<OSXHotKey> registerQueue = new LinkedList<OSXHotKey>();
    private final Object lock = new Object();
    // Guarded by lock: while true the handler thread keeps servicing requests.
    private boolean listen;
    // Guarded by lock: requests a resetAll() from the handler thread.
    private boolean reset;
    private EventHandlerProcPtr keyListener;
    private PointerByReference eventHandlerReference;
    public Thread thread;

    /**
     * Installs the Carbon event handler and starts the daemon thread that
     * services register/reset requests until stop() is called.
     */
    public void init() {
        thread = new Thread(new Runnable() {
            public void run() {
                synchronized (lock) {
                    LOGGER.info("Installing Event Handler");
                    eventHandlerReference = new PointerByReference();
                    keyListener = new EventHandler();

                    EventTypeSpec[] eventTypes = (EventTypeSpec[]) (new EventTypeSpec().toArray(1));
                    eventTypes[0].eventClass = kEventClassKeyboard;
                    eventTypes[0].eventKind = kEventHotKeyPressed;

                    int status = INSTANCE.InstallEventHandler(INSTANCE.GetEventDispatcherTarget(), keyListener, 1, eventTypes, null, eventHandlerReference); //fHandlerRef
                    if (status != 0) {
                        LOGGER.warn("Could not register Event Handler, error code: " + status);
                    }
                    if (eventHandlerReference.getValue() == null) {
                        LOGGER.warn("Event Handler reference is null");
                    }

                    listen = true;
                    // Service loop: woken by register()/reset()/stop() via notify.
                    while (listen) {
                        if (reset) {
                            resetAll();
                            reset = false;
                            lock.notify(); // wake the caller blocked in reset()
                        }
                        while (!registerQueue.isEmpty()) {
                            register(registerQueue.poll());
                        }
                        try {
                            lock.wait();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        }, "keymaster-handler");
        thread.setDaemon(true);
        thread.start();
    }

    /**
     * Unregisters every hotkey from Carbon and clears the map.
     * Called on the handler thread only (via the reset flag).
     */
    private void resetAll() {
        LOGGER.info("Resetting hotkeys");
        for (OSXHotKey hotKey : hotKeys.values()) {
            int ret = INSTANCE.UnregisterEventHotKey(hotKey.handler.getValue());
            if (ret != 0) {
                LOGGER.warn("Could not unregister hotkey. Error code: " + ret);
            }
        }
        hotKeys.clear();
    }

    /**
     * Registers a single hotkey with Carbon and stores it under a fresh id.
     * Called on the handler thread only (via registerQueue).
     */
    private void register(OSXHotKey hotKey) {
        KeyStroke keyCode = hotKey.keyStroke;
        EventHotKeyID.ByValue hotKeyReference = new EventHotKeyID.ByValue();
        int id = idSeq++;
        hotKeyReference.id = id;
        // Signature is an arbitrary four-character tag, e.g. "hk01".
        hotKeyReference.signature = OS_TYPE("hk" + String.format("%02d", id));
        PointerByReference gMyHotKeyRef = new PointerByReference();

        int status = INSTANCE.RegisterEventHotKey(KeyMap.getKeyCode(keyCode), KeyMap.getModifier(keyCode), hotKeyReference, INSTANCE.GetEventDispatcherTarget(), 0, gMyHotKeyRef);

        if (status != 0) {
            LOGGER.warn("Could not register HotKey: " + keyCode + ". Error code: " + status);
            return;
        }

        if (gMyHotKeyRef.getValue() == null) {
            LOGGER.warn("HotKey returned null handler reference");
            return;
        }
        hotKey.handler = gMyHotKeyRef;
        LOGGER.info("Registered hotkey: " + keyCode);
        hotKeys.put(id, hotKey);
    }

    /**
     * Stops the handler thread, removes the Carbon event handler, then
     * delegates to the base class for any remaining shutdown.
     */
    @Override
    public void stop() {
        LOGGER.info("Stopping now");
        try {
            synchronized (lock) {
                listen = false;
                lock.notify(); // wake the handler thread so it can exit its loop
            }
            thread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (eventHandlerReference.getValue() != null) {
            INSTANCE.RemoveEventHandler(eventHandlerReference.getValue());
        }
        super.stop();
    }

    /**
     * Asks the handler thread to unregister all hotkeys and blocks until the
     * handler thread signals that the reset is done.
     */
    public void reset() {
        synchronized (lock) {
            reset = true;
            lock.notify();
            try {
                lock.wait();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    /** Queues a hotkey registration to be performed by the handler thread. */
    public void register(KeyStroke keyCode, HotKeyListener listener) {
        synchronized (lock) {
            registerQueue.add(new OSXHotKey(keyCode, listener));
            lock.notify();
        }
    }

    @Override
    public void unregister(KeyStroke keyCode, HotKeyListener listener) {
        OSXHotKey hotkey = new OSXHotKey(keyCode, listener);
        unregisterKey(hotkey);
    }

    /**
     * Removes the matching hotkey entries from the map.
     * NOTE(review): unlike resetAll(), this never calls UnregisterEventHotKey,
     * so the hotkey appears to stay registered with Carbon - confirm whether
     * that is intended.
     */
    private void unregisterKey(HotKey hotkey) {
        LOGGER.info("unregistering hotkey: " + hotkey);
        // Removing during iteration is safe on a ConcurrentHashMap entry set.
        for (Map.Entry<Integer, OSXHotKey> e : hotKeys.entrySet()) {
            if (e.getValue().equals(hotkey))
                hotKeys.remove(e.getKey());
        }
    }

    /** Media keys are not supported by this provider. */
    public void register(MediaKey mediaKey, HotKeyListener listener) {
        LOGGER.warn("Media keys are not supported on this platform");
    }

    /** Media keys are not supported by this provider. */
    @Override
    public void unregister(MediaKey mediaKey, HotKeyListener listener) {
        LOGGER.warn("Media keys are not supported on this platform");
    }

    /**
     * Packs a four-character code into a big-endian int, as Carbon expects.
     * Assumes the string is exactly four ASCII characters (non-negative
     * bytes) - TODO confirm; non-ASCII input would corrupt the packing.
     */
    private static int OS_TYPE(String osType) {
        byte[] bytes = osType.getBytes();
        return (bytes[0] << 24) + (bytes[1] << 16) + (bytes[2] << 8) + bytes[3];
    }

    /**
     * Carbon callback: extracts the hotkey id from the event and fires the
     * listener associated with it.
     */
    private class EventHandler implements Carbon.EventHandlerProcPtr {
        public int callback(Pointer inHandlerCallRef, Pointer inEvent, Pointer inUserData) {
            EventHotKeyID eventHotKeyID = new EventHotKeyID();
            int ret = INSTANCE.GetEventParameter(inEvent, kEventParamDirectObject, typeEventHotKeyID, null, eventHotKeyID.size(), null, eventHotKeyID);
            if (ret != 0) {
                LOGGER.warn("Could not get event parameters. Error code: " + ret);
            } else {
                int eventId = eventHotKeyID.id;
                LOGGER.info("Received event id: " + eventId);
                fireEvent(hotKeys.get(eventId));
            }
            return 0;
        }
    }

    /** A HotKey plus the native handler reference Carbon returned for it. */
    class OSXHotKey extends HotKey {
        PointerByReference handler;

        public OSXHotKey(KeyStroke keyStroke, HotKeyListener listener) {
            super(keyStroke, listener);
        }
    }
}
| |
/*
* Copyright (C) 2014 Lucas Rocha
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lucasr.twowayview;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.PointF;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v7.widget.LinearSmoothScroller;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.RecyclerView.Adapter;
import android.support.v7.widget.RecyclerView.LayoutManager;
import android.support.v7.widget.RecyclerView.LayoutParams;
import android.support.v7.widget.RecyclerView.Recycler;
import android.support.v7.widget.RecyclerView.State;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup.MarginLayoutParams;
import java.util.List;
public abstract class TwoWayLayoutManager extends LayoutManager {
    private static final String LOGTAG = "TwoWayLayoutManager";

    // The RecyclerView this layout manager is attached to, or null when detached.
    private RecyclerView mRecyclerView;

    // True for vertical scrolling, false for horizontal.
    private boolean mIsVertical = true;

    // Pending state to apply on the next layout pass; presumably set from
    // saved-instance-state restoration - confirm against the full class.
    private SavedState mPendingSavedState = null;
    // Scroll target requested before layout; NO_POSITION when none is pending.
    private int mPendingScrollPosition = RecyclerView.NO_POSITION;
    private int mPendingScrollOffset = 0;

    // Cached start/end edges of the laid-out children along the scroll axis;
    // kept in sync with scrolling by offsetChildren().
    private int mLayoutStart;
    private int mLayoutEnd;
    public TwoWayLayoutManager(Context context, AttributeSet attrs) {
        // Delegate to the three-arg constructor with no default style.
        this(context, attrs, 0);
    }
public TwoWayLayoutManager(Context context, AttributeSet attrs, int defStyle) {
final TypedArray a =
context.obtainStyledAttributes(attrs, R.styleable.twowayview_TwoWayLayoutManager, defStyle, 0);
final int indexCount = a.getIndexCount();
for (int i = 0; i < indexCount; i++) {
final int attr = a.getIndex(i);
if (attr == R.styleable.twowayview_TwoWayLayoutManager_android_orientation) {
final int orientation = a.getInt(attr, -1);
if (orientation >= 0) {
setOrientation(Orientation.values()[orientation]);
}
}
}
a.recycle();
}
    public TwoWayLayoutManager(Orientation orientation) {
        // VERTICAL selects the vertical scrolling mode (the default).
        mIsVertical = (orientation == Orientation.VERTICAL);
    }
private static View findNextScrapView(List<ViewHolder> scrapList, Direction direction,
int position) {
final int scrapCount = scrapList.size();
ViewHolder closest = null;
int closestDistance = Integer.MAX_VALUE;
for (int i = 0; i < scrapCount; i++) {
final ViewHolder holder = scrapList.get(i);
final int distance = holder.getPosition() - position;
if ((distance < 0 && direction == Direction.END) ||
(distance > 0 && direction == Direction.START)) {
continue;
}
final int absDistance = Math.abs(distance);
if (absDistance < closestDistance) {
closest = holder;
closestDistance = absDistance;
if (distance == 0) {
break;
}
}
}
if (closest != null) {
return closest.itemView;
}
return null;
}
private int getTotalSpace() {
if (mIsVertical) {
return getHeight() - getPaddingBottom() - getPaddingTop();
} else {
return getWidth() - getPaddingRight() - getPaddingLeft();
}
}
protected int getStartWithPadding() {
return (mIsVertical ? getPaddingTop() : getPaddingLeft());
}
protected int getEndWithPadding() {
if (mIsVertical) {
return (getHeight() - getPaddingBottom());
} else {
return (getWidth() - getPaddingRight());
}
}
protected int getChildStart(View child) {
return (mIsVertical ? getDecoratedTop(child) : getDecoratedLeft(child));
}
protected int getChildEnd(View child) {
return (mIsVertical ? getDecoratedBottom(child) : getDecoratedRight(child));
}
protected Adapter getAdapter() {
return (mRecyclerView != null ? mRecyclerView.getAdapter() : null);
}
private void offsetChildren(int offset) {
if (mIsVertical) {
offsetChildrenVertical(offset);
} else {
offsetChildrenHorizontal(offset);
}
mLayoutStart += offset;
mLayoutEnd += offset;
}
    // Recycles children that scrolled fully out of view. When filling toward
    // END the stale children are behind us at the start edge, and vice versa -
    // hence the apparent inversion below.
    private void recycleChildrenOutOfBounds(Direction direction, Recycler recycler) {
        if (direction == Direction.END) {
            recycleChildrenFromStart(direction, recycler);
        } else {
            recycleChildrenFromEnd(direction, recycler);
        }
    }
    /**
     * Recycles children that have scrolled completely past the start edge.
     * Children are kept in adapter order, so scanning stops at the first one
     * that is still (at least partially) visible.
     */
    private void recycleChildrenFromStart(Direction direction, Recycler recycler) {
        final int childCount = getChildCount();
        final int childrenStart = getStartWithPadding();

        int detachedCount = 0;
        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            final int childEnd = getChildEnd(child);

            if (childEnd >= childrenStart) {
                break; // This child is still visible; the rest are too.
            }

            detachedCount++;
            detachChild(child, direction);
        }

        // Each removal shifts the remaining children down, so index 0 always
        // addresses the next fully off-screen child.
        while (--detachedCount >= 0) {
            final View child = getChildAt(0);
            removeAndRecycleView(child, recycler);
            updateLayoutEdgesFromRemovedChild(child, direction);
        }
    }
private void recycleChildrenFromEnd(Direction direction, Recycler recycler) {
final int childrenEnd = getEndWithPadding();
final int childCount = getChildCount();
int firstDetachedPos = 0;
int detachedCount = 0;
for (int i = childCount - 1; i >= 0; i--) {
final View child = getChildAt(i);
final int childStart = getChildStart(child);
if (childStart <= childrenEnd) {
break;
}
firstDetachedPos = i;
detachedCount++;
detachChild(child, direction);
}
while (--detachedCount >= 0) {
final View child = getChildAt(firstDetachedPos);
removeAndRecycleViewAt(firstDetachedPos, recycler);
updateLayoutEdgesFromRemovedChild(child, direction);
}
}
    /**
     * Core scroll implementation shared by both orientations. Clamps the
     * requested delta to just under one viewport, shifts children, recycles
     * views that left the viewport, and fills the gap opened on the other
     * side.
     *
     * @return the delta actually consumed; 0 when pinned at a list boundary.
     */
    private int scrollBy(int delta, Recycler recycler, State state) {
        final int childCount = getChildCount();
        if (childCount == 0 || delta == 0) {
            return 0;
        }
        final int start = getStartWithPadding();
        final int end = getEndWithPadding();
        final int firstPosition = getFirstVisiblePosition();
        final int totalSpace = getTotalSpace();
        // Never consume a full viewport in one step.
        if (delta < 0) {
            delta = Math.max(-(totalSpace - 1), delta);
        } else {
            delta = Math.min(totalSpace - 1, delta);
        }
        // Already showing item 0 flush with the start edge: no backward scroll.
        final boolean cannotScrollBackward = (firstPosition == 0 &&
                mLayoutStart >= start && delta <= 0);
        // Already showing the last item flush with the end edge: no forward scroll.
        final boolean cannotScrollForward = (firstPosition + childCount == state.getItemCount() &&
                mLayoutEnd <= end && delta >= 0);
        if (cannotScrollForward || cannotScrollBackward) {
            return 0;
        }
        offsetChildren(-delta);
        final Direction direction = (delta > 0 ? Direction.END : Direction.START);
        recycleChildrenOutOfBounds(direction, recycler);
        final int absDelta = Math.abs(delta);
        if (canAddMoreViews(Direction.START, start - absDelta) ||
                canAddMoreViews(Direction.END, end + absDelta)) {
            fillGap(direction, recycler, state);
        }
        return delta;
    }
    /**
     * Fills the space opened by a scroll in the given direction, then corrects
     * any over-scroll the fill may have introduced.
     */
    private void fillGap(Direction direction, Recycler recycler, State state) {
        final int childCount = getChildCount();
        final int extraSpace = getExtraLayoutSpace(state);
        final int firstPosition = getFirstVisiblePosition();
        if (direction == Direction.END) {
            fillAfter(firstPosition + childCount, recycler, state, extraSpace);
            correctTooHigh(childCount, recycler, state);
        } else {
            fillBefore(firstPosition - 1, recycler, extraSpace);
            correctTooLow(childCount, recycler, state);
        }
    }
    /** Convenience overload: fill towards the start with no extra space. */
    private void fillBefore(int pos, Recycler recycler) {
        fillBefore(pos, recycler, 0);
    }
    /**
     * Adds views at decreasing positions starting at {@code position} until
     * the start limit is reached or position 0 has been laid out.
     */
    private void fillBefore(int position, Recycler recycler, int extraSpace) {
        final int limit = getStartWithPadding() - extraSpace;
        while (canAddMoreViews(Direction.START, limit) && position >= 0) {
            makeAndAddView(position, Direction.START, recycler);
            position--;
        }
    }
    /** Convenience overload: fill towards the end with no extra space. */
    private void fillAfter(int pos, Recycler recycler, State state) {
        fillAfter(pos, recycler, state, 0);
    }
    /**
     * Adds views at increasing positions starting at {@code position} until
     * the end limit is reached or the adapter runs out of items.
     */
    private void fillAfter(int position, Recycler recycler, State state, int extraSpace) {
        final int limit = getEndWithPadding() + extraSpace;
        final int itemCount = state.getItemCount();
        while (canAddMoreViews(Direction.END, limit) && position < itemCount) {
            makeAndAddView(position, Direction.END, recycler);
            position++;
        }
    }
    /**
     * Lays out the anchor view at {@code position} first, then fills towards
     * the start and the end of it, biasing any extra layout space towards the
     * side a pending target scroll lies on.
     */
    private void fillSpecific(int position, Recycler recycler, State state) {
        if (state.getItemCount() == 0) {
            return;
        }
        makeAndAddView(position, Direction.END, recycler);
        final int extraSpaceBefore;
        final int extraSpaceAfter;
        final int extraSpace = getExtraLayoutSpace(state);
        // Extra space goes on the side the smooth-scroll target lies on.
        if (state.getTargetScrollPosition() < position) {
            extraSpaceAfter = 0;
            extraSpaceBefore = extraSpace;
        } else {
            extraSpaceAfter = extraSpace;
            extraSpaceBefore = 0;
        }
        fillBefore(position - 1, recycler, extraSpaceBefore);
        // This will correct for the top of the first view not
        // touching the top of the parent.
        adjustViewsStartOrEnd();
        fillAfter(position + 1, recycler, state, extraSpaceAfter);
        correctTooHigh(getChildCount(), recycler, state);
    }
    /**
     * When the last item is fully visible but a gap remains at the end edge,
     * pulls the content towards the end and back-fills at the start.
     */
    private void correctTooHigh(int childCount, Recycler recycler, State state) {
        // First see if the last item is visible. If it is not, it is OK for the
        // top of the list to be pushed up.
        final int lastPosition = getLastVisiblePosition();
        if (lastPosition != state.getItemCount() - 1 || childCount == 0) {
            return;
        }
        // This is bottom of our drawable area.
        final int start = getStartWithPadding();
        final int end = getEndWithPadding();
        final int firstPosition = getFirstVisiblePosition();
        // This is how far the end edge of the last view is from the end of the
        // drawable area.
        int endOffset = end - mLayoutEnd;
        // Make sure we are 1) Too high, and 2) Either there are more rows above the
        // first row or the first row is scrolled off the top of the drawable area
        if (endOffset > 0 && (firstPosition > 0 || mLayoutStart < start)) {
            if (firstPosition == 0) {
                // Don't pull the top too far down.
                endOffset = Math.min(endOffset, start - mLayoutStart);
            }
            // Move everything down
            offsetChildren(endOffset);
            if (firstPosition > 0) {
                // Fill the gap that was opened above first position with more
                // children, if possible.
                fillBefore(firstPosition - 1, recycler);
                // Close up the remaining gap.
                adjustViewsStartOrEnd();
            }
        }
    }
    /**
     * Mirror of correctTooHigh: when item 0 is fully visible but a gap remains
     * at the start edge, pushes the content towards the start and back-fills
     * at the end.
     */
    private void correctTooLow(int childCount, Recycler recycler, State state) {
        // First see if the first item is visible. If it is not, it is OK for the
        // end of the list to be pushed forward.
        final int firstPosition = getFirstVisiblePosition();
        if (firstPosition != 0 || childCount == 0) {
            return;
        }
        final int start = getStartWithPadding();
        final int end = getEndWithPadding();
        final int itemCount = state.getItemCount();
        final int lastPosition = getLastVisiblePosition();
        // This is how far the start edge of the first view is from the start of the
        // drawable area.
        int startOffset = mLayoutStart - start;
        // Make sure we are 1) Too low, and 2) Either there are more columns/rows below the
        // last column/row or the last column/row is scrolled off the end of the
        // drawable area.
        if (startOffset > 0) {
            if (lastPosition < itemCount - 1 || mLayoutEnd > end) {
                if (lastPosition == itemCount - 1) {
                    // Don't pull the bottom too far up.
                    startOffset = Math.min(startOffset, mLayoutEnd - end);
                }
                // Move everything up.
                offsetChildren(-startOffset);
                if (lastPosition < itemCount - 1) {
                    // Fill the gap that was opened below the last position with more
                    // children, if possible.
                    fillAfter(lastPosition + 1, recycler, state);
                    // Close up the remaining gap.
                    adjustViewsStartOrEnd();
                }
            } else if (lastPosition == itemCount - 1) {
                adjustViewsStartOrEnd();
            }
        }
    }
    /**
     * Snaps the first child back to the padded start edge when it drifted past
     * it (start-side gap only; never pushes content forward).
     */
    private void adjustViewsStartOrEnd() {
        if (getChildCount() == 0) {
            return;
        }
        int delta = mLayoutStart - getStartWithPadding();
        if (delta < 0) {
            // We only are looking to see if we are too low, not too high
            delta = 0;
        }
        if (delta != 0) {
            offsetChildren(-delta);
        }
    }
private void fillFromScrapList(List<ViewHolder> scrapList, Direction direction) {
final int firstPosition = getFirstVisiblePosition();
int position;
if (direction == Direction.END) {
position = firstPosition + getChildCount();
} else {
position = firstPosition - 1;
}
View scrapChild;
while ((scrapChild = findNextScrapView(scrapList, direction, position)) != null) {
setupChild(scrapChild, direction);
position += (direction == Direction.END ? 1 : -1);
}
}
private void setupChild(View child, Direction direction) {
final ItemSelectionSupport itemSelection = ItemSelectionSupport.from(mRecyclerView);
if (itemSelection != null) {
final int position = getPosition(child);
itemSelection.setViewChecked(child, itemSelection.isItemChecked(position));
}
measureChild(child, direction);
layoutChild(child, direction);
}
    /**
     * Obtains the view for {@code position} from the recycler, attaches it at
     * the child index matching the fill direction, and measures/lays it out.
     * Views for removed items are set up but never attached nor allowed to
     * extend the layout edges (they exist only to animate away).
     */
    private View makeAndAddView(int position, Direction direction, Recycler recycler) {
        final View child = recycler.getViewForPosition(position);
        final boolean isItemRemoved = ((LayoutParams) child.getLayoutParams()).isItemRemoved();
        if (!isItemRemoved) {
            addView(child, (direction == Direction.END ? -1 : 0));
        }
        setupChild(child, direction);
        if (!isItemRemoved) {
            updateLayoutEdgesFromNewChild(child);
        }
        return child;
    }
    /**
     * Reacts to an adapter change by scheduling a relayout anchored at the
     * current first visible child (position and pixel offset preserved).
     */
    private void handleUpdate() {
        // Refresh state by requesting layout without changing the
        // first visible position. This will ensure the layout will
        // sync with the adapter changes.
        final int firstPosition = getFirstVisiblePosition();
        final View firstChild = findViewByPosition(firstPosition);
        if (firstChild != null) {
            setPendingScrollPositionWithOffset(firstPosition, getChildStart(firstChild));
        } else {
            setPendingScrollPositionWithOffset(RecyclerView.NO_POSITION, 0);
        }
    }
private void updateLayoutEdgesFromNewChild(View newChild) {
final int childStart = getChildStart(newChild);
if (childStart < mLayoutStart) {
mLayoutStart = childStart;
}
final int childEnd = getChildEnd(newChild);
if (childEnd > mLayoutEnd) {
mLayoutEnd = childEnd;
}
}
    /**
     * Recomputes the affected cached layout edge after {@code removedChild}
     * was recycled. Only re-scans when the removed child could have defined
     * an edge, and stops scanning as soon as the edge cannot improve further.
     */
    private void updateLayoutEdgesFromRemovedChild(View removedChild, Direction direction) {
        final int childCount = getChildCount();
        if (childCount == 0) {
            resetLayoutEdges();
            return;
        }
        final int removedChildStart = getChildStart(removedChild);
        final int removedChildEnd = getChildEnd(removedChild);
        // Removed child sat strictly inside the bounds: edges are unaffected.
        if (removedChildStart > mLayoutStart && removedChildEnd < mLayoutEnd) {
            return;
        }
        int index;
        final int limit;
        if (direction == Direction.END) {
            // Scrolling towards the end of the layout, child view being
            // removed from the start.
            mLayoutStart = Integer.MAX_VALUE;
            index = 0;
            limit = removedChildEnd;
        } else {
            // Scrolling towards the start of the layout, child view being
            // removed from the end.
            mLayoutEnd = Integer.MIN_VALUE;
            index = childCount - 1;
            limit = removedChildStart;
        }
        while (index >= 0 && index <= childCount - 1) {
            final View child = getChildAt(index);
            if (direction == Direction.END) {
                final int childStart = getChildStart(child);
                if (childStart < mLayoutStart) {
                    mLayoutStart = childStart;
                }
                // Checked enough child views to update the minimum
                // layout start edge, stop.
                if (childStart >= limit) {
                    break;
                }
                index++;
            } else {
                final int childEnd = getChildEnd(child);
                if (childEnd > mLayoutEnd) {
                    mLayoutEnd = childEnd;
                }
                // Checked enough child views to update the minimum
                // layout end edge, stop.
                if (childEnd <= limit) {
                    break;
                }
                index--;
            }
        }
    }
private void resetLayoutEdges() {
mLayoutStart = getStartWithPadding();
mLayoutEnd = mLayoutStart;
}
protected int getExtraLayoutSpace(State state) {
if (state.hasTargetScrollPosition()) {
return getTotalSpace();
} else {
return 0;
}
}
private Bundle getPendingItemSelectionState() {
if (mPendingSavedState != null) {
return mPendingSavedState.itemSelectionState;
}
return null;
}
protected void setPendingScrollPositionWithOffset(int position, int offset) {
mPendingScrollPosition = position;
mPendingScrollOffset = offset;
}
protected int getPendingScrollPosition() {
if (mPendingSavedState != null) {
return mPendingSavedState.anchorItemPosition;
}
return mPendingScrollPosition;
}
protected int getPendingScrollOffset() {
if (mPendingSavedState != null) {
return 0;
}
return mPendingScrollOffset;
}
protected int getAnchorItemPosition(State state) {
final int itemCount = state.getItemCount();
int pendingPosition = getPendingScrollPosition();
if (pendingPosition != RecyclerView.NO_POSITION) {
if (pendingPosition < 0 || pendingPosition >= itemCount) {
pendingPosition = RecyclerView.NO_POSITION;
}
}
if (pendingPosition != RecyclerView.NO_POSITION) {
return pendingPosition;
} else if (getChildCount() > 0) {
return findFirstValidChildPosition(itemCount);
} else {
return 0;
}
}
private int findFirstValidChildPosition(int itemCount) {
final int childCount = getChildCount();
for (int i = 0; i < childCount; i++) {
final View view = getChildAt(i);
final int position = getPosition(view);
if (position >= 0 && position < itemCount) {
return position;
}
}
return 0;
}
@Override
public int getDecoratedMeasuredWidth(View child) {
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
return super.getDecoratedMeasuredWidth(child) + lp.leftMargin + lp.rightMargin;
}
@Override
public int getDecoratedMeasuredHeight(View child) {
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
return super.getDecoratedMeasuredHeight(child) + lp.topMargin + lp.bottomMargin;
}
@Override
public int getDecoratedLeft(View child) {
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
return super.getDecoratedLeft(child) - lp.leftMargin;
}
@Override
public int getDecoratedTop(View child) {
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
return super.getDecoratedTop(child) - lp.topMargin;
}
@Override
public int getDecoratedRight(View child) {
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
return super.getDecoratedRight(child) + lp.rightMargin;
}
@Override
public int getDecoratedBottom(View child) {
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
return super.getDecoratedBottom(child) + lp.bottomMargin;
}
@Override
public void layoutDecorated(View child, int left, int top, int right, int bottom) {
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
super.layoutDecorated(child, left + lp.leftMargin, top + lp.topMargin,
right - lp.rightMargin, bottom - lp.bottomMargin);
}
    @Override
    public void onAttachedToWindow(RecyclerView view) {
        super.onAttachedToWindow(view);
        // Keep a handle on the host view for adapter/selection lookups.
        mRecyclerView = view;
    }
    @Override
    public void onDetachedFromWindow(RecyclerView view, Recycler recycler) {
        super.onDetachedFromWindow(view, recycler);
        mRecyclerView = null;
    }
    @Override
    public void onAdapterChanged(RecyclerView.Adapter oldAdapter, RecyclerView.Adapter newAdapter) {
        super.onAdapterChanged(oldAdapter, newAdapter);
        // Selection state belongs to the old adapter; drop it on swap.
        final ItemSelectionSupport itemSelectionSupport = ItemSelectionSupport.from(mRecyclerView);
        if (oldAdapter != null && itemSelectionSupport != null) {
            itemSelectionSupport.clearChoices();
        }
    }
    /**
     * Full layout pass: restores pending selection state, scraps all attached
     * views, refills from the anchor position, then lays out leftover scrap
     * for predictive animations. Pending scroll/saved state is consumed here.
     */
    @Override
    public void onLayoutChildren(Recycler recycler, State state) {
        final ItemSelectionSupport itemSelection = ItemSelectionSupport.from(mRecyclerView);
        if (itemSelection != null) {
            final Bundle itemSelectionState = getPendingItemSelectionState();
            if (itemSelectionState != null) {
                itemSelection.onRestoreInstanceState(itemSelectionState);
            }
            if (state.didStructureChange()) {
                itemSelection.onAdapterDataChanged();
            }
        }
        final int anchorItemPosition = getAnchorItemPosition(state);
        detachAndScrapAttachedViews(recycler);
        fillSpecific(anchorItemPosition, recycler, state);
        onLayoutScrapList(recycler, state);
        setPendingScrollPositionWithOffset(RecyclerView.NO_POSITION, 0);
        mPendingSavedState = null;
    }
    /**
     * Lays out remaining scrap views around the visible range so predictive
     * item animations have positions to animate from/to.
     */
    protected void onLayoutScrapList(Recycler recycler, State state) {
        final int childCount = getChildCount();
        if (childCount == 0 || state.isPreLayout() || !supportsPredictiveItemAnimations()) {
            return;
        }
        final List<ViewHolder> scrapList = recycler.getScrapList();
        fillFromScrapList(scrapList, Direction.START);
        fillFromScrapList(scrapList, Direction.END);
    }
    /** Hook for subclasses to react before a child is detached for recycling. */
    protected void detachChild(View child, Direction direction) {
        // Do nothing by default.
    }
    // All structural adapter changes funnel into handleUpdate(), which
    // re-anchors the layout at the current first visible child.
    @Override
    public void onItemsAdded(RecyclerView recyclerView, int positionStart, int itemCount) {
        handleUpdate();
    }
    @Override
    public void onItemsRemoved(RecyclerView recyclerView, int positionStart, int itemCount) {
        handleUpdate();
    }
    @Override
    public void onItemsUpdated(RecyclerView recyclerView, int positionStart, int itemCount) {
        handleUpdate();
    }
    @Override
    public void onItemsMoved(RecyclerView recyclerView, int from, int to, int itemCount) {
        handleUpdate();
    }
    @Override
    public void onItemsChanged(RecyclerView recyclerView) {
        handleUpdate();
    }
@Override
public RecyclerView.LayoutParams generateDefaultLayoutParams() {
if (mIsVertical) {
return new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
} else {
return new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
}
}
@Override
public boolean supportsPredictiveItemAnimations() {
return true;
}
@Override
public int scrollHorizontallyBy(int dx, Recycler recycler, State state) {
if (mIsVertical) {
return 0;
}
return scrollBy(dx, recycler, state);
}
@Override
public int scrollVerticallyBy(int dy, Recycler recycler, State state) {
if (!mIsVertical) {
return 0;
}
return scrollBy(dy, recycler, state);
}
@Override
public boolean canScrollHorizontally() {
return !mIsVertical;
}
@Override
public boolean canScrollVertically() {
return mIsVertical;
}
    @Override
    public void scrollToPosition(int position) {
        scrollToPositionWithOffset(position, 0);
    }
    /**
     * Requests that {@code position} be laid out {@code offset} pixels from
     * the padded start edge on the next layout pass.
     */
    public void scrollToPositionWithOffset(int position, int offset) {
        setPendingScrollPositionWithOffset(position, offset);
        requestLayout();
    }
    /**
     * Smooth-scrolls using a LinearSmoothScroller that snaps the target to
     * the start edge in both orientations.
     */
    @Override
    public void smoothScrollToPosition(RecyclerView recyclerView, State state, int position) {
        final LinearSmoothScroller scroller = new LinearSmoothScroller(recyclerView.getContext()) {
            @Override
            public PointF computeScrollVectorForPosition(int targetPosition) {
                if (getChildCount() == 0) {
                    return null;
                }
                // Scroll backwards when the target precedes the first visible item.
                final int direction = targetPosition < getFirstVisiblePosition() ? -1 : 1;
                if (mIsVertical) {
                    return new PointF(0, direction);
                } else {
                    return new PointF(direction, 0);
                }
            }
            @Override
            protected int getVerticalSnapPreference() {
                return LinearSmoothScroller.SNAP_TO_START;
            }
            @Override
            protected int getHorizontalSnapPreference() {
                return LinearSmoothScroller.SNAP_TO_START;
            }
        };
        scroller.setTargetPosition(position);
        startSmoothScroll(scroller);
    }
    // Scrollbar metrics below are position-based rather than pixel-based:
    // offset = first visible position, extent = child count, range = item count.
    @Override
    public int computeHorizontalScrollOffset(State state) {
        if (getChildCount() == 0) {
            return 0;
        }
        return getFirstVisiblePosition();
    }
    @Override
    public int computeVerticalScrollOffset(State state) {
        if (getChildCount() == 0) {
            return 0;
        }
        return getFirstVisiblePosition();
    }
    @Override
    public int computeHorizontalScrollExtent(State state) {
        return getChildCount();
    }
    @Override
    public int computeVerticalScrollExtent(State state) {
        return getChildCount();
    }
    @Override
    public int computeHorizontalScrollRange(State state) {
        return state.getItemCount();
    }
    @Override
    public int computeVerticalScrollRange(State state) {
        return state.getItemCount();
    }
    // NOTE(review): this override only delegates to super; it appears to exist
    // as an extension point for subclasses.
    @Override
    public void onMeasure(Recycler recycler, State state, int widthSpec, int heightSpec) {
        super.onMeasure(recycler, state, widthSpec, heightSpec);
    }
    /**
     * Saves the current anchor position (pending scroll wins over the first
     * visible child) plus any item-selection state.
     */
    @Override
    public Parcelable onSaveInstanceState() {
        final SavedState state = new SavedState(SavedState.EMPTY_STATE);
        int anchorItemPosition = getPendingScrollPosition();
        if (anchorItemPosition == RecyclerView.NO_POSITION) {
            anchorItemPosition = getFirstVisiblePosition();
        }
        state.anchorItemPosition = anchorItemPosition;
        final ItemSelectionSupport itemSelection = ItemSelectionSupport.from(mRecyclerView);
        if (itemSelection != null) {
            state.itemSelectionState = itemSelection.onSaveInstanceState();
        } else {
            state.itemSelectionState = Bundle.EMPTY;
        }
        return state;
    }
    /** Defers restoration to the next layout pass via mPendingSavedState. */
    @Override
    public void onRestoreInstanceState(Parcelable state) {
        mPendingSavedState = (SavedState) state;
        requestLayout();
    }
    /** Current scroll orientation derived from the internal flag. */
    public Orientation getOrientation() {
        return (mIsVertical ? Orientation.VERTICAL : Orientation.HORIZONTAL);
    }
    /** Switches orientation and relayouts; no-op when unchanged. */
    public void setOrientation(Orientation orientation) {
        final boolean isVertical = (orientation == Orientation.VERTICAL);
        if (this.mIsVertical == isVertical) {
            return;
        }
        this.mIsVertical = isVertical;
        requestLayout();
    }
    /** Adapter position of the first attached child; 0 when empty. */
    public int getFirstVisiblePosition() {
        if (getChildCount() == 0) {
            return 0;
        }
        return getPosition(getChildAt(0));
    }
    /** Adapter position of the last attached child; 0 when empty. */
    public int getLastVisiblePosition() {
        final int childCount = getChildCount();
        if (childCount == 0) {
            return 0;
        }
        return getPosition(getChildAt(childCount - 1));
    }
    /** Measures a child prior to layout for the given fill direction. */
    protected abstract void measureChild(View child, Direction direction);
    /** Positions an already-measured child for the given fill direction. */
    protected abstract void layoutChild(View child, Direction direction);
    /** Whether more views fit before the given pixel limit in a direction. */
    protected abstract boolean canAddMoreViews(Direction direction, int limit);
public static enum Orientation {
HORIZONTAL,
VERTICAL
}
public static enum Direction {
START,
END
}
    /**
     * Parcelable state holder: the anchor item position plus the opaque
     * item-selection Bundle.
     */
    protected static class SavedState implements Parcelable {
        public static final Parcelable.Creator<SavedState> CREATOR
                = new Parcelable.Creator<SavedState>() {
            @Override
            public SavedState createFromParcel(Parcel in) {
                return new SavedState(in);
            }
            @Override
            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
        // Sentinel used when there is no real super state to wrap.
        protected static final SavedState EMPTY_STATE = new SavedState();
        private final Parcelable superState;
        private int anchorItemPosition;
        private Bundle itemSelectionState;
        private SavedState() {
            superState = null;
        }
        protected SavedState(Parcelable superState) {
            if (superState == null) {
                throw new IllegalArgumentException("superState must not be null");
            }
            // Normalize the sentinel to null so it is never parceled.
            this.superState = (superState != EMPTY_STATE ? superState : null);
        }
        protected SavedState(Parcel in) {
            this.superState = EMPTY_STATE;
            anchorItemPosition = in.readInt();
            itemSelectionState = in.readParcelable(getClass().getClassLoader());
        }
        public Parcelable getSuperState() {
            return superState;
        }
        @Override
        public int describeContents() {
            return 0;
        }
        @Override
        public void writeToParcel(Parcel out, int flags) {
            out.writeInt(anchorItemPosition);
            out.writeParcelable(itemSelectionState, flags);
        }
    }
}
| |
package org.kuali.kpme.edo.api.group;
import java.io.Serializable;
import java.util.Collection;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.kuali.kpme.core.api.groupkey.HrGroupKey;
import org.kuali.kpme.core.api.groupkey.HrGroupKeyContract;
import org.kuali.rice.core.api.CoreConstants;
import org.kuali.rice.core.api.mo.AbstractDataTransferObject;
import org.kuali.rice.core.api.mo.ModelBuilder;
import org.kuali.rice.core.api.util.jaxb.DateTimeAdapter;
import org.kuali.rice.core.api.util.jaxb.LocalDateAdapter;
import org.w3c.dom.Element;
@XmlRootElement(name = EdoGroupTracking.Constants.ROOT_ELEMENT_NAME)
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = EdoGroupTracking.Constants.TYPE_NAME, propOrder = {
EdoGroupTracking.Elements.EDO_WORKFLOW_ID,
EdoGroupTracking.Elements.EDO_GROUP_TRACKING_ID,
EdoGroupTracking.Elements.DEPARTMENT_ID,
EdoGroupTracking.Elements.REVIEW_LEVEL_NAME,
EdoGroupTracking.Elements.ORGANIZATION_CODE,
EdoGroupTracking.Elements.GROUP_NAME,
CoreConstants.CommonElements.VERSION_NUMBER,
CoreConstants.CommonElements.OBJECT_ID,
EdoGroupTracking.Elements.ACTIVE,
EdoGroupTracking.Elements.ID,
EdoGroupTracking.Elements.EFFECTIVE_LOCAL_DATE,
EdoGroupTracking.Elements.CREATE_TIME,
EdoGroupTracking.Elements.USER_PRINCIPAL_ID,
EdoGroupTracking.Elements.GROUP_KEY_CODE,
EdoGroupTracking.Elements.GROUP_KEY,
CoreConstants.CommonElements.FUTURE_ELEMENTS
})
public final class EdoGroupTracking
extends AbstractDataTransferObject
implements EdoGroupTrackingContract
{
@XmlElement(name = Elements.EDO_WORKFLOW_ID, required = false)
private final String edoWorkflowId;
@XmlElement(name = Elements.EDO_GROUP_TRACKING_ID, required = false)
private final String edoGroupTrackingId;
@XmlElement(name = Elements.DEPARTMENT_ID, required = false)
private final String departmentId;
@XmlElement(name = Elements.REVIEW_LEVEL_NAME, required = false)
private final String reviewLevelName;
@XmlElement(name = Elements.ORGANIZATION_CODE, required = false)
private final String organizationCode;
@XmlElement(name = Elements.GROUP_NAME, required = false)
private final String groupName;
@XmlElement(name = CoreConstants.CommonElements.VERSION_NUMBER, required = false)
private final Long versionNumber;
@XmlElement(name = CoreConstants.CommonElements.OBJECT_ID, required = false)
private final String objectId;
@XmlElement(name = Elements.ACTIVE, required = false)
private final boolean active;
@XmlElement(name = Elements.ID, required = false)
private final String id;
@XmlJavaTypeAdapter(LocalDateAdapter.class)
@XmlElement(name = Elements.EFFECTIVE_LOCAL_DATE, required = false)
private final LocalDate effectiveLocalDate;
@XmlJavaTypeAdapter(DateTimeAdapter.class)
@XmlElement(name = Elements.CREATE_TIME, required = false)
private final DateTime createTime;
@XmlElement(name = Elements.USER_PRINCIPAL_ID, required = false)
private final String userPrincipalId;
@XmlElement(name = Elements.GROUP_KEY_CODE, required = false)
private final String groupKeyCode;
@XmlElement(name = Elements.GROUP_KEY, required = false)
private final HrGroupKey groupKey;
@SuppressWarnings("unused")
@XmlAnyElement
private final Collection<Element> _futureElements = null;
/**
* Private constructor used only by JAXB.
*
*/
private EdoGroupTracking() {
this.edoWorkflowId = null;
this.edoGroupTrackingId = null;
this.departmentId = null;
this.reviewLevelName = null;
this.organizationCode = null;
this.groupName = null;
this.versionNumber = null;
this.objectId = null;
this.active = false;
this.id = null;
this.effectiveLocalDate = null;
this.createTime = null;
this.userPrincipalId = null;
this.groupKeyCode = null;
this.groupKey = null;
}
private EdoGroupTracking(Builder builder) {
this.edoWorkflowId = builder.getEdoWorkflowId();
this.edoGroupTrackingId = builder.getEdoGroupTrackingId();
this.departmentId = builder.getDepartmentId();
this.reviewLevelName = builder.getReviewLevelName();
this.organizationCode = builder.getOrganizationCode();
this.groupName = builder.getGroupName();
this.versionNumber = builder.getVersionNumber();
this.objectId = builder.getObjectId();
this.active = builder.isActive();
this.id = builder.getId();
this.effectiveLocalDate = builder.getEffectiveLocalDate();
this.createTime = builder.getCreateTime();
this.userPrincipalId = builder.getUserPrincipalId();
this.groupKeyCode = builder.getGroupKeyCode();
this.groupKey = builder.getGroupKey() == null ? null : builder.getGroupKey().build();
}
@Override
public String getEdoWorkflowId() {
return this.edoWorkflowId;
}
@Override
public String getEdoGroupTrackingId() {
return this.edoGroupTrackingId;
}
@Override
public String getDepartmentId() {
return this.departmentId;
}
@Override
public String getReviewLevelName() {
return this.reviewLevelName;
}
@Override
public String getOrganizationCode() {
return this.organizationCode;
}
@Override
public String getGroupName() {
return this.groupName;
}
@Override
public Long getVersionNumber() {
return this.versionNumber;
}
@Override
public String getObjectId() {
return this.objectId;
}
@Override
public boolean isActive() {
return this.active;
}
@Override
public String getId() {
return this.id;
}
@Override
public LocalDate getEffectiveLocalDate() {
return this.effectiveLocalDate;
}
@Override
public DateTime getCreateTime() {
return this.createTime;
}
@Override
public String getUserPrincipalId() {
return this.userPrincipalId;
}
@Override
public String getGroupKeyCode() {
return this.groupKeyCode;
}
@Override
public HrGroupKey getGroupKey() {
return this.groupKey;
}
/**
* A builder which can be used to construct {@link EdoGroupTracking} instances. Enforces the constraints of the {@link EdoGroupTrackingContract}.
*
*/
public final static class Builder
implements Serializable, EdoGroupTrackingContract, ModelBuilder
{
private String edoWorkflowId;
private String edoGroupTrackingId;
private String departmentId;
private String reviewLevelName;
private String organizationCode;
private String groupName;
private Long versionNumber;
private String objectId;
private boolean active;
private String id;
private LocalDate effectiveLocalDate;
private DateTime createTime;
private String userPrincipalId;
private String groupKeyCode;
private HrGroupKey.Builder groupKey;
private Builder() {
// TODO modify this constructor as needed to pass any required values and invoke the appropriate 'setter' methods
}
public static Builder create() {
// TODO modify as needed to pass any required values and add them to the signature of the 'create' method
return new Builder();
}
public static Builder create(EdoGroupTrackingContract contract) {
if (contract == null) {
throw new IllegalArgumentException("contract was null");
}
// TODO if create() is modified to accept required parameters, this will need to be modified
Builder builder = create();
builder.setEdoWorkflowId(contract.getEdoWorkflowId());
builder.setEdoGroupTrackingId(contract.getEdoGroupTrackingId());
builder.setDepartmentId(contract.getDepartmentId());
builder.setReviewLevelName(contract.getReviewLevelName());
builder.setOrganizationCode(contract.getOrganizationCode());
builder.setGroupName(contract.getGroupName());
builder.setVersionNumber(contract.getVersionNumber());
builder.setObjectId(contract.getObjectId());
builder.setActive(contract.isActive());
builder.setId(contract.getId());
builder.setEffectiveLocalDate(contract.getEffectiveLocalDate());
builder.setCreateTime(contract.getCreateTime());
builder.setUserPrincipalId(contract.getUserPrincipalId());
builder.setGroupKeyCode(contract.getGroupKeyCode());
builder.setGroupKey(contract.getGroupKey() == null ? null : HrGroupKey.Builder.create(contract.getGroupKey()));
return builder;
}
public EdoGroupTracking build() {
return new EdoGroupTracking(this);
}
@Override
public String getEdoWorkflowId() {
return this.edoWorkflowId;
}
@Override
public String getEdoGroupTrackingId() {
return this.edoGroupTrackingId;
}
@Override
public String getDepartmentId() {
return this.departmentId;
}
@Override
public String getReviewLevelName() {
return this.reviewLevelName;
}
@Override
public String getOrganizationCode() {
return this.organizationCode;
}
@Override
public String getGroupName() {
return this.groupName;
}
@Override
public Long getVersionNumber() {
return this.versionNumber;
}
@Override
public String getObjectId() {
return this.objectId;
}
@Override
public boolean isActive() {
return this.active;
}
@Override
public String getId() {
return this.id;
}
@Override
public LocalDate getEffectiveLocalDate() {
return this.effectiveLocalDate;
}
@Override
public DateTime getCreateTime() {
return this.createTime;
}
@Override
public String getUserPrincipalId() {
return this.userPrincipalId;
}
@Override
public String getGroupKeyCode() {
return this.groupKeyCode;
}
@Override
public HrGroupKey.Builder getGroupKey() {
return this.groupKey;
}
public void setEdoWorkflowId(String edoWorkflowId) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.edoWorkflowId = edoWorkflowId;
}
public void setEdoGroupTrackingId(String edoGroupTrackingId) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.edoGroupTrackingId = edoGroupTrackingId;
}
public void setDepartmentId(String departmentId) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.departmentId = departmentId;
}
public void setReviewLevelName(String reviewLevelName) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.reviewLevelName = reviewLevelName;
}
public void setOrganizationCode(String organizationCode) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.organizationCode = organizationCode;
}
public void setGroupName(String groupName) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.groupName = groupName;
}
public void setVersionNumber(Long versionNumber) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.versionNumber = versionNumber;
}
public void setObjectId(String objectId) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.objectId = objectId;
}
public void setActive(boolean active) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.active = active;
}
public void setId(String id) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.id = id;
}
/**
 * Sets the effective local date. No validation is applied to the supplied value.
 *
 * @param value the new {@code effectiveLocalDate}
 */
public void setEffectiveLocalDate(LocalDate value) {
    this.effectiveLocalDate = value;
}
/**
 * Sets the creation timestamp. No validation is applied to the supplied value.
 *
 * @param value the new {@code createTime}
 */
public void setCreateTime(DateTime value) {
    this.createTime = value;
}
/**
 * Sets the user principal id. No validation is applied to the supplied value.
 *
 * @param value the new {@code userPrincipalId}
 */
public void setUserPrincipalId(String value) {
    this.userPrincipalId = value;
}
/**
 * Sets the group key code. No validation is applied to the supplied value.
 *
 * @param value the new {@code groupKeyCode}
 */
public void setGroupKeyCode(String value) {
    this.groupKeyCode = value;
}
/**
 * Sets the nested group-key builder. No validation is applied to the supplied value.
 *
 * @param value the new {@code groupKey} builder
 */
public void setGroupKey(HrGroupKey.Builder value) {
    this.groupKey = value;
}
}
/**
 * Defines internal constants used by this class.
 */
static class Constants {
    static final String ROOT_ELEMENT_NAME = "edoGroupTracking";
    static final String TYPE_NAME = "EdoGroupTrackingType";

    // Constant holder; prevent instantiation.
    private Constants() {
    }
}
/**
 * A private class which exposes constants which define the XML element names
 * to use when this object is marshalled to XML.
 */
static class Elements {
    static final String EDO_WORKFLOW_ID = "edoWorkflowId";
    static final String EDO_GROUP_TRACKING_ID = "edoGroupTrackingId";
    static final String DEPARTMENT_ID = "departmentId";
    static final String REVIEW_LEVEL_NAME = "reviewLevelName";
    static final String ORGANIZATION_CODE = "organizationCode";
    static final String GROUP_NAME = "groupName";
    static final String ACTIVE = "active";
    static final String ID = "id";
    static final String EFFECTIVE_LOCAL_DATE = "effectiveLocalDate";
    static final String CREATE_TIME = "createTime";
    static final String USER_PRINCIPAL_ID = "userPrincipalId";
    static final String GROUP_KEY_CODE = "groupKeyCode";
    static final String GROUP_KEY = "groupKey";

    // Constant holder; prevent instantiation.
    private Elements() {
    }
}
}
| |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.packet;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import org.onlab.packet.dhcp.Dhcp6ClientDataOption;
import org.onlab.packet.dhcp.Dhcp6ClientIdOption;
import org.onlab.packet.dhcp.Dhcp6IaAddressOption;
import org.onlab.packet.dhcp.Dhcp6IaNaOption;
import org.onlab.packet.dhcp.Dhcp6IaTaOption;
import org.onlab.packet.dhcp.Dhcp6IaPdOption;
import org.onlab.packet.dhcp.Dhcp6LeaseQueryOption;
import org.onlab.packet.dhcp.Dhcp6Option;
import org.onlab.packet.dhcp.Dhcp6RelayOption;
import org.onlab.packet.dhcp.Dhcp6InterfaceIdOption;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Representation of a DHCPv6 packet.
 * Based on RFC-3315; leasequery message types per RFC-5007.
 */
public class DHCP6 extends BasePacket {
    private static final int UNSIGNED_SHORT_MASK = 0xffff;
    // size of the code and length fields of an option header
    private static final int OPT_CODE_SIZE = 2;
    private static final int OPT_LEN_SIZE = 2;
    // default size of DHCPv6 payload (without options)
    private static final int DHCP6_DEFAULT_SIZE = 4;
    // default size of DHCPv6 relay message payload (without options):
    // msg type (1) + hop count (1) + link address (16) + peer address (16)
    private static final int DHCP6_RELAY_MSG_SIZE = 34;
    private static final int IPV6_ADDR_LEN = 16;
    // masks & offsets for default DHCPv6 header
    private static final int MSG_TYPE_OFFSET = 24;
    private static final int TRANSACTION_ID_MASK = 0x00ffffff;

    // Relay message types
    public static final Set<Byte> RELAY_MSG_TYPES =
            ImmutableSet.of(MsgType.RELAY_FORW.value,
                            MsgType.RELAY_REPL.value
            );
    public static final Set<Byte> LEASEQUERY_MSG_TYPES =
            ImmutableSet.of(MsgType.LEASEQUERY.value,
                            MsgType.LEASEQUERY_REPLY.value
            );

    /**
     * DHCPv6 message type.
     */
    public enum MsgType {
        SOLICIT((byte) 1), ADVERTISE((byte) 2), REQUEST((byte) 3),
        CONFIRM((byte) 4), RENEW((byte) 5), REBIND((byte) 6),
        REPLY((byte) 7), RELEASE((byte) 8), DECLINE((byte) 9),
        RECONFIGURE((byte) 10), INFORMATION_REQUEST((byte) 11),
        RELAY_FORW((byte) 12), RELAY_REPL((byte) 13), LEASEQUERY((byte) 14),
        LEASEQUERY_REPLY((byte) 15);

        protected byte value;

        MsgType(final byte value) {
            this.value = value;
        }

        public byte value() {
            return this.value;
        }

        /**
         * Gets the message type matching a numeric value.
         *
         * @param value the numeric message type
         * @return the matching message type, or null if the value is unknown
         */
        public static MsgType getType(final int value) {
            switch (value) {
                case 1:
                    return SOLICIT;
                case 2:
                    return ADVERTISE;
                case 3:
                    return REQUEST;
                case 4:
                    return CONFIRM;
                case 5:
                    return RENEW;
                case 6:
                    return REBIND;
                case 7:
                    return REPLY;
                case 8:
                    return RELEASE;
                case 9:
                    return DECLINE;
                case 10:
                    return RECONFIGURE;
                case 11:
                    return INFORMATION_REQUEST;
                case 12:
                    return RELAY_FORW;
                case 13:
                    return RELAY_REPL;
                case 14:
                    return LEASEQUERY;
                case 15:
                    return LEASEQUERY_REPLY;
                default:
                    return null;
            }
        }

        /**
         * Gets the display name of a message type.
         *
         * @param msgType the message type
         * @return the type name, or "NULL" if the type is unknown
         */
        public static String getMsgTypeStr(final MsgType msgType) {
            switch (msgType) {
                case SOLICIT:
                    return "SOLICIT";
                case ADVERTISE:
                    return "ADVERTISE";
                case REQUEST:
                    return "REQUEST";
                case CONFIRM:
                    return "CONFIRM";
                case RENEW:
                    return "RENEW";
                case REBIND:
                    return "REBIND";
                case REPLY:
                    return "REPLY";
                case RELEASE:
                    return "RELEASE";
                case DECLINE:
                    return "DECLINE";
                case RECONFIGURE:
                    return "RECONFIGURE";
                case INFORMATION_REQUEST:
                    return "INFORMATION_REQUEST";
                case RELAY_FORW:
                    return "RELAY_FORW";
                case RELAY_REPL:
                    return "RELAY_REPL";
                // Fix: leasequery types were previously missing and fell
                // through to "NULL" even though the enum defines them.
                case LEASEQUERY:
                    return "LEASEQUERY";
                case LEASEQUERY_REPLY:
                    return "LEASEQUERY_REPLY";
                default:
                    return "NULL";
            }
        }
    }

    /**
     * DHCPv6 option code.
     */
    public enum OptionCode {
        CLIENTID((short) 1), SERVERID((short) 2), IA_NA((short) 3), IA_TA((short) 4),
        IAADDR((short) 5), ORO((short) 6), PREFERENCE((short) 7), ELAPSED_TIME((short) 8),
        RELAY_MSG((short) 9), AUTH((short) 11), UNICAST((short) 12),
        STATUS_CODE((short) 13), RAPID_COMMIT((short) 14), USER_CLASS((short) 15),
        VENDOR_CLASS((short) 16), VENDOR_OPTS((short) 17), INTERFACE_ID((short) 18),
        RECONF_MSG((short) 19), RECONF_ACCEPT((short) 20), IA_PD((short) 25), IAPREFIX((short) 26),
        SUBSCRIBER_ID((short) 38), OPTION_ERO((short) 43), LEASE_QUERY((short) 44),
        CLIENT_DATA((short) 45), CLIENT_LT((short) 48);

        protected short value;

        OptionCode(final short value) {
            this.value = value;
        }

        public short value() {
            return this.value;
        }
    }

    // Option-code specific deserializers; unknown codes fall back to the
    // generic Dhcp6Option deserializer.
    private static final Map<Short, Deserializer<Dhcp6Option>> OPT_DESERIALIZERS =
            ImmutableMap.<Short, Deserializer<Dhcp6Option>>builder()
                    .put(OptionCode.IA_NA.value, Dhcp6IaNaOption.deserializer())
                    .put(OptionCode.IA_TA.value, Dhcp6IaTaOption.deserializer())
                    .put(OptionCode.IAADDR.value, Dhcp6IaAddressOption.deserializer())
                    .put(OptionCode.RELAY_MSG.value, Dhcp6RelayOption.deserializer())
                    .put(OptionCode.CLIENTID.value, Dhcp6ClientIdOption.deserializer())
                    .put(OptionCode.IA_PD.value, Dhcp6IaPdOption.deserializer())
                    .put(OptionCode.INTERFACE_ID.value, Dhcp6InterfaceIdOption.deserializer())
                    .put(OptionCode.LEASE_QUERY.value, Dhcp6LeaseQueryOption.deserializer())
                    .put(OptionCode.CLIENT_DATA.value, Dhcp6ClientDataOption.deserializer())
                    .build();

    // general field
    private byte msgType; // 1 byte
    private List<Dhcp6Option> options;

    // non-relay field
    private int transactionId; // 3 bytes

    // relay field
    private byte hopCount; // 1 byte
    private byte[] linkAddress; // 16 bytes
    private byte[] peerAddress; // 16 bytes

    /**
     * Creates new DHCPv6 object.
     */
    public DHCP6() {
        options = Lists.newArrayList();
    }

    /**
     * Serializes this packet: relay messages carry a 34-byte header
     * (type, hop count, link and peer address), all other messages a
     * 4-byte header (type plus 3-byte transaction id), followed by the
     * serialized options.
     *
     * @return the serialized bytes
     */
    @Override
    public byte[] serialize() {
        int payloadLength = options.stream()
                .mapToInt(Dhcp6Option::getLength)
                .sum();
        // 2 bytes code and 2 bytes length per option
        payloadLength += options.size() * (OPT_CODE_SIZE + OPT_LEN_SIZE);
        if (RELAY_MSG_TYPES.contains(msgType)) {
            payloadLength += DHCP6_RELAY_MSG_SIZE;
        } else {
            payloadLength += DHCP6_DEFAULT_SIZE;
        }
        ByteBuffer bb = ByteBuffer.allocate(payloadLength);
        if (RELAY_MSG_TYPES.contains(msgType)) {
            bb.put(msgType);
            bb.put(hopCount);
            bb.put(linkAddress);
            bb.put(peerAddress);
        } else {
            int defaultHeader = ((int) msgType) << MSG_TYPE_OFFSET | (transactionId & TRANSACTION_ID_MASK);
            bb.putInt(defaultHeader);
        }
        // serialize options
        options.forEach(option -> {
            bb.put(option.serialize());
        });
        return bb.array();
    }

    /**
     * Returns a deserializer for DHCPv6.
     *
     * @return the deserializer for DHCPv6
     */
    public static Deserializer<DHCP6> deserializer() {
        return (data, offset, length) -> {
            DHCP6 dhcp6 = new DHCP6();
            checkNotNull(data);
            if (offset < 0 || length < 0 ||
                    length > data.length || offset >= data.length ||
                    offset + length > data.length) {
                throw new DeserializationException("Illegal offset or length");
            }
            final ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
            if (bb.remaining() < DHCP6.DHCP6_DEFAULT_SIZE) {
                throw new DeserializationException(
                        "Buffer underflow while reading DHCPv6 option");
            }
            // peek message type without advancing the buffer position
            dhcp6.msgType = bb.array()[offset];
            if (RELAY_MSG_TYPES.contains(dhcp6.msgType)) {
                bb.get(); // drop message type
                dhcp6.hopCount = bb.get();
                dhcp6.linkAddress = new byte[IPV6_ADDR_LEN];
                dhcp6.peerAddress = new byte[IPV6_ADDR_LEN];
                bb.get(dhcp6.linkAddress);
                bb.get(dhcp6.peerAddress);
            } else {
                // get msg type + transaction id (1 + 3 bytes)
                int defaultHeader = bb.getInt();
                dhcp6.transactionId = defaultHeader & TRANSACTION_ID_MASK;
            }
            dhcp6.options = Lists.newArrayList();
            while (bb.remaining() >= Dhcp6Option.DEFAULT_LEN) {
                // create temporary byte buffer for reading code and length
                ByteBuffer optByteBuffer =
                        ByteBuffer.wrap(data, bb.position(), bb.limit() - bb.position());
                short code = optByteBuffer.getShort();
                int optionLen = UNSIGNED_SHORT_MASK & optByteBuffer.getShort();
                if (optByteBuffer.remaining() < optionLen) {
                    throw new DeserializationException(
                            "Buffer underflow while reading DHCPv6 option");
                }
                Dhcp6Option option;
                byte[] optionData = new byte[Dhcp6Option.DEFAULT_LEN + optionLen];
                bb.get(optionData);
                if (OPT_DESERIALIZERS.containsKey(code)) {
                    option = OPT_DESERIALIZERS.get(code).deserialize(optionData, 0, optionData.length);
                } else {
                    option = Dhcp6Option.deserializer().deserialize(optionData, 0, optionData.length);
                }
                option.setParent(dhcp6);
                dhcp6.options.add(option);
            }
            return dhcp6;
        };
    }

    /**
     * Gets the message type of this DHCPv6 packet.
     *
     * @return the message type
     */
    public byte getMsgType() {
        return msgType;
    }

    /**
     * Gets options from this DHCPv6 packet.
     *
     * @return DHCPv6 options
     */
    public List<Dhcp6Option> getOptions() {
        return options;
    }

    /**
     * Gets the transaction ID of this DHCPv6 packet.
     *
     * @return the transaction ID
     */
    public int getTransactionId() {
        return transactionId;
    }

    /**
     * Gets the hop count of this DHCPv6 relay message.
     *
     * @return the hop count
     */
    public byte getHopCount() {
        return hopCount;
    }

    /**
     * Gets the link address of this DHCPv6 relay message.
     *
     * @return the link address
     */
    public byte[] getLinkAddress() {
        return linkAddress;
    }

    /**
     * Gets IPv6 link address.
     *
     * @return the IPv6 link address, or null if not a relay message
     */
    public Ip6Address getIp6LinkAddress() {
        return linkAddress == null ? null : Ip6Address.valueOf(linkAddress);
    }

    /**
     * Gets the peer address of this DHCPv6 relay message.
     *
     * @return the peer address
     */
    public byte[] getPeerAddress() {
        return peerAddress;
    }

    /**
     * Gets IPv6 peer address.
     *
     * @return the IPv6 peer address, or null if not a relay message
     */
    public Ip6Address getIp6PeerAddress() {
        return peerAddress == null ? null : Ip6Address.valueOf(peerAddress);
    }

    /**
     * Sets message type.
     *
     * @param msgType the message type
     */
    public void setMsgType(byte msgType) {
        this.msgType = msgType;
    }

    /**
     * Sets options.
     *
     * @param options the options
     */
    public void setOptions(List<Dhcp6Option> options) {
        this.options = options;
    }

    /**
     * Sets transaction id.
     *
     * @param transactionId the transaction id
     */
    public void setTransactionId(int transactionId) {
        this.transactionId = transactionId;
    }

    /**
     * Sets hop count.
     *
     * @param hopCount the hop count
     */
    public void setHopCount(byte hopCount) {
        this.hopCount = hopCount;
    }

    /**
     * Sets link address.
     *
     * @param linkAddress the link address
     */
    public void setLinkAddress(byte[] linkAddress) {
        this.linkAddress = linkAddress;
    }

    /**
     * Sets peer address.
     *
     * @param peerAddress the peer address
     */
    public void setPeerAddress(byte[] peerAddress) {
        this.peerAddress = peerAddress;
    }

    @Override
    public String toString() {
        if (RELAY_MSG_TYPES.contains(msgType)) {
            // relay message
            return toStringHelper(getClass())
                    .add("msgType", msgType)
                    .add("hopCount", hopCount)
                    .add("linkAddress", Ip6Address.valueOf(linkAddress))
                    .add("peerAddress", Ip6Address.valueOf(peerAddress))
                    .add("options", options)
                    .toString();
        } else {
            return toStringHelper(getClass())
                    .add("msgType", msgType)
                    .add("transactionId", transactionId)
                    .add("options", options)
                    .toString();
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.resourceGroups;
import com.facebook.presto.execution.MockQueryExecution;
import com.facebook.presto.execution.resourceGroups.InternalResourceGroup.RootInternalResourceGroup;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.apache.commons.math3.distribution.BinomialDistribution;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import static com.facebook.presto.execution.QueryState.FAILED;
import static com.facebook.presto.execution.QueryState.QUEUED;
import static com.facebook.presto.execution.QueryState.RUNNING;
import static com.facebook.presto.spi.resourceGroups.SchedulingPolicy.QUERY_PRIORITY;
import static com.facebook.presto.spi.resourceGroups.SchedulingPolicy.WEIGHTED;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.testing.Assertions.assertGreaterThan;
import static io.airlift.testing.Assertions.assertLessThan;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.Collections.reverse;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertEquals;
/**
 * Unit tests for {@code InternalResourceGroup}: queue limits, fair
 * eligibility/queuing across sub-groups, memory and CPU limits, and
 * priority/weighted scheduling policies.
 */
public class TestResourceGroups
{
// A query submitted beyond maxQueuedQueries must fail immediately.
@Test(timeOut = 10_000)
public void testQueueFull()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
root.setMaxQueuedQueries(1);
root.setMaxRunningQueries(1);
MockQueryExecution query1 = new MockQueryExecution(0);
root.run(query1);
assertEquals(query1.getState(), RUNNING);
MockQueryExecution query2 = new MockQueryExecution(0);
root.run(query2);
assertEquals(query2.getState(), QUEUED);
MockQueryExecution query3 = new MockQueryExecution(0);
root.run(query3);
assertEquals(query3.getState(), FAILED);
assertEquals(query3.getFailureCause().getMessage(), "Too many queued queries for \"root\"");
}
// Sub-groups at their own running limit must not start further queries;
// the remaining capacity rotates among the eligible sub-groups.
@Test(timeOut = 10_000)
public void testFairEligibility()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
root.setMaxQueuedQueries(4);
root.setMaxRunningQueries(1);
InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group1.setMaxQueuedQueries(4);
group1.setMaxRunningQueries(1);
InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group2.setMaxQueuedQueries(4);
group2.setMaxRunningQueries(1);
InternalResourceGroup group3 = root.getOrCreateSubGroup("3");
group3.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group3.setMaxQueuedQueries(4);
group3.setMaxRunningQueries(1);
MockQueryExecution query1a = new MockQueryExecution(0);
group1.run(query1a);
assertEquals(query1a.getState(), RUNNING);
MockQueryExecution query1b = new MockQueryExecution(0);
group1.run(query1b);
assertEquals(query1b.getState(), QUEUED);
MockQueryExecution query2a = new MockQueryExecution(0);
group2.run(query2a);
assertEquals(query2a.getState(), QUEUED);
MockQueryExecution query2b = new MockQueryExecution(0);
group2.run(query2b);
assertEquals(query2b.getState(), QUEUED);
MockQueryExecution query3a = new MockQueryExecution(0);
group3.run(query3a);
assertEquals(query3a.getState(), QUEUED);
query1a.complete();
root.processQueuedQueries();
// 2a and not 1b should have started, as group1 was not eligible to start a second query
assertEquals(query1b.getState(), QUEUED);
assertEquals(query2a.getState(), RUNNING);
assertEquals(query2b.getState(), QUEUED);
assertEquals(query3a.getState(), QUEUED);
query2a.complete();
root.processQueuedQueries();
assertEquals(query3a.getState(), RUNNING);
assertEquals(query2b.getState(), QUEUED);
assertEquals(query1b.getState(), QUEUED);
query3a.complete();
root.processQueuedQueries();
assertEquals(query1b.getState(), RUNNING);
assertEquals(query2b.getState(), QUEUED);
}
// Among eligible sub-groups, the earliest-queued query starts first,
// then capacity is shared fairly between the sub-groups.
@Test(timeOut = 10_000)
public void testFairQueuing()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
root.setMaxQueuedQueries(4);
root.setMaxRunningQueries(1);
InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group1.setMaxQueuedQueries(4);
group1.setMaxRunningQueries(2);
InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group2.setMaxQueuedQueries(4);
group2.setMaxRunningQueries(2);
MockQueryExecution query1a = new MockQueryExecution(0);
group1.run(query1a);
assertEquals(query1a.getState(), RUNNING);
MockQueryExecution query1b = new MockQueryExecution(0);
group1.run(query1b);
assertEquals(query1b.getState(), QUEUED);
MockQueryExecution query1c = new MockQueryExecution(0);
group1.run(query1c);
assertEquals(query1c.getState(), QUEUED);
MockQueryExecution query2a = new MockQueryExecution(0);
group2.run(query2a);
assertEquals(query2a.getState(), QUEUED);
query1a.complete();
root.processQueuedQueries();
// 1b and not 2a should have started, as it became queued first and group1 was eligible to run more
assertEquals(query1b.getState(), RUNNING);
assertEquals(query1c.getState(), QUEUED);
assertEquals(query2a.getState(), QUEUED);
// 2a and not 1c should have started, as all eligible sub groups get fair sharing
query1b.complete();
root.processQueuedQueries();
assertEquals(query2a.getState(), RUNNING);
assertEquals(query1c.getState(), QUEUED);
}
// Once the soft memory limit is reached, new queries queue until memory is released.
@Test(timeOut = 10_000)
public void testMemoryLimit()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, BYTE));
root.setMaxQueuedQueries(4);
root.setMaxRunningQueries(3);
MockQueryExecution query1 = new MockQueryExecution(1);
root.run(query1);
// Process the group to refresh stats
root.processQueuedQueries();
assertEquals(query1.getState(), RUNNING);
MockQueryExecution query2 = new MockQueryExecution(0);
root.run(query2);
assertEquals(query2.getState(), QUEUED);
MockQueryExecution query3 = new MockQueryExecution(0);
root.run(query3);
assertEquals(query3.getState(), QUEUED);
query1.complete();
root.processQueuedQueries();
assertEquals(query2.getState(), RUNNING);
assertEquals(query3.getState(), RUNNING);
}
// A sub-group's own memory limit gates its queries even when the root has headroom.
@Test
public void testSubgroupMemoryLimit()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(10, BYTE));
root.setMaxQueuedQueries(4);
root.setMaxRunningQueries(3);
InternalResourceGroup subgroup = root.getOrCreateSubGroup("subgroup");
subgroup.setSoftMemoryLimit(new DataSize(1, BYTE));
subgroup.setMaxQueuedQueries(4);
subgroup.setMaxRunningQueries(3);
MockQueryExecution query1 = new MockQueryExecution(1);
subgroup.run(query1);
// Process the group to refresh stats
root.processQueuedQueries();
assertEquals(query1.getState(), RUNNING);
MockQueryExecution query2 = new MockQueryExecution(0);
subgroup.run(query2);
assertEquals(query2.getState(), QUEUED);
MockQueryExecution query3 = new MockQueryExecution(0);
subgroup.run(query3);
assertEquals(query3.getState(), QUEUED);
query1.complete();
root.processQueuedQueries();
assertEquals(query2.getState(), RUNNING);
assertEquals(query3.getState(), RUNNING);
}
// Exceeding the soft CPU limit blocks new admissions until CPU quota regenerates.
@Test(timeOut = 10_000)
public void testSoftCpuLimit()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, BYTE));
root.setSoftCpuLimit(new Duration(1, SECONDS));
root.setHardCpuLimit(new Duration(2, SECONDS));
root.setCpuQuotaGenerationMillisPerSecond(2000);
root.setMaxQueuedQueries(1);
root.setMaxRunningQueries(2);
MockQueryExecution query1 = new MockQueryExecution(1, new Duration(1, SECONDS), 1);
root.run(query1);
assertEquals(query1.getState(), RUNNING);
MockQueryExecution query2 = new MockQueryExecution(0);
root.run(query2);
assertEquals(query2.getState(), RUNNING);
MockQueryExecution query3 = new MockQueryExecution(0);
root.run(query3);
assertEquals(query3.getState(), QUEUED);
query1.complete();
root.processQueuedQueries();
assertEquals(query2.getState(), RUNNING);
assertEquals(query3.getState(), QUEUED);
root.generateCpuQuota(2);
root.processQueuedQueries();
assertEquals(query2.getState(), RUNNING);
assertEquals(query3.getState(), RUNNING);
}
// Exceeding the hard CPU limit keeps queries queued until quota regenerates.
@Test(timeOut = 10_000)
public void testHardCpuLimit()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, BYTE));
root.setHardCpuLimit(new Duration(1, SECONDS));
root.setCpuQuotaGenerationMillisPerSecond(2000);
root.setMaxQueuedQueries(1);
root.setMaxRunningQueries(1);
MockQueryExecution query1 = new MockQueryExecution(1, new Duration(2, SECONDS), 1);
root.run(query1);
assertEquals(query1.getState(), RUNNING);
MockQueryExecution query2 = new MockQueryExecution(0);
root.run(query2);
assertEquals(query2.getState(), QUEUED);
query1.complete();
root.processQueuedQueries();
assertEquals(query2.getState(), QUEUED);
root.generateCpuQuota(2);
root.processQueuedQueries();
assertEquals(query2.getState(), RUNNING);
}
// With QUERY_PRIORITY scheduling, queries must start in strict descending priority order.
@Test(timeOut = 10_000)
public void testPriorityScheduling()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
root.setMaxQueuedQueries(100);
// Start with zero capacity, so that nothing starts running until we've added all the queries
root.setMaxRunningQueries(0);
root.setSchedulingPolicy(QUERY_PRIORITY);
InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group1.setMaxQueuedQueries(100);
group1.setMaxRunningQueries(1);
InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group2.setMaxQueuedQueries(100);
group2.setMaxRunningQueries(1);
SortedMap<Integer, MockQueryExecution> queries = new TreeMap<>();
Random random = new Random();
for (int i = 0; i < 100; i++) {
int priority;
do {
priority = random.nextInt(1_000_000) + 1;
} while (queries.containsKey(priority));
MockQueryExecution query = new MockQueryExecution(0, priority);
if (random.nextBoolean()) {
group1.run(query);
}
else {
group2.run(query);
}
queries.put(priority, query);
}
root.setMaxRunningQueries(1);
List<MockQueryExecution> orderedQueries = new ArrayList<>(queries.values());
reverse(orderedQueries);
for (MockQueryExecution query : orderedQueries) {
root.processQueuedQueries();
assertEquals(query.getState(), RUNNING);
query.complete();
}
}
// With WEIGHTED scheduling, admissions are proportional to sub-group weights.
@Test(timeOut = 10_000)
public void testWeightedScheduling()
{
RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> { }, directExecutor());
root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
root.setMaxQueuedQueries(4);
// Start with zero capacity, so that nothing starts running until we've added all the queries
root.setMaxRunningQueries(0);
root.setSchedulingPolicy(WEIGHTED);
InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group1.setMaxQueuedQueries(2);
group1.setMaxRunningQueries(2);
InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
group2.setMaxQueuedQueries(2);
group2.setMaxRunningQueries(2);
group2.setSchedulingWeight(2);
Set<MockQueryExecution> group1Queries = fillGroupTo(group1, ImmutableSet.of(), 2);
Set<MockQueryExecution> group2Queries = fillGroupTo(group2, ImmutableSet.of(), 2);
root.setMaxRunningQueries(1);
int group2Ran = 0;
for (int i = 0; i < 1000; i++) {
for (Iterator<MockQueryExecution> iterator = group1Queries.iterator(); iterator.hasNext(); ) {
MockQueryExecution query = iterator.next();
if (query.getState() == RUNNING) {
query.complete();
iterator.remove();
}
}
for (Iterator<MockQueryExecution> iterator = group2Queries.iterator(); iterator.hasNext(); ) {
MockQueryExecution query = iterator.next();
if (query.getState() == RUNNING) {
query.complete();
iterator.remove();
group2Ran++;
}
}
root.processQueuedQueries();
group1Queries = fillGroupTo(group1, group1Queries, 2);
group2Queries = fillGroupTo(group2, group2Queries, 2);
}
// group1 has a weight of 1 and group2 has a weight of 2, so group2 should account for (2 / (1 + 2)) of the queries.
// since this is stochastic, we check that the result of 1000 trials are 2/3 with 99.9999% confidence
BinomialDistribution binomial = new BinomialDistribution(1000, 2.0 / 3.0);
int lowerBound = binomial.inverseCumulativeProbability(0.000001);
int upperBound = binomial.inverseCumulativeProbability(0.999999);
assertLessThan(group2Ran, upperBound);
assertGreaterThan(group2Ran, lowerBound);
}
// Tops the group up to `count` queued/running mock queries, reusing the existing set.
private static Set<MockQueryExecution> fillGroupTo(InternalResourceGroup group, Set<MockQueryExecution> existingQueries, int count)
{
Set<MockQueryExecution> queries = new HashSet<>(existingQueries);
while (queries.size() < count) {
MockQueryExecution query = new MockQueryExecution(0);
queries.add(query);
group.run(query);
}
return queries;
}
}
| |
/*
* Copyright 2004-2012 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package jp.fieldnotes.hatunatu.util.collection;
import java.lang.reflect.Array;
import java.util.List;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
/**
* @author higa
*
*/
public class ArrayUtilTest {
/**
 * Verifies the asArray/asIntArray factory helpers.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testAsArray() throws Exception {
    assertArrayEquals(new int[] { 1, 2 }, ArrayUtil.asIntArray(1, 2));
    assertArrayEquals(new Object[] { "a", 2 }, ArrayUtil.asArray("a", 2));
    assertArrayEquals(new String[] { "a", "b" }, ArrayUtil.asArray("a", "b"));
    assertArrayEquals(new Integer[] { 1, 2 }, ArrayUtil.asArray(1, 2));
}
/**
 * Verifies add() and the null/empty short-circuit behaviour of addAll()
 * for object arrays.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testAddAll() throws Exception {
    String[] base = new String[] { "111" };
    String[] grown = ArrayUtil.add(base, "222");
    assertThat(grown.length, is(2));
    assertThat(grown[0], is("111"));
    assertThat(grown[1], is("222"));
    String[] none = new String[0];
    assertThat(ArrayUtil.addAll((Object[]) null, (Object[]) null), is(nullValue()));
    assertThat(ArrayUtil.addAll(null, none).length, is(0));
    assertThat(ArrayUtil.addAll(none, null), is(sameInstance(none)));
    assertThat(ArrayUtil.addAll(none, null), is(sameInstance(none)));
    assertThat(ArrayUtil.addAll(none, none), is(sameInstance(none)));
    assertThat(ArrayUtil.addAll(none, base), is(sameInstance(base)));
    assertThat(ArrayUtil.addAll(base, none), is(sameInstance(base)));
}
/**
 * Verifies that addAll() concatenates two non-empty arrays in order.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testAddAll2() throws Exception {
    String[] head = new String[] { "1", "2" };
    String[] tail = new String[] { "3" };
    String[] joined = ArrayUtil.addAll(head, tail);
    assertThat(joined.length, is(3));
    assertThat(joined[0], is("1"));
    assertThat(joined[1], is("2"));
    assertThat(joined[2], is("3"));
}
/**
 * Verifies add() and the null/empty short-circuit behaviour of addAll()
 * for int arrays.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testAddAll_int() throws Exception {
    int[] base = new int[] { 111 };
    int[] grown = ArrayUtil.add(base, 222);
    assertThat(grown.length, is(2));
    assertThat(grown[0], is(111));
    assertThat(grown[1], is(222));
    int[] none = new int[0];
    assertThat(ArrayUtil.addAll((int[]) null, (int[]) null), is(nullValue()));
    assertThat(ArrayUtil.addAll(null, none).length, is(0));
    assertThat(ArrayUtil.addAll(none, null), is(sameInstance(none)));
    assertThat(ArrayUtil.addAll(none, null), is(sameInstance(none)));
    assertThat(ArrayUtil.addAll(none, none), is(sameInstance(none)));
    assertThat(ArrayUtil.addAll(none, base), is(sameInstance(base)));
    assertThat(ArrayUtil.addAll(base, none), is(sameInstance(base)));
}
/**
 * Verifies that add() appends a single int, producing a new array.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testAdd_int() throws Exception {
    int[] src = new int[] { 1 };
    int[] result = ArrayUtil.add(src, 2);
    assertThat(result.length, is(2));
    assertThat(result[0], is(1));
    assertThat(result[1], is(2));
}
/**
 * Verifies indexOf() for object arrays: first match, search-from-index,
 * missing element, and null handling.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testIndexOf() throws Exception {
    String[] items = new String[] { "111", "222", "333", "222" };
    assertThat(ArrayUtil.indexOf(items, "222"), is(1));
    assertThat(ArrayUtil.indexOf(items, "222", 2), is(3));
    assertThat(ArrayUtil.indexOf(items, new Object()), is(-1));
    assertThat(ArrayUtil.indexOf(items, null), is(-1));
    items[1] = null;
    assertThat(ArrayUtil.indexOf(items, null), is(1));
}
/**
 * Verifies indexOf() for char arrays.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testIndexOf_character() throws Exception {
    char[] chars = new char[] { 'a', 'b', 'c' };
    assertThat(ArrayUtil.indexOf(chars, 'a'), is(0));
    assertThat(ArrayUtil.indexOf(chars, 'd'), is(-1));
}
/**
 * Verifies remove() when the target is the first element.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testRemoveFirst() throws Exception {
    String[] src = new String[] { "111", "222", "333" };
    String[] result = ArrayUtil.remove(src, "111");
    assertThat(result.length, is(2));
    assertThat(result[0], is("222"));
    assertThat(result[1], is("333"));
}
/**
 * Verifies remove() when the target is a middle element.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testRemoveMiddle() throws Exception {
    String[] src = new String[] { "111", "222", "333" };
    String[] result = ArrayUtil.remove(src, "222");
    assertThat(result.length, is(2));
    assertThat(result[0], is("111"));
    assertThat(result[1], is("333"));
}
/**
 * Verifies remove() when the target is the last element.
 *
 * @throws Exception on unexpected failure
 */
@Test
public void testRemoveLast() throws Exception {
    String[] src = new String[] { "111", "222", "333" };
    String[] result = ArrayUtil.remove(src, "333");
    assertThat(result.length, is(2));
    assertThat(result[0], is("111"));
    assertThat(result[1], is("222"));
}
/**
 * remove() should return the very same array instance when the element
 * to remove is not present.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testRemoveNothing() throws Exception {
    final String[] source = { "111", "222", "333" };
    assertThat(ArrayUtil.remove(source, "444"), is(sameInstance(source)));
}
/** isEmpty() should be true only for a null or zero-length array. */
@Test
public void testIsEmpty() {
    assertTrue(ArrayUtil.isEmpty((Object[]) null));
    assertTrue(ArrayUtil.isEmpty(new Object[0]));
    assertFalse(ArrayUtil.isEmpty(new Object[] { "" }));
    assertFalse(ArrayUtil.isEmpty(new Object[] { "aaa" }));
}
/**
 * contains(Object[], Object) should find matching elements (including a
 * null element) and tolerate a null array.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testContains() throws Exception {
    // Present: leading position, trailing position, and a null element.
    assertThat(ArrayUtil.contains(new Object[] { "1" }, "1"), is(true));
    assertThat(ArrayUtil.contains(new Object[] { "2", "1" }, "1"), is(true));
    assertThat(ArrayUtil.contains(new Object[] { null }, null), is(true));
    // Absent: missing element, and a null array never contains anything.
    assertThat(ArrayUtil.contains(new Object[] { "1" }, "2"), is(not(true)));
    assertThat(ArrayUtil.contains((Object[]) null, "1"), is(not(true)));
    assertThat(ArrayUtil.contains((Object[]) null, null), is(not(true)));
}
/**
 * contains(char[], char) should find present characters and tolerate a
 * null array.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testContains_character() throws Exception {
    assertThat(ArrayUtil.contains(new char[] { '1', '2' }, '1'), is(true));
    assertThat(ArrayUtil.contains(new char[] { '2', '1' }, '1'), is(true));
    assertThat(ArrayUtil.contains(new char[] { '1' }, '2'), is(not(true)));
    assertThat(ArrayUtil.contains((char[]) null, '1'), is(not(true)));
}
/**
 * equalsIgnoreSequence() should treat two arrays as equal when they hold
 * the same elements (by equals()) in any order, respecting multiplicity,
 * and should handle null arrays.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testEqualsIgnoreSequence() throws Exception {
    assertThat(ArrayUtil.equalsIgnoreSequence(
        new Object[] { "1" },
        new Object[] { "1" }), is(true));
    assertThat(ArrayUtil.equalsIgnoreSequence(
        new Object[] { "1", "2", "3" },
        new Object[] { "2", "3", "1" }), is(true));
    assertThat(ArrayUtil.equalsIgnoreSequence(
        new Object[] { "1" },
        new Object[] { "2" }), is(not(true)));
    assertThat(ArrayUtil.equalsIgnoreSequence(
        new Object[] { "1" },
        new Object[] {}), is(not(true)));
    // Integer.valueOf(1) replaces the deprecated Integer(String) boxing
    // constructor; an Integer is never equals() to the String "1".
    assertThat(ArrayUtil.equalsIgnoreSequence(
        new Object[] { Integer.valueOf(1) },
        new Object[] { "1" }), is(not(true)));
    // Multiplicity matters: {1,1} is not a permutation of {1,2}.
    assertThat(ArrayUtil.equalsIgnoreSequence(
        new Object[] { "1", "1" },
        new Object[] { "1", "2" }), is(not(true)));
    assertThat(ArrayUtil.equalsIgnoreSequence(new Object[] { "1", "2", "1",
        "2" }, new Object[] { "2", "2", "1", "1" }), is(true));
    // Two null arrays are equal, but null is not equal to an empty array.
    assertThat(ArrayUtil.equalsIgnoreSequence(null, null), is(true));
    assertThat(
        ArrayUtil.equalsIgnoreSequence(null, new Object[] {}),
        is(not(true)));
}
/**
 * java.lang.reflect.Array should allow element assignment on a primitive
 * int[] created via Array.newInstance(), unboxing the wrapper values.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testSetArrayValue() throws Exception {
    Object o = Array.newInstance(int.class, 3);
    // Integer.valueOf() replaces the deprecated Integer(int) constructor;
    // Array.set() unboxes the wrapper into the primitive slot.
    Array.set(o, 0, Integer.valueOf(1));
    Array.set(o, 1, Integer.valueOf(2));
    Array.set(o, 2, Integer.valueOf(3));
    int[] num = (int[]) o;
    assertArrayEquals(new int[] { 1, 2, 3 }, num);
}
/**
 * toObjectArray() should convert an int[] into a boxed Integer[]
 * preserving element order.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testToObjectArray() throws Exception {
    final Object[] a = ArrayUtil.toObjectArray(new int[] { 1, 5, 2 });
    // Autoboxing replaces the deprecated Integer(int) constructor;
    // assertArrayEquals compares with equals(), so identity is irrelevant.
    assertArrayEquals(new Integer[] { 1, 5, 2 }, a);
}
/**
 * toObjectArray() must reject an argument that is not an array by
 * throwing IllegalArgumentException.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test(expected = IllegalArgumentException.class)
public void testToObjectArray_NoArray() throws Exception {
    ArrayUtil.toObjectArray("a");
}
/**
 * toList() should wrap a primitive array as a List of boxed values in
 * the original order.
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testToList() throws Exception {
    final Object source = new int[] { 1, 5 };
    final List<Integer> boxed = ArrayUtil.toList(source);
    assertThat(boxed.get(0), is(Integer.valueOf(1)));
    assertThat(boxed.get(1), is(Integer.valueOf(5)));
}
/** isArray() should be true only for actual array instances. */
@Test
public void testIsArray() {
    assertTrue(ArrayUtil.isArray(new Object[0]));
    assertTrue(ArrayUtil.isArray(new Object[] { "" }));
    assertTrue(ArrayUtil.isArray(new Object[] { "aaa" }));
    assertFalse(ArrayUtil.isArray(null));
    assertFalse(ArrayUtil.isArray("hoge"));
}
/** isNotArray() should be the exact negation of isArray(). */
@Test
public void testIsNotArray() {
    assertTrue(ArrayUtil.isNotArray(null));
    assertTrue(ArrayUtil.isNotArray("hoge"));
    assertFalse(ArrayUtil.isNotArray(new Object[0]));
    assertFalse(ArrayUtil.isNotArray(new Object[] { "" }));
    assertFalse(ArrayUtil.isNotArray(new Object[] { "aaa" }));
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.memory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.hamcrest.Matchers.equalTo;
/**
 * Tests for IndexingMemoryController's division of the node-wide indexing
 * buffer budget across shards, and for the active/inactive shard
 * transitions driven by (simulated) time.
 */
public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {

    /**
     * Test double for IndexingMemoryController that records the buffer sizes
     * pushed to each shard and replaces the wall clock with a manually
     * advanced one, so activity/inactivity transitions can be asserted
     * deterministically.
     */
    static class MockController extends IndexingMemoryController {

        // Sentinel buffer size recorded for a shard once it is marked inactive.
        final static ByteSizeValue INACTIVE = new ByteSizeValue(-1);

        // Last indexing buffer size pushed to each tracked shard.
        final Map<IndexShard, ByteSizeValue> indexingBuffers = new HashMap<>();

        // Simulated "last indexed at" timestamps per shard, in nanos.
        final Map<IndexShard, Long> lastIndexTimeNanos = new HashMap<>();

        // Shards the mock currently considers active.
        final Set<IndexShard> activeShards = new HashSet<>();

        // Manually advanced clock, in seconds; see currentTimeInNanos().
        long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds();

        public MockController(Settings settings) {
            super(Settings.builder()
                    .put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
                    .put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
                    .put(settings)
                    .build(),
                    null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
        }

        // Stops tracking the shard entirely (as if it were removed from the node).
        public void deleteShard(IndexShard id) {
            indexingBuffers.remove(id);
        }

        // Asserts the last indexing buffer size pushed to the given shard.
        public void assertBuffers(IndexShard id, ByteSizeValue indexing) {
            assertThat(indexingBuffers.get(id), equalTo(indexing));
        }

        // Asserts the shard has been marked inactive (sentinel recorded).
        public void assertInactive(IndexShard id) {
            assertThat(indexingBuffers.get(id), equalTo(INACTIVE));
        }

        @Override
        protected long currentTimeInNanos() {
            // Derive "now" from the manually advanced clock, not the system clock.
            return TimeValue.timeValueSeconds(currentTimeSec).nanos();
        }

        @Override
        protected List<IndexShard> availableShards() {
            return new ArrayList<>(indexingBuffers.keySet());
        }

        @Override
        protected boolean shardAvailable(IndexShard shard) {
            return indexingBuffers.containsKey(shard);
        }

        @Override
        protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize) {
            // Record the pushed size so the assert* helpers can inspect it.
            indexingBuffers.put(shard, shardIndexingBufferSize);
        }

        @Override
        protected boolean checkIdle(IndexShard shard) {
            final TimeValue inactiveTime = settings.getAsTime(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
            Long ns = lastIndexTimeNanos.get(shard);
            if (ns == null) {
                // Never indexed: treated as idle.
                return true;
            } else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) {
                // Idle long enough: mark inactive and drop from the active set.
                indexingBuffers.put(shard, INACTIVE);
                activeShards.remove(shard);
                return true;
            } else {
                return false;
            }
        }

        // Advances the simulated clock by the given number of seconds.
        public void incrementTimeSec(int sec) {
            currentTimeSec += sec;
        }

        // Simulates an indexing operation on the shard, then re-runs the
        // controller's buffer-allocation check.
        public void simulateIndexing(IndexShard shard) {
            lastIndexTimeNanos.put(shard, currentTimeInNanos());
            if (indexingBuffers.containsKey(shard) == false) {
                // First time we are seeing this shard; start it off with inactive buffers as IndexShard does:
                indexingBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
            }
            activeShards.add(shard);
            forceCheck();
        }
    }

    // Adding/removing shards should rebalance the fixed 10mb budget evenly
    // across whichever shards are currently tracked.
    public void testShardAdditionAndRemoval() {
        createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService test = indicesService.indexService("test");
        MockController controller = new MockController(Settings.builder()
            .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb").build());
        IndexShard shard0 = test.getShard(0);
        controller.simulateIndexing(shard0);
        controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB)); // translog is maxed at 64K
        // add another shard
        IndexShard shard1 = test.getShard(1);
        controller.simulateIndexing(shard1);
        controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB));
        controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB));
        // remove first shard
        controller.deleteShard(shard0);
        controller.forceCheck();
        controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB)); // translog is maxed at 64K
        // remove second shard
        controller.deleteShard(shard1);
        controller.forceCheck();
        // add a new one
        IndexShard shard2 = test.getShard(2);
        controller.simulateIndexing(shard2);
        controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB)); // translog is maxed at 64K
    }

    // Shards that stop indexing for longer than the configured inactive time
    // (5s here) should be marked inactive; indexing again reactivates them
    // immediately.
    public void testActiveInactive() {
        createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService test = indicesService.indexService("test");
        MockController controller = new MockController(Settings.builder()
            .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
            .put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
            .build());
        IndexShard shard0 = test.getShard(0);
        controller.simulateIndexing(shard0);
        IndexShard shard1 = test.getShard(1);
        controller.simulateIndexing(shard1);
        controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB));
        controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB));
        // index into both shards, move the clock and see that they are still active
        controller.simulateIndexing(shard0);
        controller.simulateIndexing(shard1);
        controller.incrementTimeSec(10);
        controller.forceCheck();
        // both shards now inactive
        controller.assertInactive(shard0);
        controller.assertInactive(shard1);
        // index into one shard only, see it becomes active
        controller.simulateIndexing(shard0);
        controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB));
        controller.assertInactive(shard1);
        controller.incrementTimeSec(3); // increment but not enough to become inactive
        controller.forceCheck();
        controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB));
        controller.assertInactive(shard1);
        controller.incrementTimeSec(3); // increment some more
        controller.forceCheck();
        controller.assertInactive(shard0);
        controller.assertInactive(shard1);
        // index some and shard becomes immediately active
        controller.simulateIndexing(shard1);
        controller.assertInactive(shard0);
        controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB));
    }

    // Per-shard minimums should win over the even split of the total budget.
    public void testMinShardBufferSizes() {
        MockController controller = new MockController(Settings.builder()
            .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
            .put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
            .put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());
        assertTwoActiveShards(controller, new ByteSizeValue(6, ByteSizeUnit.MB), new ByteSizeValue(40, ByteSizeUnit.KB));
    }

    // Per-shard maximums should cap the even split of the total budget.
    public void testMaxShardBufferSizes() {
        MockController controller = new MockController(Settings.builder()
            .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
            .put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
            .put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());
        assertTwoActiveShards(controller, new ByteSizeValue(3, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.KB));
    }

    // A percentage buffer size is resolved against the (fixed 100mb) heap.
    public void testRelativeBufferSizes() {
        MockController controller = new MockController(Settings.builder()
            .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
            .build());
        assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB)));
    }

    // The absolute minimum total buffer size should floor a tiny percentage.
    public void testMinBufferSizes() {
        MockController controller = new MockController(Settings.builder()
            .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
            .put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb").build());
        assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
    }

    // The absolute maximum total buffer size should cap a large percentage.
    public void testMaxBufferSizes() {
        MockController controller = new MockController(Settings.builder()
            .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
            .put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb").build());
        assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
    }

    /**
     * Creates a 2-shard index, simulates indexing into both shards and
     * asserts each receives {@code indexBufferSize}.
     *
     * NOTE(review): {@code translogBufferSize} is accepted but never
     * asserted here — confirm whether translog-buffer checks were removed
     * intentionally or this parameter should be used.
     */
    protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) {
        createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService test = indicesService.indexService("test");
        IndexShard shard0 = test.getShard(0);
        controller.simulateIndexing(shard0);
        IndexShard shard1 = test.getShard(1);
        controller.simulateIndexing(shard1);
        controller.assertBuffers(shard0, indexBufferSize);
        controller.assertBuffers(shard1, indexBufferSize);
    }
}
| |
/*
Copyright 2014, Strategic Gains, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.strategicgains.hyperexpress.serialization.jackson;
import static org.junit.Assert.assertThat;
import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.SimpleDateFormat;
import org.junit.BeforeClass;
import org.junit.Test;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.io.CharStreams;
import com.strategicgains.hyperexpress.builder.DefaultLinkBuilder;
import com.strategicgains.hyperexpress.builder.LinkBuilder;
import com.strategicgains.hyperexpress.domain.Resource;
import com.strategicgains.hyperexpress.domain.hal.HalResource;
/**
 * Verifies that HalResourceSerializer renders HAL resources — namespaces
 * (curies), links, properties and embedded resources — as the JSON stored
 * in the corresponding classpath fixture files.
 *
 * @author toddf
 * @since Aug 6, 2014
 */
public class HalResourceSerializerTest
{
    // ObjectMapper is thread-safe once configured, so one shared instance
    // (configured in setUpBeforeClass) serves every test. Now final: it is
    // never reassigned.
    private static final ObjectMapper mapper = new ObjectMapper();

    /**
     * Registers the serializer under test, then configures the mapper for
     * field-based, null-omitting serialization with ISO-8601 dates.
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception
    {
        SimpleModule module = new SimpleModule();
        module.addSerializer(HalResource.class, new HalResourceSerializer());
        mapper.registerModule(module);
        mapper
            .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
            .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
            .setSerializationInclusion(JsonInclude.Include.NON_NULL)
            .setVisibility(PropertyAccessor.FIELD, Visibility.ANY)
            .setVisibility(PropertyAccessor.GETTER, Visibility.NONE)
            .setVisibility(PropertyAccessor.SETTER, Visibility.NONE)
            .setVisibility(PropertyAccessor.IS_GETTER, Visibility.NONE)
            .setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ"));
    }

    @Test
    public void shouldSerializeSingleNamespace()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        r.addNamespace("ns:1", "/namespaces/1");
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "single-namespace.json");
    }

    @Test
    public void shouldSerializeNamespaceArray()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        r.addNamespace("ns:1", "/namespaces/1");
        r.addNamespace("ns:2", "/namespaces/2");
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "namespace-array.json");
    }

    @Test
    public void shouldSerializeSingleLink()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something").build());
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "single-link.json");
    }

    @Test
    public void shouldSerializeTemplated()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something/{templated}").build());
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "templated.json");
    }

    @Test
    public void shouldSerializeTemplatedArray()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something/{templated}").build());
        r.addLink(l.rel("self").urlPattern("/something/not_templated").build());
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "templated-array.json");
    }

    @Test
    public void shouldSerializeLinkArray()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something").build());
        r.addLink(l.rel("self").urlPattern("/something/else").build());
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "link-array2.json");
    }

    @Test
    public void shouldSerializeAsLinkArray()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something").build(), true);
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "link-array.json");
    }

    @Test
    public void shouldSerializeProperties()
        throws JsonProcessingException
    {
        Resource r = new HalResource();
        r.addProperty("name", "A HAL resource");
        // Integer.valueOf replaces the deprecated Integer(int) constructor;
        // the serialized JSON is identical.
        r.addProperty("value", Integer.valueOf(42));
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "properties.json");
    }

    @Test
    public void shouldSerializeSingleEmbed()
        throws JsonProcessingException
    {
        Resource r = new HalResource().addProperty("name", "root");
        r.addResource("children", new HalResource().addProperty("name", "child"));
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "single-embed.json");
    }

    @Test
    public void shouldSerializeEmbedAsArray()
        throws JsonProcessingException
    {
        Resource r = new HalResource().addProperty("name", "root");
        r.addResource("children", new HalResource().addProperty("name", "child"), true);
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "embed-as-array.json");
    }

    @Test
    public void shouldSerializeEmbeddedArray()
        throws JsonProcessingException
    {
        Resource r = new HalResource().addProperty("name", "root");
        r.addResource("children", new HalResource().addProperty("name", "child 1"));
        r.addResource("children", new HalResource().addProperty("name", "child 2"));
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "embedded-array.json");
    }

    @Test
    public void shouldSerializeResource()
        throws JsonProcessingException
    {
        Resource r = new HalResource().addProperty("name", "root");
        r.addNamespace("ns:1", "/namespaces/1");
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something").build());
        r.addResource("children", new HalResource().addProperty("name", "child"));
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "resource.json");
    }

    @Test
    public void shouldSerializeResourceAsArrays()
        throws JsonProcessingException
    {
        Resource r = new HalResource().addProperty("name", "root");
        r.addNamespace("ns:1", "/namespaces/1");
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something").build(), true);
        r.addResource("children", new HalResource().addProperty("name", "child"), true);
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "resource-as-arrays.json");
    }

    @Test
    public void shouldSerializeResourceWithArrays()
        throws JsonProcessingException
    {
        Resource r = new HalResource().addProperty("name", "root");
        r.addNamespace("ns:1", "/namespaces/1");
        r.addNamespace("ns:2", "/namespaces/2");
        LinkBuilder l = new DefaultLinkBuilder();
        r.addLink(l.rel("self").urlPattern("/something").build());
        r.addLink(l.rel("self").urlPattern("/something/{templated}").build());
        r.addResource("children", new HalResource().addProperty("name", "child 1"));
        r.addResource("children", new HalResource().addProperty("name", "child 2"));
        String json = mapper.writeValueAsString(r);
        thenJsonShouldBeEqualTo(json, "resource-with-arrays.json");
    }

    /**
     * Asserts that the checked JSON matches the expected JSON stored in the
     * given classpath resource, ignoring extra unexpected fields.
     */
    protected void thenJsonShouldBeEqualTo(String checked, String filePath) {
        try {
            assertThat(checked, sameJSONAs(fileContent(filePath)).allowingExtraUnexpectedFields());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Reads a classpath resource (relative to this class) as a string.
     *
     * @throws IOException if the resource is missing or cannot be read
     */
    public String fileContent(String filename) throws IOException {
        final java.io.InputStream in = this.getClass().getResourceAsStream(filename);
        if (in == null) {
            // Fail with a clear message instead of an NPE when the expected
            // JSON fixture is missing from the classpath.
            throw new IOException("Test resource not found on classpath: " + filename);
        }
        // Fixtures are JSON; read them as UTF-8 explicitly rather than with
        // the platform default charset.
        try (InputStreamReader reader = new InputStreamReader(in, java.nio.charset.StandardCharsets.UTF_8)) {
            return CharStreams.toString(reader);
        }
    }
}
| |
package org.codehaus.plexus.archiver.diags;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.codehaus.plexus.archiver.*;
import org.codehaus.plexus.components.io.resources.PlexusIoResource;
import org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection;
import org.codehaus.plexus.util.StringUtils;
import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A diagnostic archiver that keeps track of stuff that has been added.
 * <p>
 * Every add*() call is recorded as an {@link Addition} in {@link #added};
 * no archive is ever written. File/directory mode getters return fixed
 * defaults (0644 / 0755).
 */
public class TrackingArchiver
    implements Archiver
{
    private File destFile;

    /** Everything registered with this archiver, in call order. */
    public final List<Addition> added = new ArrayList<Addition>();

    private boolean useJvmChmod;

    private boolean ignorePermissions;

    /** No-op: this archiver only records additions; nothing is written. */
    public void createArchive()
        throws ArchiverException, IOException
    {
    }

    public void addDirectory( final @Nonnull File directory )
        throws ArchiverException
    {
        added.add( new Addition( directory, null, null, null, -1 ) );
    }

    public void addDirectory( final @Nonnull File directory, final String prefix )
        throws ArchiverException
    {
        added.add( new Addition( directory, prefix, null, null, -1 ) );
    }

    public void addDirectory( final @Nonnull File directory, final String[] includes, final String[] excludes )
        throws ArchiverException
    {
        added.add( new Addition( directory, null, includes, excludes, -1 ) );
    }

    public void addDirectory( final @Nonnull File directory, final String prefix, final String[] includes,
                              final String[] excludes )
        throws ArchiverException
    {
        added.add( new Addition( directory, prefix, includes, excludes, -1 ) );
    }

    public void addFileSet( final @Nonnull FileSet fileSet )
        throws ArchiverException
    {
        added.add( new Addition( fileSet, null, null, null, -1 ) );
    }

    public void addFile( final @Nonnull File inputFile, final @Nonnull String destFileName )
        throws ArchiverException
    {
        added.add( new Addition( inputFile, destFileName, null, null, -1 ) );
    }

    public void addFile( final @Nonnull File inputFile, final @Nonnull String destFileName, final int permissions )
        throws ArchiverException
    {
        added.add( new Addition( inputFile, destFileName, null, null, permissions ) );
    }

    public void addArchivedFileSet( final @Nonnull File archiveFile )
        throws ArchiverException
    {
        added.add( new Addition( archiveFile, null, null, null, -1 ) );
    }

    public void addArchivedFileSet( final @Nonnull File archiveFile, final String prefix )
        throws ArchiverException
    {
        added.add( new Addition( archiveFile, prefix, null, null, -1 ) );
    }

    // NOTE(review): only the symlink name is recorded; the link target (s2)
    // is dropped — confirm whether tests need the target tracked as well.
    public void addSymlink( String s, String s2 )
        throws ArchiverException
    {
        added.add( new Addition( s, null, null, null, -1 ) );
    }

    public void addSymlink( String s, int i, String s2 )
        throws ArchiverException
    {
        added.add( new Addition( s, null, null, null, -1 ) );
    }

    public void addArchivedFileSet( final File archiveFile, final String[] includes, final String[] excludes )
        throws ArchiverException
    {
        added.add( new Addition( archiveFile, null, includes, excludes, -1 ) );
    }

    public void addArchivedFileSet( final @Nonnull File archiveFile, final String prefix, final String[] includes,
                                    final String[] excludes )
        throws ArchiverException
    {
        added.add( new Addition( archiveFile, prefix, includes, excludes, -1 ) );
    }

    public void addArchivedFileSet( final ArchivedFileSet fileSet )
        throws ArchiverException
    {
        added.add( new Addition( fileSet, null, null, null, -1 ) );
    }

    public void addArchivedFileSet( final ArchivedFileSet fileSet, Charset charset )
        throws ArchiverException
    {
        added.add( new Addition( fileSet, null, null, null, -1 ) );
    }

    public void addResource( final PlexusIoResource resource, final String destFileName, final int permissions )
        throws ArchiverException
    {
        added.add( new Addition( resource, destFileName, null, null, permissions ) );
    }

    public void addResources( final PlexusIoResourceCollection resources )
        throws ArchiverException
    {
        added.add( new Addition( resources, null, null, null, -1 ) );
    }

    public File getDestFile()
    {
        return destFile;
    }

    public void setDestFile( final File destFile )
    {
        this.destFile = destFile;
    }

    public void setFileMode( final int mode )
    {
    }

    public int getFileMode()
    {
        // Fixed default rw-r--r--; an octal literal replaces the roundabout
        // Integer.parseInt("0644", 8) (both evaluate to decimal 420).
        return 0644;
    }

    public int getOverrideFileMode()
    {
        return 0644;
    }

    public void setDefaultFileMode( final int mode )
    {
    }

    public int getDefaultFileMode()
    {
        return 0644;
    }

    public void setDirectoryMode( final int mode )
    {
    }

    public int getDirectoryMode()
    {
        // Fixed default rwxr-xr-x (octal 0755).
        return 0755;
    }

    public int getOverrideDirectoryMode()
    {
        return 0755;
    }

    public void setDefaultDirectoryMode( final int mode )
    {
    }

    public int getDefaultDirectoryMode()
    {
        return 0755;
    }

    public boolean getIncludeEmptyDirs()
    {
        return false;
    }

    public void setIncludeEmptyDirs( final boolean includeEmptyDirs )
    {
    }

    public void setDotFileDirectory( final File dotFileDirectory )
    {
    }

    public
    @Nonnull
    ResourceIterator getResources()
        throws ArchiverException
    {
        // Iteration over recorded resources is not supported by this
        // diagnostic implementation.
        throw new RuntimeException( "Not implemented" );
    }

    @SuppressWarnings( "rawtypes" )
    public Map<String, ArchiveEntry> getFiles()
    {
        return new HashMap<String, ArchiveEntry>();
    }

    public boolean isForced()
    {
        return false;
    }

    public void setForced( final boolean forced )
    {
    }

    public boolean isSupportingForced()
    {
        return true;
    }

    public String getDuplicateBehavior()
    {
        return null;
    }

    public void setDuplicateBehavior( final String duplicate )
    {
    }

    /**
     * Immutable record of a single add*() call: the raw resource, the
     * directory (when the resource was a directory or file set), the
     * destination/prefix, requested permissions (-1 = unspecified) and any
     * include/exclude patterns.
     */
    public class Addition
    {
        public final Object resource;

        public final File directory;

        public final String destination;

        public final int permissions;

        public final String[] includes;

        public final String[] excludes;

        public Addition( final Object resource, final String destination, final String[] includes,
                         final String[] excludes, final int permissions )
        {
            this.resource = resource;
            if ( resource instanceof FileSet )
            {
                // A FileSet carries its own directory/prefix/patterns; those
                // take precedence over the other constructor arguments.
                final FileSet fs = (FileSet) resource;
                directory = fs.getDirectory();
                this.destination = fs.getPrefix();
                this.includes = fs.getIncludes();
                this.excludes = fs.getExcludes();
                this.permissions = permissions;
            }
            else
            {
                if ( resource instanceof File && ( (File) resource ).isDirectory() )
                {
                    directory = (File) resource;
                }
                else
                {
                    directory = null;
                }
                this.destination = destination;
                this.includes = includes;
                this.excludes = excludes;
                this.permissions = permissions;
            }
        }

        /**
         * {@inheritDoc}
         *
         * @see Object#toString()
         */
        @Override
        public String toString()
        {
            return "Addition (\n resource= " + resource + "\n directory= " + directory + "\n destination= "
                + destination + "\n permissions= " + permissions + "\n includes= " + ( includes == null
                ? "-none-"
                : StringUtils.join( includes, ", " ) ) + "\n excludes= " + ( excludes == null
                ? "-none-"
                : StringUtils.join( excludes, ", " ) ) + "\n)";
        }
    }

    public boolean isUseJvmChmod()
    {
        return useJvmChmod;
    }

    public void setUseJvmChmod( final boolean useJvmChmod )
    {
        this.useJvmChmod = useJvmChmod;
    }

    public boolean isIgnorePermissions()
    {
        return ignorePermissions;
    }

    public void setIgnorePermissions( final boolean ignorePermissions )
    {
        this.ignorePermissions = ignorePermissions;
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: task_final_report.proto
package firmament;
public final class TaskFinalReportOuterClass {
private TaskFinalReportOuterClass() {}
  // Generated hook: this .proto file defines no extensions, so there is
  // nothing to add to the registry.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistryLite registry) {
  }
  // Delegates to the Lite overload above; kept for API compatibility with
  // the full (non-lite) protobuf runtime.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (com.google.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * Read-only accessor interface for {@code firmament.TaskFinalReport};
   * implemented by both the generated message and its builder.
   */
  public interface TaskFinalReportOrBuilder extends
      // @@protoc_insertion_point(interface_extends:firmament.TaskFinalReport)
      com.google.protobuf.MessageOrBuilder {

    /**
     * <code>optional uint64 task_id = 1;</code>
     */
    long getTaskId();

    /**
     * <code>optional uint64 start_time = 2;</code>
     */
    long getStartTime();

    /**
     * <code>optional uint64 finish_time = 3;</code>
     */
    long getFinishTime();

    /**
     * <code>optional uint64 instructions = 4;</code>
     */
    long getInstructions();

    /**
     * <code>optional uint64 cycles = 5;</code>
     */
    long getCycles();

    /**
     * <code>optional uint64 llc_refs = 6;</code>
     */
    long getLlcRefs();

    /**
     * <code>optional uint64 llc_misses = 7;</code>
     */
    long getLlcMisses();

    /**
     * <code>optional double runtime = 8;</code>
     */
    double getRuntime();
  }
/**
* Protobuf type {@code firmament.TaskFinalReport}
*/
public static final class TaskFinalReport extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:firmament.TaskFinalReport)
TaskFinalReportOrBuilder {
    // Use TaskFinalReport.newBuilder() to construct.
    // Builder-based constructor invoked by Builder.build().
    private TaskFinalReport(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // No-argument constructor: initializes every field to its proto3
    // default (0 for uint64, 0.0 for double).
    private TaskFinalReport() {
      taskId_ = 0L;
      startTime_ = 0L;
      finishTime_ = 0L;
      instructions_ = 0L;
      cycles_ = 0L;
      llcRefs_ = 0L;
      llcMisses_ = 0L;
      runtime_ = 0D;
    }
    // The parsing constructor skips unknown fields rather than retaining
    // them, so this message always reports an empty unknown-field set.
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of
    // input. Each tag encodes (field_number << 3) | wire_type; tag 0 means
    // end of stream, and unrecognized tags are skipped.
    private TaskFinalReport(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0: end of the input stream.
              done = true;
              break;
            default: {
              // Unknown field: skip it (stop if the skip hits end of input).
              // Note: the position of this default label among the case
              // labels has no effect on switch semantics in Java.
              if (!input.skipField(tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // field 1 (task_id), varint.
              taskId_ = input.readUInt64();
              break;
            }
            case 16: {
              // field 2 (start_time), varint.
              startTime_ = input.readUInt64();
              break;
            }
            case 24: {
              // field 3 (finish_time), varint.
              finishTime_ = input.readUInt64();
              break;
            }
            case 32: {
              // field 4 (instructions), varint.
              instructions_ = input.readUInt64();
              break;
            }
            case 40: {
              // field 5 (cycles), varint.
              cycles_ = input.readUInt64();
              break;
            }
            case 48: {
              // field 6 (llc_refs), varint.
              llcRefs_ = input.readUInt64();
              break;
            }
            case 56: {
              // field 7 (llc_misses), varint.
              llcMisses_ = input.readUInt64();
              break;
            }
            case 65: {
              // field 8 (runtime), 64-bit fixed (double).
              runtime_ = input.readDouble();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        makeExtensionsImmutable();
      }
    }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return firmament.TaskFinalReportOuterClass.internal_static_firmament_TaskFinalReport_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return firmament.TaskFinalReportOuterClass.internal_static_firmament_TaskFinalReport_fieldAccessorTable
.ensureFieldAccessorsInitialized(
firmament.TaskFinalReportOuterClass.TaskFinalReport.class, firmament.TaskFinalReportOuterClass.TaskFinalReport.Builder.class);
}
public static final int TASK_ID_FIELD_NUMBER = 1;
private long taskId_;
/**
* <code>optional uint64 task_id = 1;</code>
*/
public long getTaskId() {
return taskId_;
}
public static final int START_TIME_FIELD_NUMBER = 2;
private long startTime_;
/**
* <code>optional uint64 start_time = 2;</code>
*/
public long getStartTime() {
return startTime_;
}
public static final int FINISH_TIME_FIELD_NUMBER = 3;
private long finishTime_;
/**
* <code>optional uint64 finish_time = 3;</code>
*/
public long getFinishTime() {
return finishTime_;
}
public static final int INSTRUCTIONS_FIELD_NUMBER = 4;
private long instructions_;
/**
* <code>optional uint64 instructions = 4;</code>
*/
public long getInstructions() {
return instructions_;
}
public static final int CYCLES_FIELD_NUMBER = 5;
private long cycles_;
/**
* <code>optional uint64 cycles = 5;</code>
*/
public long getCycles() {
return cycles_;
}
public static final int LLC_REFS_FIELD_NUMBER = 6;
private long llcRefs_;
/**
* <code>optional uint64 llc_refs = 6;</code>
*/
public long getLlcRefs() {
return llcRefs_;
}
public static final int LLC_MISSES_FIELD_NUMBER = 7;
private long llcMisses_;
/**
* <code>optional uint64 llc_misses = 7;</code>
*/
public long getLlcMisses() {
return llcMisses_;
}
public static final int RUNTIME_FIELD_NUMBER = 8;
private double runtime_;
/**
* <code>optional double runtime = 8;</code>
*/
public double getRuntime() {
return runtime_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (taskId_ != 0L) {
output.writeUInt64(1, taskId_);
}
if (startTime_ != 0L) {
output.writeUInt64(2, startTime_);
}
if (finishTime_ != 0L) {
output.writeUInt64(3, finishTime_);
}
if (instructions_ != 0L) {
output.writeUInt64(4, instructions_);
}
if (cycles_ != 0L) {
output.writeUInt64(5, cycles_);
}
if (llcRefs_ != 0L) {
output.writeUInt64(6, llcRefs_);
}
if (llcMisses_ != 0L) {
output.writeUInt64(7, llcMisses_);
}
if (runtime_ != 0D) {
output.writeDouble(8, runtime_);
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (taskId_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(1, taskId_);
}
if (startTime_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(2, startTime_);
}
if (finishTime_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(3, finishTime_);
}
if (instructions_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(4, instructions_);
}
if (cycles_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(5, cycles_);
}
if (llcRefs_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(6, llcRefs_);
}
if (llcMisses_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(7, llcMisses_);
}
if (runtime_ != 0D) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(8, runtime_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof firmament.TaskFinalReportOuterClass.TaskFinalReport)) {
return super.equals(obj);
}
firmament.TaskFinalReportOuterClass.TaskFinalReport other = (firmament.TaskFinalReportOuterClass.TaskFinalReport) obj;
boolean result = true;
result = result && (getTaskId()
== other.getTaskId());
result = result && (getStartTime()
== other.getStartTime());
result = result && (getFinishTime()
== other.getFinishTime());
result = result && (getInstructions()
== other.getInstructions());
result = result && (getCycles()
== other.getCycles());
result = result && (getLlcRefs()
== other.getLlcRefs());
result = result && (getLlcMisses()
== other.getLlcMisses());
result = result && (
java.lang.Double.doubleToLongBits(getRuntime())
== java.lang.Double.doubleToLongBits(
other.getRuntime()));
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getTaskId());
hash = (37 * hash) + START_TIME_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getStartTime());
hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getFinishTime());
hash = (37 * hash) + INSTRUCTIONS_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getInstructions());
hash = (37 * hash) + CYCLES_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getCycles());
hash = (37 * hash) + LLC_REFS_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getLlcRefs());
hash = (37 * hash) + LLC_MISSES_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getLlcMisses());
hash = (37 * hash) + RUNTIME_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getRuntime()));
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(firmament.TaskFinalReportOuterClass.TaskFinalReport prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code firmament.TaskFinalReport}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:firmament.TaskFinalReport)
firmament.TaskFinalReportOuterClass.TaskFinalReportOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return firmament.TaskFinalReportOuterClass.internal_static_firmament_TaskFinalReport_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return firmament.TaskFinalReportOuterClass.internal_static_firmament_TaskFinalReport_fieldAccessorTable
.ensureFieldAccessorsInitialized(
firmament.TaskFinalReportOuterClass.TaskFinalReport.class, firmament.TaskFinalReportOuterClass.TaskFinalReport.Builder.class);
}
// Construct using firmament.TaskFinalReportOuterClass.TaskFinalReport.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
taskId_ = 0L;
startTime_ = 0L;
finishTime_ = 0L;
instructions_ = 0L;
cycles_ = 0L;
llcRefs_ = 0L;
llcMisses_ = 0L;
runtime_ = 0D;
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return firmament.TaskFinalReportOuterClass.internal_static_firmament_TaskFinalReport_descriptor;
}
public firmament.TaskFinalReportOuterClass.TaskFinalReport getDefaultInstanceForType() {
return firmament.TaskFinalReportOuterClass.TaskFinalReport.getDefaultInstance();
}
public firmament.TaskFinalReportOuterClass.TaskFinalReport build() {
firmament.TaskFinalReportOuterClass.TaskFinalReport result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public firmament.TaskFinalReportOuterClass.TaskFinalReport buildPartial() {
firmament.TaskFinalReportOuterClass.TaskFinalReport result = new firmament.TaskFinalReportOuterClass.TaskFinalReport(this);
result.taskId_ = taskId_;
result.startTime_ = startTime_;
result.finishTime_ = finishTime_;
result.instructions_ = instructions_;
result.cycles_ = cycles_;
result.llcRefs_ = llcRefs_;
result.llcMisses_ = llcMisses_;
result.runtime_ = runtime_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof firmament.TaskFinalReportOuterClass.TaskFinalReport) {
return mergeFrom((firmament.TaskFinalReportOuterClass.TaskFinalReport)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(firmament.TaskFinalReportOuterClass.TaskFinalReport other) {
if (other == firmament.TaskFinalReportOuterClass.TaskFinalReport.getDefaultInstance()) return this;
if (other.getTaskId() != 0L) {
setTaskId(other.getTaskId());
}
if (other.getStartTime() != 0L) {
setStartTime(other.getStartTime());
}
if (other.getFinishTime() != 0L) {
setFinishTime(other.getFinishTime());
}
if (other.getInstructions() != 0L) {
setInstructions(other.getInstructions());
}
if (other.getCycles() != 0L) {
setCycles(other.getCycles());
}
if (other.getLlcRefs() != 0L) {
setLlcRefs(other.getLlcRefs());
}
if (other.getLlcMisses() != 0L) {
setLlcMisses(other.getLlcMisses());
}
if (other.getRuntime() != 0D) {
setRuntime(other.getRuntime());
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
firmament.TaskFinalReportOuterClass.TaskFinalReport parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (firmament.TaskFinalReportOuterClass.TaskFinalReport) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private long taskId_ ;
/**
* <code>optional uint64 task_id = 1;</code>
*/
public long getTaskId() {
return taskId_;
}
/**
* <code>optional uint64 task_id = 1;</code>
*/
public Builder setTaskId(long value) {
taskId_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 task_id = 1;</code>
*/
public Builder clearTaskId() {
taskId_ = 0L;
onChanged();
return this;
}
private long startTime_ ;
/**
* <code>optional uint64 start_time = 2;</code>
*/
public long getStartTime() {
return startTime_;
}
/**
* <code>optional uint64 start_time = 2;</code>
*/
public Builder setStartTime(long value) {
startTime_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 start_time = 2;</code>
*/
public Builder clearStartTime() {
startTime_ = 0L;
onChanged();
return this;
}
private long finishTime_ ;
/**
* <code>optional uint64 finish_time = 3;</code>
*/
public long getFinishTime() {
return finishTime_;
}
/**
* <code>optional uint64 finish_time = 3;</code>
*/
public Builder setFinishTime(long value) {
finishTime_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 finish_time = 3;</code>
*/
public Builder clearFinishTime() {
finishTime_ = 0L;
onChanged();
return this;
}
private long instructions_ ;
/**
* <code>optional uint64 instructions = 4;</code>
*/
public long getInstructions() {
return instructions_;
}
/**
* <code>optional uint64 instructions = 4;</code>
*/
public Builder setInstructions(long value) {
instructions_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 instructions = 4;</code>
*/
public Builder clearInstructions() {
instructions_ = 0L;
onChanged();
return this;
}
private long cycles_ ;
/**
* <code>optional uint64 cycles = 5;</code>
*/
public long getCycles() {
return cycles_;
}
/**
* <code>optional uint64 cycles = 5;</code>
*/
public Builder setCycles(long value) {
cycles_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 cycles = 5;</code>
*/
public Builder clearCycles() {
cycles_ = 0L;
onChanged();
return this;
}
private long llcRefs_ ;
/**
* <code>optional uint64 llc_refs = 6;</code>
*/
public long getLlcRefs() {
return llcRefs_;
}
/**
* <code>optional uint64 llc_refs = 6;</code>
*/
public Builder setLlcRefs(long value) {
llcRefs_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 llc_refs = 6;</code>
*/
public Builder clearLlcRefs() {
llcRefs_ = 0L;
onChanged();
return this;
}
private long llcMisses_ ;
/**
* <code>optional uint64 llc_misses = 7;</code>
*/
public long getLlcMisses() {
return llcMisses_;
}
/**
* <code>optional uint64 llc_misses = 7;</code>
*/
public Builder setLlcMisses(long value) {
llcMisses_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 llc_misses = 7;</code>
*/
public Builder clearLlcMisses() {
llcMisses_ = 0L;
onChanged();
return this;
}
private double runtime_ ;
/**
* <code>optional double runtime = 8;</code>
*/
public double getRuntime() {
return runtime_;
}
/**
* <code>optional double runtime = 8;</code>
*/
public Builder setRuntime(double value) {
runtime_ = value;
onChanged();
return this;
}
/**
* <code>optional double runtime = 8;</code>
*/
public Builder clearRuntime() {
runtime_ = 0D;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:firmament.TaskFinalReport)
}
// @@protoc_insertion_point(class_scope:firmament.TaskFinalReport)
private static final firmament.TaskFinalReportOuterClass.TaskFinalReport DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new firmament.TaskFinalReportOuterClass.TaskFinalReport();
}
public static firmament.TaskFinalReportOuterClass.TaskFinalReport getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TaskFinalReport>
PARSER = new com.google.protobuf.AbstractParser<TaskFinalReport>() {
public TaskFinalReport parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TaskFinalReport(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<TaskFinalReport> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TaskFinalReport> getParserForType() {
return PARSER;
}
public firmament.TaskFinalReportOuterClass.TaskFinalReport getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
  // File-descriptor bootstrap (generated): embeds the serialized
  // task_final_report.proto and wires up the reflection accessor table.
  private static final com.google.protobuf.Descriptors.Descriptor
    internal_static_firmament_TaskFinalReport_descriptor;
  private static final
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_firmament_TaskFinalReport_fieldAccessorTable;
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static  com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\027task_final_report.proto\022\tfirmament\"\250\001\n" +
      "\017TaskFinalReport\022\017\n\007task_id\030\001 \001(\004\022\022\n\nsta" +
      "rt_time\030\002 \001(\004\022\023\n\013finish_time\030\003 \001(\004\022\024\n\014in" +
      "structions\030\004 \001(\004\022\016\n\006cycles\030\005 \001(\004\022\020\n\010llc_" +
      "refs\030\006 \001(\004\022\022\n\nllc_misses\030\007 \001(\004\022\017\n\007runtim" +
      "e\030\010 \001(\001b\006proto3"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
    internal_static_firmament_TaskFinalReport_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_firmament_TaskFinalReport_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_firmament_TaskFinalReport_descriptor,
        new java.lang.String[] { "TaskId", "StartTime", "FinishTime", "Instructions", "Cycles", "LlcRefs", "LlcMisses", "Runtime", });
  }
// @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Copyright (c) 2001-2010 JGoodies Karsten Lentzsch. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* o Neither the name of JGoodies Karsten Lentzsch nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jgoodies.looks.plastic;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.DataBuffer;
import java.awt.image.IndexColorModel;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.swing.Icon;
/**
* Creates, adjusts and paints the bumps used in the JGoodies Plastic L&Fs.
*
* @author Karsten Lentzsch
* @version $Revision: 1.11 $
*/
/**
 * Creates, adjusts and paints the bumps used in the JGoodies Plastic L&Fs.
 * The bump pattern is rendered by tiling a small cached buffer image; buffers
 * are shared across all instances and keyed by (GraphicsConfiguration,
 * top/shadow/back colors).
 *
 * @author Karsten Lentzsch
 * @version $Revision: 1.11 $
 */
final class PlasticBumps implements Icon {

    /**
     * Shared cache of bump buffers, reused by all instances.
     * Accessed only from the Event Dispatch Thread, hence not synchronized.
     */
    private static final List<BumpBuffer> BUFFERS = new ArrayList<BumpBuffer>();

    // Number of bump cells in each direction; a cell occupies a 2x2 px area.
    private int xBumps;
    private int yBumps;

    private Color topColor;
    private Color shadowColor;
    private Color backColor;

    // Buffer used by the most recent paint; checked first in getBuffer.
    private BumpBuffer buffer;

    // Instance Creation *****************************************************

    /**
     * Creates bumps that fill the given area with the given colors.
     *
     * @param width          the width of the bump area in pixels
     * @param height         the height of the bump area in pixels
     * @param newTopColor    the highlight color of a bump
     * @param newShadowColor the shadow color of a bump
     * @param newBackColor   the background fill color
     */
    PlasticBumps(int width, int height,
        Color newTopColor, Color newShadowColor, Color newBackColor) {
        setBumpArea(width, height);
        setBumpColors(newTopColor, newShadowColor, newBackColor);
    }

    // Package API ************************************************************

    /** Sets the bump area; the icon size is rounded down to whole 2px cells. */
    void setBumpArea(int width, int height) {
        xBumps = width / 2;
        yBumps = height / 2;
    }

    /** Sets the colors used for subsequent paints. */
    void setBumpColors(Color newTopColor, Color newShadowColor, Color newBackColor) {
        topColor = newTopColor;
        shadowColor = newShadowColor;
        backColor = newBackColor;
    }

    // Icon Implementation ****************************************************

    public void paintIcon(Component c, Graphics g, int x, int y) {
        GraphicsConfiguration gc = (g instanceof Graphics2D)
            ? (GraphicsConfiguration) ((Graphics2D) g).getDeviceConfiguration()
            : null;
        buffer = getBuffer(gc, topColor, shadowColor, backColor);

        int bufferWidth = buffer.getImageSize().width;
        int bufferHeight = buffer.getImageSize().height;
        int iconWidth = getIconWidth();
        int iconHeight = getIconHeight();
        int x2 = x + iconWidth;
        int y2 = y + iconHeight;
        int savex = x;

        // Tile the cached buffer over the icon area, clipping the last tiles.
        while (y < y2) {
            int h = Math.min(y2 - y, bufferHeight);
            for (x = savex; x < x2; x += bufferWidth) {
                int w = Math.min(x2 - x, bufferWidth);
                g.drawImage(buffer.getImage(), x, y, x + w, y + h, 0, 0, w, h, null);
            }
            y += bufferHeight;
        }
    }

    public int getIconWidth() { return xBumps * 2; }

    public int getIconHeight() { return yBumps * 2; }

    // Helper Code ************************************************************

    /**
     * Returns a buffer matching the given configuration and colors, creating
     * and caching a new one on a miss.
     */
    private BumpBuffer getBuffer(GraphicsConfiguration gc,
        Color aTopColor, Color aShadowColor, Color aBackColor) {
        if ((buffer != null)
            && buffer.hasSameConfiguration(gc, aTopColor, aShadowColor, aBackColor)) {
            return buffer;
        }
        BumpBuffer result = null;
        for (BumpBuffer aBuffer : BUFFERS) {
            if (aBuffer.hasSameConfiguration(gc, aTopColor, aShadowColor, aBackColor)) {
                result = aBuffer;
                break;
            }
        }
        if (result == null) {
            // FIX: previously constructed the buffer from the instance fields
            // (topColor/shadowColor/backColor) while matching against the
            // method parameters; a caller passing colors that differ from the
            // fields would cache a buffer that never matches its own lookup.
            result = new BumpBuffer(gc, aTopColor, aShadowColor, aBackColor);
            BUFFERS.add(result);
        }
        return result;
    }

    // Helper Class ***********************************************************

    /** An immutable, pre-rendered bump tile for one configuration + colors. */
    private static final class BumpBuffer {

        private static final int IMAGE_SIZE = 64;
        private static final Dimension imageSize = new Dimension(IMAGE_SIZE, IMAGE_SIZE);

        transient Image image;
        private final Color topColor;
        private final Color shadowColor;
        private final Color backColor;
        private final GraphicsConfiguration gc;

        BumpBuffer(
            GraphicsConfiguration gc,
            Color aTopColor,
            Color aShadowColor,
            Color aBackColor) {
            this.gc = gc;
            topColor = aTopColor;
            shadowColor = aShadowColor;
            backColor = aBackColor;
            createImage();
            fillBumpBuffer();
        }

        /**
         * Checks whether this buffer was built for the given configuration
         * and colors; both configurations may be {@code null}.
         */
        boolean hasSameConfiguration(
            GraphicsConfiguration aGC,
            Color aTopColor,
            Color aShadowColor,
            Color aBackColor) {
            if (gc != null) {
                if (!gc.equals(aGC)) {
                    return false;
                }
            } else if (aGC != null) {
                return false;
            }
            return topColor.equals(aTopColor)
                && shadowColor.equals(aShadowColor)
                && backColor.equals(aBackColor);
        }

        /**
         * Returns the Image containing the bumps appropriate for the passed in
         * {@code GraphicsConfiguration}.
         */
        Image getImage() { return image; }

        Dimension getImageSize() { return imageSize; }

        /**
         * Paints the bumps into the current image: two highlight dots and two
         * shadow dots per 4x4 px cell, on a solid background.
         */
        private void fillBumpBuffer() {
            Graphics g = image.getGraphics();
            g.setColor(backColor);
            g.fillRect(0, 0, IMAGE_SIZE, IMAGE_SIZE);
            g.setColor(topColor);
            for (int x = 0; x < IMAGE_SIZE; x += 4) {
                for (int y = 0; y < IMAGE_SIZE; y += 4) {
                    g.drawLine(x, y, x, y);
                    g.drawLine(x + 2, y + 2, x + 2, y + 2);
                }
            }
            g.setColor(shadowColor);
            for (int x = 0; x < IMAGE_SIZE; x += 4) {
                for (int y = 0; y < IMAGE_SIZE; y += 4) {
                    g.drawLine(x + 1, y + 1, x + 1, y + 1);
                    g.drawLine(x + 3, y + 3, x + 3, y + 3);
                }
            }
            g.dispose();
        }

        /**
         * Creates the image appropriate for the passed in
         * {@code GraphicsConfiguration}, which may be null. Without a
         * configuration, a 3-color indexed image suffices for the pattern.
         */
        private void createImage() {
            if (gc != null) {
                image = gc.createCompatibleImage(IMAGE_SIZE, IMAGE_SIZE);
            } else {
                int[] cmap = { backColor.getRGB(), topColor.getRGB(), shadowColor.getRGB()};
                IndexColorModel icm =
                    new IndexColorModel(8, 3, cmap, 0, false, -1, DataBuffer.TYPE_BYTE);
                image = new BufferedImage(IMAGE_SIZE, IMAGE_SIZE, BufferedImage.TYPE_BYTE_INDEXED, icm);
            }
        }
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.aggregation.histogram.sql;
import com.google.common.collect.ImmutableList;
import io.druid.java.util.common.StringUtils;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.histogram.ApproximateHistogram;
import io.druid.query.aggregation.histogram.ApproximateHistogramAggregatorFactory;
import io.druid.query.aggregation.histogram.ApproximateHistogramFoldingAggregatorFactory;
import io.druid.query.aggregation.histogram.QuantilePostAggregator;
import io.druid.query.filter.DimFilter;
import io.druid.segment.VirtualColumn;
import io.druid.segment.column.ValueType;
import io.druid.segment.virtual.ExpressionVirtualColumn;
import io.druid.sql.calcite.aggregation.Aggregation;
import io.druid.sql.calcite.aggregation.Aggregations;
import io.druid.sql.calcite.aggregation.SqlAggregator;
import io.druid.sql.calcite.expression.DruidExpression;
import io.druid.sql.calcite.expression.Expressions;
import io.druid.sql.calcite.planner.PlannerContext;
import io.druid.sql.calcite.table.RowSignature;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import java.util.ArrayList;
import java.util.List;
public class QuantileSqlAggregator implements SqlAggregator
{
  // Stateless Calcite function descriptor; safe to share as a singleton.
  private static final SqlAggFunction FUNCTION_INSTANCE = new QuantileSqlAggFunction();
  // SQL-visible name of this aggregator.
  private static final String NAME = "APPROX_QUANTILE";
  @Override
  public SqlAggFunction calciteFunction()
  {
    // The SqlAggFunction Calcite uses to recognize APPROX_QUANTILE calls.
    return FUNCTION_INSTANCE;
  }
@Override
public Aggregation toDruidAggregation(
final String name,
final RowSignature rowSignature,
final PlannerContext plannerContext,
final List<Aggregation> existingAggregations,
final Project project,
final AggregateCall aggregateCall,
final DimFilter filter
)
{
final DruidExpression input = Expressions.toDruidExpression(
plannerContext,
rowSignature,
Expressions.fromFieldAccess(
rowSignature,
project,
aggregateCall.getArgList().get(0)
)
);
if (input == null) {
return null;
}
final AggregatorFactory aggregatorFactory;
final String histogramName = StringUtils.format("%s:agg", name);
final RexNode probabilityArg = Expressions.fromFieldAccess(
rowSignature,
project,
aggregateCall.getArgList().get(1)
);
final float probability = ((Number) RexLiteral.value(probabilityArg)).floatValue();
final int resolution;
if (aggregateCall.getArgList().size() >= 3) {
final RexNode resolutionArg = Expressions.fromFieldAccess(
rowSignature,
project,
aggregateCall.getArgList().get(2)
);
resolution = ((Number) RexLiteral.value(resolutionArg)).intValue();
} else {
resolution = ApproximateHistogram.DEFAULT_HISTOGRAM_SIZE;
}
final int numBuckets = ApproximateHistogram.DEFAULT_BUCKET_SIZE;
final float lowerLimit = Float.NEGATIVE_INFINITY;
final float upperLimit = Float.POSITIVE_INFINITY;
// Look for existing matching aggregatorFactory.
for (final Aggregation existing : existingAggregations) {
for (AggregatorFactory factory : existing.getAggregatorFactories()) {
final boolean matches = Aggregations.aggregatorMatches(
factory,
filter,
ApproximateHistogramAggregatorFactory.class,
theFactory -> {
// Check input for equivalence.
final boolean inputMatches;
final VirtualColumn virtualInput = existing.getVirtualColumns()
.stream()
.filter(
virtualColumn ->
virtualColumn.getOutputName()
.equals(theFactory.getFieldName())
)
.findFirst()
.orElse(null);
if (virtualInput == null) {
inputMatches = input.isDirectColumnAccess()
&& input.getDirectColumn().equals(theFactory.getFieldName());
} else {
inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression()
.equals(input.getExpression());
}
return inputMatches
&& theFactory.getResolution() == resolution
&& theFactory.getNumBuckets() == numBuckets
&& theFactory.getLowerLimit() == lowerLimit
&& theFactory.getUpperLimit() == upperLimit;
}
);
if (matches) {
// Found existing one. Use this.
return Aggregation.create(
ImmutableList.<AggregatorFactory>of(),
new QuantilePostAggregator(name, factory.getName(), probability)
);
}
}
}
// No existing match found. Create a new one.
final List<VirtualColumn> virtualColumns = new ArrayList<>();
if (input.isDirectColumnAccess()) {
if (rowSignature.getColumnType(input.getDirectColumn()) == ValueType.COMPLEX) {
aggregatorFactory = new ApproximateHistogramFoldingAggregatorFactory(
histogramName,
input.getDirectColumn(),
resolution,
numBuckets,
lowerLimit,
upperLimit
);
} else {
aggregatorFactory = new ApproximateHistogramAggregatorFactory(
histogramName,
input.getDirectColumn(),
resolution,
numBuckets,
lowerLimit,
upperLimit
);
}
} else {
final ExpressionVirtualColumn virtualColumn = input.toVirtualColumn(
StringUtils.format("%s:v", name),
ValueType.FLOAT,
plannerContext.getExprMacroTable()
);
virtualColumns.add(virtualColumn);
aggregatorFactory = new ApproximateHistogramAggregatorFactory(
histogramName,
virtualColumn.getOutputName(),
resolution,
numBuckets,
lowerLimit,
upperLimit
);
}
return Aggregation.create(
virtualColumns,
ImmutableList.of(aggregatorFactory),
new QuantilePostAggregator(name, histogramName, probability)
).filter(filter);
}
private static class QuantileSqlAggFunction extends SqlAggFunction
{
private static final String SIGNATURE1 = "'" + NAME + "(column, probability)'\n";
private static final String SIGNATURE2 = "'" + NAME + "(column, probability, resolution)'\n";
QuantileSqlAggFunction()
{
super(
NAME,
null,
SqlKind.OTHER_FUNCTION,
ReturnTypes.explicit(SqlTypeName.DOUBLE),
null,
OperandTypes.or(
OperandTypes.and(
OperandTypes.sequence(SIGNATURE1, OperandTypes.ANY, OperandTypes.LITERAL),
OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.NUMERIC)
),
OperandTypes.and(
OperandTypes.sequence(SIGNATURE2, OperandTypes.ANY, OperandTypes.LITERAL, OperandTypes.LITERAL),
OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.NUMERIC, SqlTypeFamily.EXACT_NUMERIC)
)
),
SqlFunctionCategory.NUMERIC,
false,
false
);
}
}
}
| |
package org.tiltedwindmills.fantasy.mfl.services.impl;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
import static org.tiltedwindmills.fantasy.mfl.RetrofitUtils.getDummyHttpError;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import mockit.Capturing;
import mockit.NonStrictExpectations;
import mockit.Verifications;
import org.joda.time.DateTime;
import org.junit.Test;
import org.tiltedwindmills.fantasy.mfl.JsonDataConverter;
import org.tiltedwindmills.fantasy.mfl.model.Player;
import org.tiltedwindmills.fantasy.mfl.model.injuries.InjuriesResponse;
import org.tiltedwindmills.fantasy.mfl.model.injuries.Injury;
import org.tiltedwindmills.fantasy.mfl.model.players.PlayerAvailabilityStatus;
import org.tiltedwindmills.fantasy.mfl.model.players.PlayerResponse;
import org.tiltedwindmills.fantasy.mfl.model.players.PlayerScore;
import org.tiltedwindmills.fantasy.mfl.model.players.PlayerScoresResponse;
import org.tiltedwindmills.fantasy.mfl.model.players.PlayerScoresWrapper;
import org.tiltedwindmills.fantasy.mfl.model.players.PlayerStatusResponse;
import org.tiltedwindmills.fantasy.mfl.model.players.PlayerStatusWrapper;
import org.tiltedwindmills.fantasy.mfl.services.PlayerService;
import org.tiltedwindmills.fantasy.mfl.services.exception.MFLServiceException;
import retrofit.RetrofitError;
import com.google.common.collect.ImmutableSet;
/**
* Tests for {@link org.tiltedwindmills.fantasy.mfl.services.impl.JsonPlayerServiceImpl}.
*
* @author John Daniel
*/
public class JsonPlayerServiceImplTest {
// TODO : need to add parameter tests. e.g. don't think any of these are testing league or server ID validations.
// constants
// Arbitrary league/server identifiers; the export API is mocked, so values are irrelevant.
private static final int RANDOM_LEAGUE_ID = 11111;
private static final String RANDOM_SERVER_ID = "1";
// "now" captured once so future-date tests can offset from a stable instant.
private static final DateTime MOCK_DATE_PARAM = DateTime.now();
// mock any implementers of the Retrofit interface
@Capturing private MflPlayerExport mflPlayerExport;
/*------------------------------------------------ getPlayers ------------------------------------------------*/
// Happy path: two players come back and are mapped in order.
@Test
public void getPlayersTest() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayers(anyString, anyInt); returns(JsonDataConverter.players("multiple-players"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
List<Player> players = playerService.getPlayers(Arrays.asList("1234","5678"));
assertThat(players, is(not(nullValue())));
assertThat(players.size(), is(2));
assertThat(players.get(0), is(not(nullValue())));
assertThat(players.get(0).getName(), is("Brees, Drew"));
}
// Empty server payload yields an empty (not null) list.
@Test
public void getPlayersTest_NoResults() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayers(anyString, anyInt); returns(JsonDataConverter.players("no-players"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
List<Player> players = playerService.getPlayers(Arrays.asList("1234","5678"));
assertThat(players, is(not(nullValue())));
assertThat(players.size(), is(0));
}
// Null ID list must fail fast without hitting the remote API.
@Test
public void getPlayersTest_NullPlayerIds() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayers(null);
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
assertThat(e.getMessage(), is("Cannot retrieve player information without IDs."));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayers(anyString, anyInt); times = 0;
}};
}
// Empty ID list gets the same fail-fast treatment as null.
@Test
public void getPlayersTest_EmptyPlayerIds() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayers(new ArrayList<String>());
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
assertThat(e.getMessage(), is("Cannot retrieve player information without IDs."));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayers(anyString, anyInt); times = 0;
}};
}
// Transport failure is wrapped, preserving the RetrofitError cause.
@Test
public void getPlayersTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayers(anyString, anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayers(Arrays.asList("1234","5678"));
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
assertThat(e.getMessage(), is("Error retrieving player data."));
}
}
// A null response body is treated as an invalid response.
@Test
public void getPlayersTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayers(anyString, anyInt); returns(null);
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayers(Arrays.asList("1234","5678"));
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getMessage(), is("Invalid response retrieving players with IDs : [1234, 5678]"));
}
}
// A response missing its inner wrapper is equally invalid.
@Test
public void getPlayersTest_NullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayers(anyString, anyInt); returns(new PlayerResponse());
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayers(Arrays.asList("1234","5678"));
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getMessage(), is("Invalid response retrieving players with IDs : [1234, 5678]"));
}
}
/*------------------------------------------- getPlayersSinceDate --------------------------------------------*/
// Happy path: incremental update feed maps all four players.
@Test
public void getPlayersSinceDateTest() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayersSince(anyString, anyInt);
returns(JsonDataConverter.players("player-updates-with-timestamp"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
List<Player> players = playerService.getPlayersSinceDate(MOCK_DATE_PARAM);
assertThat(players, is(not(nullValue())));
assertThat(players.size(), is(4));
assertThat(players.get(0), is(not(nullValue())));
assertThat(players.get(0).getName(), is("Johnson, Chris"));
assertThat(players.get(0).getTeam(), is("FA*"));
}
// An error-shaped payload produces an empty list, not a failure.
@Test
public void getPlayersSinceDateTest_NoResults() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayersSince(anyString, anyInt);
returns(JsonDataConverter.players("player-updates-with-timestamp-error"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
List<Player> players = playerService.getPlayersSinceDate(MOCK_DATE_PARAM);
assertThat(players, is(not(nullValue())));
assertThat(players.size(), is(0));
}
// Null date must fail fast without touching the API.
@Test
public void getPlayersSinceDateTest_NullDate() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayersSinceDate(null);
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
assertThat(e.getMessage(), is("null is an invalid date for MFL player requests"));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getAllPlayersSince(anyString, anyInt); times = 0;
}};
}
// Dates in the future are rejected before any remote call.
@Test
public void getPlayersSinceDateTest_FutureDate() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayersSinceDate(MOCK_DATE_PARAM.plusMinutes(1));
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
assertThat(e.getMessage(), containsString(" is an invalid date for MFL player requests"));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getAllPlayersSince(anyString, anyInt); times = 0;
}};
}
// Transport failure is wrapped, preserving the RetrofitError cause.
@Test
public void getPlayersSinceDateTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayersSince(anyString, anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayersSinceDate(MOCK_DATE_PARAM);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
assertThat(e.getMessage(), containsString("Error retrieving player updates after "));
}
}
// Null response body is rejected as invalid.
@Test
public void getPlayersSinceDateTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayersSince(anyString, anyInt); returns(null);
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayersSinceDate(MOCK_DATE_PARAM);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getMessage(), containsString("Invalid response retrieving player update after : "));
}
}
// Response without its wrapper is rejected as invalid.
@Test
public void getPlayersSinceDateTest_NullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayersSince(anyString, anyInt); returns(new PlayerResponse());
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayersSinceDate(MOCK_DATE_PARAM);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getMessage(), containsString("Invalid response retrieving player update after : "));
}
}
/*----------------------------------------------- getAllPlayers ----------------------------------------------*/
// Happy path: full player dump is mapped.
@Test
public void getAllPlayersTest() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayers(anyInt); returns(JsonDataConverter.players("multiple-players"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
List<Player> players = playerService.getAllPlayers();
assertThat(players, is(not(nullValue())));
assertThat(players.size(), is(2));
assertThat(players.get(0), is(not(nullValue())));
assertThat(players.get(0).getName(), is("Brees, Drew"));
}
// Empty payload yields an empty (not null) list.
@Test
public void getAllPlayersTest_NoResults() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayers(anyInt); returns(JsonDataConverter.players("no-players"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
List<Player> players = playerService.getAllPlayers();
assertThat(players, is(not(nullValue())));
assertThat(players.size(), is(0));
}
// Transport failure is wrapped, preserving the RetrofitError cause.
@Test
public void getAllPlayersTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayers(anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllPlayers();
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
assertThat(e.getMessage(), is("Error retrieving all player data"));
}
}
// Null response body is rejected as invalid.
@Test
public void getAllPlayersTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayers(anyInt); returns(null);
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllPlayers();
fail("should have thrown exception.");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Invalid response retrieving all players"));
}
}
// Response without its wrapper is rejected as invalid.
@Test
public void getAllPlayersTest_NullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getAllPlayers(anyInt); returns(new PlayerResponse());
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllPlayers();
fail("should have thrown exception.");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Invalid response retrieving all players"));
}
}
/*---------------------------------------------- getWeeklyScores ---------------------------------------------*/
// Happy path: one entry per played week; the bye week is absent.
@Test
public void getWeeklyScoresTest() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt);
returns(JsonDataConverter.playerScores("full-schedule"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Map<Integer, Double> playerScoresMap = playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
assertThat(playerScoresMap, is(not(nullValue())));
assertThat(playerScoresMap.size(), is(16));
// before bye week
assertThat(playerScoresMap.containsKey(1), is(true));
assertThat(playerScoresMap.get(1), is(30.7));
// bye week
assertThat(playerScoresMap.containsKey(10), is(false));
// after bye week
assertThat(playerScoresMap.containsKey(11), is(true));
assertThat(playerScoresMap.get(11), is(23.62));
}
@Test
public void getWeeklyScoresTest_InjuredPlayer() {
// Arian Foster missed 3 weeks due to injury. Make sure they don't show in the scoring list.
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt);
returns(JsonDataConverter.playerScores("injured-player"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Map<Integer, Double> playerScoresMap = playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
assertThat(playerScoresMap, is(not(nullValue())));
assertThat(playerScoresMap.size(), is(13));
// validate injured weeks are missing
assertThat(playerScoresMap.containsKey(3), is(false));
assertThat(playerScoresMap.containsKey(11), is(false));
assertThat(playerScoresMap.containsKey(12), is(false));
// before injuries
assertThat(playerScoresMap.containsKey(1), is(true));
assertThat(playerScoresMap.get(1), is(11d));
// after injuries
assertThat(playerScoresMap.containsKey(4), is(true));
assertThat(playerScoresMap.get(4), is(9.6));
}
@Test
public void getWeeklyScoresTest_ZeroScoreWeek() {
// Jimmy Graham didn't record a catch weeks 7 or 13. Make sure these *do* show up in report.
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt);
returns(JsonDataConverter.playerScores("week-without-scoring"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Map<Integer, Double> playerScoresMap = playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
assertThat(playerScoresMap, is(not(nullValue())));
assertThat(playerScoresMap.size(), is(16));
// validate injured weeks are present
assertThat(playerScoresMap.containsKey(7), is(true));
assertThat(playerScoresMap.get(7), is(0d));
assertThat(playerScoresMap.containsKey(13), is(true));
assertThat(playerScoresMap.get(13), is(0d));
}
// Negative player ID must fail fast without touching the API.
@Test
public void getWeeklyScoresTest_InvalidPlayerId() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getWeeklyScores(RANDOM_LEAGUE_ID, -1, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Cannot retrieve player score information without a valid ID."));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
// Years before MFL's earliest supported season are rejected.
// NOTE(review): with @Test(expected=...), the Verifications block below is
// unreachable once getWeeklyScores throws — consider try/catch style instead.
@Test(expected = MFLServiceException.class)
public void getWeeklyScoresTest_EarlyYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 1979);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
// Future seasons are rejected. Same unreachable-Verifications caveat as above.
@Test(expected = MFLServiceException.class)
public void getWeeklyScoresTest_FutureYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
int nextYear = Calendar.getInstance().get(Calendar.YEAR) + 1;
playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, nextYear);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
// Transport failure is wrapped, preserving the RetrofitError cause.
@Test
public void getWeeklyScoresTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
assertThat(e.getMessage(), is("Error retrieving player scoring data."));
}
}
// Null response body is rejected as invalid.
@Test(expected = MFLServiceException.class)
public void getWeeklyScoresTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(null);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
}
// Response without its wrapper is rejected as invalid.
@Test(expected = MFLServiceException.class)
public void getWeeklyScoresTest_NullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(new PlayerScoresResponse());
}};
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
}
// A wrapper whose score list is unset yields an empty map.
@Test
public void getWeeklyScoresTest_UnsetScoresList() {
new NonStrictExpectations() {{
PlayerScoresResponse playerScoresResponse = new PlayerScoresResponse();
playerScoresResponse.setWrapper(new PlayerScoresWrapper()); // no scores are set
playerScoresResponse.getWrapper().setPlayerScores(null);
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(playerScoresResponse);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Map<Integer, Double> playerScoreMap = playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
assertThat(playerScoreMap, is(not(nullValue())));
assertThat(playerScoreMap.size(), is(0));
}
// A null element inside the score list is skipped, not fatal.
@Test
public void getWeeklyScoresTest_NullEntryInScoresList() {
new NonStrictExpectations() {{
PlayerScore playerScore = new PlayerScore();
playerScore.setPlayerId(10695);
playerScore.setScore("12.3");
playerScore.setWeek("5");
PlayerScoresResponse playerScoresResponse = new PlayerScoresResponse();
playerScoresResponse.setWrapper(new PlayerScoresWrapper());
playerScoresResponse.getWrapper().setPlayerScores(Arrays.asList(null, playerScore));
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(playerScoresResponse);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Map<Integer, Double> playerScoreMap = playerService.getWeeklyScores(RANDOM_LEAGUE_ID, 1234, RANDOM_SERVER_ID, 2015);
// make sure the null score didn't screw up the rest of the processing
assertThat(playerScoreMap, is(not(nullValue())));
assertThat(playerScoreMap.size(), is(1));
assertThat(playerScoreMap.get(5), is(12.3));
}
/*------------------------------------------ getAveragePlayerScores ------------------------------------------*/
// Happy path: per-player season averages keyed by player ID.
@Test
public void getAveragePlayerScoresTest() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt);
returns(JsonDataConverter.playerScores("multi-player-average"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234);
Map<Integer, Double> playerScoresMap = playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerScoresMap, is(not(nullValue())));
assertThat(playerScoresMap.size(), is(2));
// andrew luck
assertThat(playerScoresMap.containsKey(10695), is(true));
assertThat(playerScoresMap.get(10695), is(27.109));
// lev bell
assertThat(playerScoresMap.containsKey(11192), is(true));
assertThat(playerScoresMap.get(11192), is(20.937));
}
// Pre-season data: players are present with 0.0 averages.
@Test
public void getAveragePlayerScoresTest_BeforeSeason() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt);
returns(JsonDataConverter.playerScores("multi-player-average-before-season-starts"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234);
Map<Integer, Double> playerScoresMap = playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerScoresMap, is(not(nullValue())));
assertThat(playerScoresMap.size(), is(2));
// andrew luck
assertThat(playerScoresMap.containsKey(10695), is(true));
assertThat(playerScoresMap.get(10695), is(0.0));
// lev bell
assertThat(playerScoresMap.containsKey(11192), is(true));
assertThat(playerScoresMap.get(11192), is(0.0));
}
// One invalid ID in the set invalidates the whole request, pre-API.
@Test
public void getAveragePlayerScoresTest_InvalidPlayerId() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, -1, 5678);
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Cannot retrieve player score information without a valid ID."));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
// A null ID in the set is likewise rejected before any remote call.
@Test
public void getAveragePlayerScoresTest_NullPlayerId() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = new HashSet<Integer>(Arrays.asList(1234, null, 5678));
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Cannot retrieve player score information without a valid ID."));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
// Years before MFL's earliest supported season are rejected.
// NOTE(review): the Verifications block is unreachable once the expected
// exception is thrown — consider try/catch style instead.
@Test(expected = MFLServiceException.class)
public void getAveragePlayerScoresTest_EarlyYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 1979);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
/**
 * Requesting average scores for a future season must fail fast.
 * NOTE(review): this method was missing its @Test annotation entirely, so it
 * never executed; annotated to match the sibling getWeeklyScoresTest_FutureYear.
 */
@Test(expected = MFLServiceException.class)
public void getAveragePlayerScoresTest_FutureYear() {
    PlayerService playerService = new JsonPlayerServiceImpl();
    Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
    int nextYear = Calendar.getInstance().get(Calendar.YEAR) + 1;
    playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, nextYear);
    // Unreachable once the expected exception is thrown; retained for parity with siblings.
    new Verifications() {{
        // Verify no calls to the service API occurred
        mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
    }};
}
// Transport failure is wrapped, preserving the RetrofitError cause.
@Test
public void getAveragePlayerScoresTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
assertThat(e.getMessage(), is("Error retrieving player scoring data."));
}
}
// Null response body is rejected as invalid.
@Test(expected = MFLServiceException.class)
public void getAveragePlayerScoresTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(null);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
}
// Response without its wrapper is rejected as invalid.
@Test(expected = MFLServiceException.class)
public void getAveragePlayerScoresTest_NullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(new PlayerScoresResponse());
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
}
// A wrapper whose score list is unset yields an empty map.
@Test
public void getAveragePlayerScoresTest_UnsetScoresList() {
new NonStrictExpectations() {{
PlayerScoresResponse playerScoresResponse = new PlayerScoresResponse();
playerScoresResponse.setWrapper(new PlayerScoresWrapper()); // no scores are set
playerScoresResponse.getWrapper().setPlayerScores(null);
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(playerScoresResponse);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
Map<Integer, Double> playerScoreMap =
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerScoreMap, is(not(nullValue())));
assertThat(playerScoreMap.size(), is(0));
}
// A null element inside the score list is skipped; the "YTD" row still maps.
@Test
public void getAveragePlayerScoresTest_NullEntryInScoresList() {
new NonStrictExpectations() {{
PlayerScore playerScore = new PlayerScore();
playerScore.setPlayerId(10695);
playerScore.setScore("12.3");
playerScore.setWeek("YTD");
PlayerScoresResponse playerScoresResponse = new PlayerScoresResponse();
playerScoresResponse.setWrapper(new PlayerScoresWrapper());
playerScoresResponse.getWrapper().setPlayerScores(Arrays.asList(null, playerScore));
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(playerScoresResponse);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
Map<Integer, Double> playerScoreMap =
playerService.getAveragePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
// make sure the null score didn't screw up the rest of the processing
assertThat(playerScoreMap, is(not(nullValue())));
assertThat(playerScoreMap.size(), is(1));
assertThat(playerScoreMap.get(10695), is(12.3));
}
/*---------------------------------------- getYearToDatePlayerScores ------------------------------------------*/
@Test
public void getYearToDatePlayerScoresTest() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt);
returns(JsonDataConverter.playerScores("multi-player-year-to-date"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234);
Map<Integer, Double> playerScoresMap = playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerScoresMap, is(not(nullValue())));
assertThat(playerScoresMap.size(), is(2));
// andrew luck
assertThat(playerScoresMap.containsKey(10695), is(true));
assertThat(playerScoresMap.get(10695), is(433.74));
// lev bell
assertThat(playerScoresMap.containsKey(11192), is(true));
assertThat(playerScoresMap.get(11192), is(335.0));
}
@Test
public void getYearToDatePlayerScoresTest_BeforeSeason() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt);
returns(JsonDataConverter.playerScores("multi-player-year-to-date-before-season-starts"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234);
Map<Integer, Double> playerScoresMap = playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerScoresMap, is(not(nullValue())));
assertThat(playerScoresMap.size(), is(2));
// andrew luck
assertThat(playerScoresMap.containsKey(10695), is(true));
assertThat(playerScoresMap.get(10695), is(0.0));
// lev bell
assertThat(playerScoresMap.containsKey(11192), is(true));
assertThat(playerScoresMap.get(11192), is(0.0));
}
/**
 * A negative player ID in the request set must be rejected with
 * MFLServiceException before any remote call is attempted.
 */
@Test
public void getYearToDatePlayerScoresTest_InvalidPlayerId() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, -1, 5678);
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Cannot retrieve player score information without a valid ID."));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
/**
 * A null player ID in the request set must be rejected the same way as an
 * invalid one; a HashSet is used because ImmutableSet rejects nulls itself.
 */
@Test
public void getYearToDatePlayerScoresTest_NullPlayerId() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = new HashSet<Integer>(Arrays.asList(1234, null, 5678));
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Cannot retrieve player score information without a valid ID."));
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
/**
 * A year before the supported range (1979) must fail with MFLServiceException.
 * NOTE(review): the Verifications block after the throwing call is unreachable;
 * the same pattern repeats in the other expected-exception tests in this class.
 */
@Test(expected = MFLServiceException.class)
public void getYearToDatePlayerScoresTest_EarlyYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 1979);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
/**
 * A future season year must fail with MFLServiceException before any remote
 * call is made.
 *
 * FIX: the method was missing its {@code @Test} annotation, so it never ran.
 * Annotated with {@code expected = MFLServiceException.class} to match the
 * sibling _EarlyYear / _FutureYear tests in this class (the Verifications
 * block is unreachable once the exception is thrown, mirroring the existing
 * pattern here).
 */
@Test(expected = MFLServiceException.class)
public void getYearToDatePlayerScores_FutureYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
int nextYear = Calendar.getInstance().get(Calendar.YEAR) + 1;
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, nextYear);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); times = 0;
}};
}
/**
 * An HTTP transport error from the export API is wrapped in
 * MFLServiceException with the RetrofitError preserved as the cause.
 */
@Test
public void getYearToDatePlayerScoresTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
assertThat(e.getMessage(), is("Error retrieving player scoring data."));
}
}
/** A null response body from the API is treated as a service failure. */
@Test(expected = MFLServiceException.class)
public void getYearToDatePlayerScoresTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(null);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
}
/** A response whose wrapper object is unset is also a service failure. */
@Test(expected = MFLServiceException.class)
public void getYearToDatePlayerScoresTest_NullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(new PlayerScoresResponse());
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
}
/**
 * A wrapper whose score list is null yields an empty (not null) result map.
 */
@Test
public void getYearToDatePlayerScoresTest_UnsetScoresList() {
new NonStrictExpectations() {{
PlayerScoresResponse playerScoresResponse = new PlayerScoresResponse();
playerScoresResponse.setWrapper(new PlayerScoresWrapper()); // no scores are set
playerScoresResponse.getWrapper().setPlayerScores(null);
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(playerScoresResponse);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
Map<Integer, Double> playerScoreMap =
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerScoreMap, is(not(nullValue())));
assertThat(playerScoreMap.size(), is(0));
}
/**
 * A null element inside the score list is skipped; the remaining valid
 * entries are still processed into the result map.
 */
@Test
public void getYearToDatePlayerScoresTest_NullEntryInScoresList() {
new NonStrictExpectations() {{
PlayerScore playerScore = new PlayerScore();
playerScore.setPlayerId(10695);
playerScore.setScore("12.3");
playerScore.setWeek("YTD");
PlayerScoresResponse playerScoresResponse = new PlayerScoresResponse();
playerScoresResponse.setWrapper(new PlayerScoresWrapper());
playerScoresResponse.getWrapper().setPlayerScores(Arrays.asList(null, playerScore));
mflPlayerExport.getPlayerScores(anyInt, anyString, anyString, anyInt); returns(playerScoresResponse);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
Map<Integer, Double> playerScoreMap =
playerService.getYearToDatePlayerScores(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
// make sure the null score didn't screw up the rest of the processing
assertThat(playerScoreMap, is(not(nullValue())));
assertThat(playerScoreMap.size(), is(1));
assertThat(playerScoreMap.get(10695), is(12.3));
}
/*----------------------------------------------- getAllInjuries -----------------------------------------------*/
/**
 * Happy path: the canned "injuries" fixture is parsed into 144 Injury
 * entries; spot-checks the first one.
 */
@Test
public void getAllInjuriesTest() {
new NonStrictExpectations() {{
mflPlayerExport.getInjuries(anyInt, anyInt); returns(JsonDataConverter.injuries("injuries"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
List<Injury> injuries = playerService.getAllInjuries(1, 2015);
assertThat(injuries, is(not(nullValue())));
assertThat(injuries.size(), is(144));
assertThat(injuries.get(0), is(not(nullValue())));
assertThat(injuries.get(0).getPlayerId(), is(10048));
assertThat(injuries.get(0).getStatus(), is("Probable"));
assertThat(injuries.get(0).getDetails(), is("Knee"));
assertThat(injuries.get(0).getWeek(), is(1));
}
/**
 * An out-of-range week (24) is rejected before any remote call is made.
 */
@Test
public void getAllInjuriesTest_InvalidWeekProvided() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllInjuries(24, 2015);
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getInjuries(anyInt, anyInt); times = 0;
}};
}
/**
 * A year before the supported range (1979) must fail with MFLServiceException.
 *
 * FIX: the Verifications block checked {@code getPlayerStatus} — a copy-paste
 * from the player-availability tests — but getAllInjuries calls
 * {@code getInjuries} (see the other injuries tests in this class). It now
 * verifies the correct API method. Note the Verifications block is
 * unreachable once the expected exception is thrown, mirroring the existing
 * pattern in this class.
 */
@Test(expected = MFLServiceException.class)
public void getAllInjuriesTest_EarlyYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllInjuries(1, 1979);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getInjuries(anyInt, anyInt); times = 0;
}};
}
/**
 * A future season year must fail with MFLServiceException.
 *
 * FIX: the Verifications block checked {@code getPlayerStatus} — a copy-paste
 * from the player-availability tests — but getAllInjuries calls
 * {@code getInjuries} (see the other injuries tests in this class). It now
 * verifies the correct API method. Note the Verifications block is
 * unreachable once the expected exception is thrown, mirroring the existing
 * pattern in this class.
 */
@Test(expected = MFLServiceException.class)
public void getAllInjuriesTest_FutureYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
int nextYear = Calendar.getInstance().get(Calendar.YEAR) + 1;
playerService.getAllInjuries(1, nextYear);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getInjuries(anyInt, anyInt); times = 0;
}};
}
/**
 * An HTTP transport error from the injuries API is wrapped in
 * MFLServiceException with the RetrofitError preserved as the cause.
 */
@Test
public void getAllInjuriesTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getInjuries(anyInt, anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllInjuries(1, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
}
}
/** A null response body from the injuries API is a service failure. */
@Test(expected = MFLServiceException.class)
public void getAllInjuriesTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getInjuries(anyInt, anyInt); returns(null);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllInjuries(1, 2015);
}
/** A response whose wrapper object is unset is also a service failure. */
@Test(expected = MFLServiceException.class)
public void getAllInjuriesTest_NullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getInjuries(anyInt, anyInt); returns(new InjuriesResponse());
}};
// must send multiple player IDs to get the wrapper.
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getAllInjuries(1, 2015);
}
/*-------------------------------------------- getPlayerAvailability -------------------------------------------*/
/**
 * Happy path: the "multiple-players" fixture yields a four-entry availability
 * map; spot-checks three representative statuses.
 *
 * FIX: the second-to-last assertion duplicated the containsKey(10998) check
 * while the paired get() on the next line reads key 99999 — it now checks
 * containsKey(99999) to match, following the containsKey/get pattern used by
 * every other pair in this test.
 */
@Test
public void getPlayerAvailabilityTest() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt);
returns(JsonDataConverter.playerStatus("multiple-players"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
Map<Integer, String> playerAvailabilityMap = playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerAvailabilityMap, is(not(nullValue())));
assertThat(playerAvailabilityMap.size(), is(4));
assertThat(playerAvailabilityMap.containsKey(11192), is(true));
assertThat(playerAvailabilityMap.get(11192), is("New Orleans Saints - NS"));
assertThat(playerAvailabilityMap.containsKey(10998), is(true));
assertThat(playerAvailabilityMap.get(10998), is("Free Agent"));
assertThat(playerAvailabilityMap.containsKey(99999), is(true));
assertThat(playerAvailabilityMap.get(99999), is("League Does Not Use This Position"));
}
/**
 * A single-player request produces a one-entry availability map keyed by
 * the player's ID, using the "single-player" canned fixture.
 */
@Test
public void getPlayerAvailabilityTest_SinglePlayer() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt);
returns(JsonDataConverter.playerStatus("single-player"));
}};
PlayerService service = new JsonPlayerServiceImpl();
Set<Integer> requestedIds = ImmutableSet.<Integer> of(1234);
Map<Integer, String> availability = service.getPlayerAvailability(RANDOM_LEAGUE_ID, requestedIds, RANDOM_SERVER_ID, 2015);
assertThat(availability, is(not(nullValue())));
assertThat(availability.size(), is(1));
assertThat(availability.containsKey(11192), is(true));
assertThat(availability.get(11192), is("New Orleans Saints - NS"));
}
/**
 * When the remote API answers with its "invalid request" payload, the
 * service surfaces the API's error text inside an MFLServiceException.
 */
@Test
public void getPlayerAvailabilityTest_NoPlayerIdsProvided() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt);
returns(JsonDataConverter.playerStatus("invalid-request"));
}};
PlayerService playerService = new JsonPlayerServiceImpl();
try {
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception");
} catch (MFLServiceException e) {
assertThat(e.getMessage(), is("Error retrieving player status : Error - No Valid Player IDs!"));
}
}
/**
 * A null ID set is rejected locally; no remote call may be made.
 */
@Test
public void getPlayerAvailabilityTest_NullPlayerIdSet() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, null, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); times = 0;
}};
}
/**
 * An empty ID set is rejected locally; no remote call may be made.
 */
@Test
public void getPlayerAvailabilityTest_EmptyPlayerIdSet() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, new HashSet<Integer>(), RANDOM_SERVER_ID, 2015);
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); times = 0;
}};
}
/**
 * A null ID inside the set is rejected; a HashSet is used because
 * ImmutableSet would reject the null element itself.
 */
@Test
public void getPlayerAvailabilityTest_NullPlayerIdInSet() {
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = new HashSet<Integer>(Arrays.asList(1234, null, 5678));
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception");
} catch (MFLServiceException e) {
// expected
}
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); times = 0;
}};
}
/**
 * A year before the supported range (1979) must fail with MFLServiceException.
 * NOTE(review): the Verifications block after the throwing call is unreachable;
 * the same pattern repeats across the expected-exception tests in this class.
 */
@Test(expected = MFLServiceException.class)
public void getPlayerAvailabilityTest_EarlyYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 1979);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); times = 0;
}};
}
/**
 * A future season year must fail with MFLServiceException.
 */
@Test(expected = MFLServiceException.class)
public void getPlayerAvailabilityTest_FutureYear() {
PlayerService playerService = new JsonPlayerServiceImpl();
int nextYear = Calendar.getInstance().get(Calendar.YEAR) + 1;
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, nextYear);
new Verifications() {{
// Verify no calls to the service API occurred
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); times = 0;
}};
}
/**
 * An HTTP transport error from the status API is wrapped in
 * MFLServiceException with the RetrofitError preserved as the cause.
 */
@Test
public void getPlayerAvailabilityTest_HttpError() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); result = getDummyHttpError();
}};
try {
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
fail("should have thrown exception.");
} catch (MFLServiceException e) {
// expected behavior. Confirm root cause is propagated.
assertThat(e.getCause(), instanceOf(RetrofitError.class));
}
}
/** A null response body from the status API is a service failure. */
@Test(expected = MFLServiceException.class)
public void getPlayerAvailabilityTest_NullResponse() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); returns(null);
}};
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
}
/**
 * For a single-player request an empty PlayerStatusResponse yields an empty
 * (not null) map rather than an exception.
 */
@Test
public void getPlayerAvailabilityTest_SinglePlayerNullStatus() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); returns(new PlayerStatusResponse());
}};
// only send one player ID.
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234);
Map<Integer, String> playerAvailabilityMap = playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerAvailabilityMap, is(not(nullValue())));
assertThat(playerAvailabilityMap.size(), is(0));
}
/**
 * For a multi-player request a missing wrapper is a service failure
 * (multi-player responses are expected to carry the wrapper element).
 */
@Test(expected = MFLServiceException.class)
public void getPlayerAvailabilityTest_MultiplePlayersNullWrapper() {
new NonStrictExpectations() {{
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); returns(new PlayerStatusResponse());
}};
// must send multiple player IDs to get the wrapper.
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
}
/**
 * A wrapper with a null status list yields an empty (not null) map.
 */
@Test
public void getPlayerAvailabilityTest_MultiplePlayersNullStatusList() {
new NonStrictExpectations() {{
PlayerStatusResponse playerStatusResponse = new PlayerStatusResponse();
playerStatusResponse.setWrapper(new PlayerStatusWrapper());
playerStatusResponse.getWrapper().setPlayerStatuses(null);
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); returns(playerStatusResponse);
}};
// send multiple player IDs
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
Map<Integer, String> playerAvailabilityMap = playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
assertThat(playerAvailabilityMap, is(not(nullValue())));
assertThat(playerAvailabilityMap.size(), is(0));
}
/**
 * A null element inside the status list is skipped; remaining valid entries
 * are still mapped.
 */
@Test
public void getPlayerAvailabilityTest_MultiplePlayersNullStatusInList() {
new NonStrictExpectations() {{
PlayerAvailabilityStatus playerAvailabilityStatus = new PlayerAvailabilityStatus();
playerAvailabilityStatus.setPlayerId(10695);
playerAvailabilityStatus.setStatus("Foo");
PlayerStatusResponse playerStatusResponse = new PlayerStatusResponse();
playerStatusResponse.setWrapper(new PlayerStatusWrapper());
playerStatusResponse.getWrapper().setPlayerStatuses(Arrays.asList(null, playerAvailabilityStatus));
mflPlayerExport.getPlayerStatus(anyInt, anyString, anyInt); returns(playerStatusResponse);
}};
// send multiple player IDs
PlayerService playerService = new JsonPlayerServiceImpl();
Set<Integer> playerIds = ImmutableSet.<Integer> of(1234, 5678);
Map<Integer, String> playerAvailabilityMap = playerService.getPlayerAvailability(RANDOM_LEAGUE_ID, playerIds, RANDOM_SERVER_ID, 2015);
// make sure the null status didn't screw up the rest of the processing
assertThat(playerAvailabilityMap, is(not(nullValue())));
assertThat(playerAvailabilityMap.size(), is(1));
assertThat(playerAvailabilityMap.get(10695), is("Foo"));
}
}
| |
package cz.metacentrum.perun.dispatcher.scheduling.impl;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import javax.jms.JMSException;
import cz.metacentrum.perun.core.api.PerunClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import cz.metacentrum.perun.auditparser.AuditParser;
import cz.metacentrum.perun.controller.service.GeneralServiceManager;
import cz.metacentrum.perun.core.api.Destination;
import cz.metacentrum.perun.core.api.Perun;
import cz.metacentrum.perun.core.api.PerunBean;
import cz.metacentrum.perun.core.api.PerunPrincipal;
import cz.metacentrum.perun.core.api.PerunSession;
import cz.metacentrum.perun.core.api.Service;
import cz.metacentrum.perun.core.api.exceptions.FacilityNotExistsException;
import cz.metacentrum.perun.core.api.Facility;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.PrivilegeException;
import cz.metacentrum.perun.core.api.exceptions.ServiceNotExistsException;
import cz.metacentrum.perun.dispatcher.scheduling.DependenciesResolver;
import cz.metacentrum.perun.dispatcher.scheduling.PropagationMaintainer;
import cz.metacentrum.perun.dispatcher.scheduling.SchedulingPool;
import cz.metacentrum.perun.dispatcher.scheduling.TaskScheduler;
//import cz.metacentrum.perun.engine.scheduling.TaskStatus;
import cz.metacentrum.perun.taskslib.dao.TaskResultDao;
import cz.metacentrum.perun.taskslib.model.ExecService;
import cz.metacentrum.perun.taskslib.model.ExecService.ExecServiceType;
import cz.metacentrum.perun.taskslib.model.Task;
import cz.metacentrum.perun.taskslib.model.Task.TaskStatus;
import cz.metacentrum.perun.taskslib.model.TaskResult;
import cz.metacentrum.perun.taskslib.service.ResultManager;
import cz.metacentrum.perun.taskslib.service.TaskManager;
@org.springframework.stereotype.Service(value = "propagationMaintainer")
public class PropagationMaintainerImpl implements PropagationMaintainer {
private final static Logger log = LoggerFactory
.getLogger(PropagationMaintainerImpl.class);
/*
* @Autowired private TaskManager taskManager;
*
* @Autowired private ResultManager resultManager;
*
* @Autowired private TaskResultDao taskResultDao;
*
* @Autowired private EngineManager engineManager;
*/
@Autowired
private SchedulingPool schedulingPool;
@Autowired
private DependenciesResolver dependenciesResolver;
@Autowired
private TaskScheduler taskScheduler;
@Autowired
private Perun perun;
@Autowired
private ResultManager resultManager;
/*
* @Autowired private GeneralServiceManager generalServiceManager;
*/
@Autowired
private Properties dispatcherPropertiesBean;
private PerunSession perunSession;
/**
* After how many minutes is PLANNED/PROCESSING Task considered as stuck and re-scheduled.
*/
private final static int rescheduleTime = 190;
/**
* TODO: Improve logic here: i.e.: stuck ExecutorEngine threads vs. Time-Out
* etc...
*/
@Override
public void checkResults() {
// A privileged session built from the dispatcher's configured principal is
// required by the maintenance passes below; without it we bail out early.
String principalName = dispatcherPropertiesBean.getProperty("perun.principal.name");
String extSourceName = dispatcherPropertiesBean.getProperty("perun.principal.extSourceName");
String extSourceType = dispatcherPropertiesBean.getProperty("perun.principal.extSourceType");
try {
perunSession = perun.getPerunSession(
new PerunPrincipal(principalName, extSourceName, extSourceType),
new PerunClient());
} catch (InternalErrorException e1) {
log.error(
"Error establishing perun session to check tasks propagation status: ",
e1);
return;
}
// Run the individual maintenance passes in their fixed order.
checkFinishedTasks();
rescheduleErrorTasks();
endStuckTasks();
rescheduleOldDoneTasks();
}
/**
 * Pass over finished tasks. Intentionally a no-op: the per-task debug
 * logging below was disabled on purpose to keep the log file quiet.
 */
private void checkFinishedTasks() {
/* no need to spam the log file
 *
for (Task task : schedulingPool.getDoneTasks()) {
log.debug("Task " + task.toString() + " is done.");
}
*/
}
/**
 * Walks every Task currently in ERROR state and decides its fate.
 *
 * A task is retried when enough time has passed since its end time — the
 * wait grows with the attempt count (recurrence * delay minutes) — or
 * immediately when its source data changed. Retrying normally stops once the
 * recurrence count exceeds the ExecService default, unless the task has been
 * in ERROR longer than 12 hours or its source was updated. When the task's
 * service is no longer assigned to the facility, or the facility no longer
 * exists, the task is removed from the scheduling pool instead. Dependent
 * tasks without a pool entry are created and rescheduled alongside, so a
 * SEND task is not stranded when its GEN task recovers.
 *
 * (Cleanup: the large commented-out legacy implementation that used to
 * follow the loop has been removed; see VCS history if it is ever needed.)
 */
private void rescheduleErrorTasks() {
log.info("I am gonna list tasks in ERROR and reschedule if necessary...");
for (Task task : schedulingPool.getErrorTasks()) {
// Recover tasks with a missing end time so the age computation works.
if (task.getEndTime() == null) {
log.error("RECOVERY FROM INCONSISTENT STATE: ERROR task does not have end_time! Setting end_time to task.getDelay + 1.");
// getDelay is in minutes, therefore we multiply it with 60*1000
Date endTime = new Date(System.currentTimeMillis()
- ((task.getDelay() + 1) * 60000));
task.setEndTime(endTime);
}
int howManyMinutesAgo = (int) (System.currentTimeMillis() - task
.getEndTime().getTime()) / 1000 / 60;
// An end time in the future is equally inconsistent; normalize it too.
if(howManyMinutesAgo < 0) {
log.error("RECOVERY FROM INCONSISTENT STATE: ERROR task appears to have ended in future.");
Date endTime = new Date(System.currentTimeMillis()
- ((task.getDelay() + 1) * 60000));
task.setEndTime(endTime);
howManyMinutesAgo = task.getDelay() + 1;
}
log.info("TASK [" + task + "] in ERROR state completed "
+ howManyMinutesAgo + " minutes ago.");
// If DELAY time has passed, we reschedule...
int recurrence = task.getRecurrence() + 1;
if(recurrence > task.getExecService().getDefaultRecurrence() &&
howManyMinutesAgo < 60 * 12 &&
!task.isSourceUpdated()) {
log.info("TASK [ " + task + "] in ERROR state has no more retries, bailing out.");
} else if (howManyMinutesAgo >= recurrence * task.getDelay() ||
task.isSourceUpdated()) {
// check if service is still assigned on facility
try {
List<Service> assignedServices = perun.getServicesManager().getAssignedServices(perunSession, task.getFacility());
if (assignedServices.contains(task.getExecService().getService())) {
ExecService execService = task.getExecService();
Facility facility = task.getFacility();
if(recurrence > execService.getDefaultRecurrence()) {
// this ERROR task is rescheduled for being here too long
// NOTE(review): setRecurrence(0) is immediately overwritten by
// setRecurrence(recurrence) below — confirm whether the reset
// was meant to survive (i.e. belong in an else branch).
task.setRecurrence(0);
task.setDestinations(null);
log.info("TASK id " + task.getId() + " is in ERROR state long enough, ");
}
task.setRecurrence(recurrence);
log.info("TASK ["
+ task
+ "] in ERROR state is going to be rescheduled: taskScheduler.propagateService(execService:ID "
+ execService.getId()
+ ", new Date(System.currentTimeMillis()), facility:ID "
+ facility.getId() + ");");
taskScheduler.scheduleTask(task);
log.info("TASK [" + task
+ "] in ERROR state has been rescheduled.");
// Also (to be sure) reschedule all Tasks that depend on
// this Task
//
// While engine starts in state GEN = ERROR, SEND = DONE
// => GEN will be rescheduled but without this SEND will
// never be propagated
List<ExecService> dependantServices = dependenciesResolver.listDependantServices(execService);
for (ExecService dependantService : dependantServices) {
Task dependantTask = schedulingPool.getTask(dependantService, facility);
if (dependantTask == null) {
dependantTask = new Task();
dependantTask.setExecService(dependantService);
dependantTask.setFacility(facility);
dependantTask.setRecurrence(dependantService.getDefaultRecurrence());
schedulingPool.addToPool(dependantTask, schedulingPool.getQueueForTask(task));
taskScheduler.scheduleTask(dependantTask);
log.info("{} was rescheduled because it depends on {}",
dependantTask, task);
}
}
} else {
// delete this tasks (SEND and GEN) because service is
// no longer assigned to facility
schedulingPool.removeTask(task);
log.warn(
"Removed TASK {} from database, beacuse service is no longer assigned to this facility.",
task.toString());
}
} catch (FacilityNotExistsException e) {
schedulingPool.removeTask(task);
log.error("Removed TASK {} from database, facility no longer exists.",
task.getId());
} catch (InternalErrorException e) {
log.error("{}", e);
} catch (PrivilegeException e) {
log.error("Consistency error. {}", e);
}
}
}
}
/**
 * Finds Tasks stuck in PLANNED or PROCESSING and forces them into ERROR.
 *
 * The age of a task is counted from its start time when it has one
 * (PROCESSING), otherwise from its scheduled time (PLANNED). Tasks older
 * than {@link #rescheduleTime} minutes, and tasks with an invalid status or
 * neither timestamp set, get their end time stamped and are switched to
 * ERROR so the error-rescheduling pass can pick them up.
 *
 * (Cleanup: the commented-out legacy implementation that used to follow the
 * loop has been removed; see VCS history if it is ever needed.)
 */
private void endStuckTasks() {
// list all tasks in processing and planned and check if any have been
// running for too long.
log.info("I am gonna list planned and processing tasks and kill them if necessary...");
List<Task> suspiciousTasks = schedulingPool.getProcessingTasks();
suspiciousTasks.addAll(schedulingPool.getPlannedTasks());
for (Task task : suspiciousTasks) {
// count how many minutes the task stays in one state - if the state
// is PLANNED count it from when it was scheduled ; if it is
// PROCESSING count it from when it started
Date started = task.getStartTime();
Date scheduled = task.getSchedule();
TaskStatus status = task.getStatus();
if (status == null) {
log.error("ERROR: Task presumably in PLANNED or PROCESSING state, but does not have a valid status. Switching to ERROR. {}",
task);
task.setEndTime(new Date(System.currentTimeMillis()));
schedulingPool.setTaskStatus(task, TaskStatus.ERROR);
continue;
}
if (started == null && scheduled == null) {
log.error("ERROR: Task presumably in PLANNED or PROCESSING state, but does not have a valid scheduled or started time. Switching to ERROR. {}",
task);
task.setEndTime(new Date(System.currentTimeMillis()));
schedulingPool.setTaskStatus(task, TaskStatus.ERROR);
continue;
}
int howManyMinutesAgo = (int) (System.currentTimeMillis() - (started == null ? scheduled
: started).getTime()) / 1000 / 60;
// If too much time has passed something is broken
if (howManyMinutesAgo >= rescheduleTime) {
log.error("ERROR: Task is stuck in PLANNED or PROCESSING state. Switching it to ERROR. {}",
task);
task.setEndTime(new Date(System.currentTimeMillis()));
schedulingPool.setTaskStatus(task, TaskStatus.ERROR);
}
}
}
/**
 * Reschedules completed SEND Tasks whose results are stale.
 *
 * GEN tasks are skipped entirely. A finished task is rescheduled when its
 * source data changed, or when it has no end time or last ran more than two
 * days ago; otherwise it is left alone.
 *
 * (Cleanup: the commented-out legacy implementation that used to follow the
 * loop has been removed; see VCS history if it is ever needed. Note the
 * legacy two-day constant was missing a *60 factor — the live code below is
 * the correct one.)
 */
private void rescheduleOldDoneTasks() {
// Reschedule SEND tasks in DONE that haven't been running for quite a
// while
log.info("I am gonna list complete tasks and reschedule if they are too old...");
for (Task task : schedulingPool.getDoneTasks()) {
// skip GEN tasks
if (task.getExecService() != null &&
task.getExecService().getExecServiceType().equals(ExecService.ExecServiceType.GENERATE)) {
log.debug(
"Found finished GEN TASK {} that was not running for a while, leaving it as is.",
task.toString());
continue;
}
Date twoDaysAgo = new Date(System.currentTimeMillis() - 1000 * 60 * 60 * 24 * 2);
if (task.isSourceUpdated()) {
// reschedule the task
log.info("TASK ["
+ task
+ "] data changed. Going to schedule for propagation now.");
taskScheduler.scheduleTask(task);
} else if (task.getEndTime() == null || task.getEndTime().before(twoDaysAgo)) {
// reschedule the task
log.info("TASK ["
+ task
+ "] wasn't propagated for more then 2 days. Going to schedule it for propagation now.");
taskScheduler.scheduleTask(task);
} else {
log.info("TASK [" + task + "] has finished recently, leaving it for now.");
}
}
}
/*
* @Override public Statistics getStatistics() { throw new
* UnsupportedOperationException("Nah..."); }
*/
private void setAllGenerateDependenciesToNone(Task task) {
List<ExecService> dependencies = this.dependenciesResolver.listDependencies(task.getExecService());
for (ExecService dependencyToBeSetDirty : dependencies) {
if (dependencyToBeSetDirty.getExecServiceType().equals(ExecServiceType.GENERATE)) {
Task taskToBeSetDirty = schedulingPool.getTask(dependencyToBeSetDirty, task.getFacility());
if (taskToBeSetDirty != null) {
log.debug(
"Setting GEN dependency task {} to NONE state to regenerate data for completed task {}",
taskToBeSetDirty, task);
schedulingPool.setTaskStatus(taskToBeSetDirty, TaskStatus.NONE);
try {
schedulingPool.setQueueForTask(taskToBeSetDirty, null);
} catch (InternalErrorException e) {
log.error("Could not set destination queue for task {}: {}", task.getId(), e.getMessage());
}
}
}
}
}
@Override
public void setAllGenerateDependenciesToNone(
List<ExecService> dependencies, Facility facility) {
setAllGenerateDependenciesToNone(dependencies, facility.getId());
}
	/**
	 * NOTE(review): this method is currently a NO-OP - its entire body is
	 * commented out below pending a rewrite. Callers get no effect; see the
	 * private setAllGenerateDependenciesToNone(Task) for the live logic.
	 */
	@Override
	public void setAllGenerateDependenciesToNone(
			List<ExecService> dependencies, int facilityId) {
		// And we set all its GENERATE dependencies as "dirty" by switching them
		// to NONE state.
		// TODO: Optimize this for cycle out with a 1 clever SQL query ???
		// ^ setAllGenerateDependenciesToNone(ExecService execService, Facility
		// facility) ???
		// TODO:ADD TEST CASE!!!
		/*
		 * TODO: rewrite this for (ExecService dependencyToBeSetDirty :
		 * dependencies) { if
		 * (dependencyToBeSetDirty.getExecServiceType().equals
		 * (ExecServiceType.GENERATE)) { Task taskToBeSetDirty =
		 * taskManager.getTask(dependencyToBeSetDirty.getId(), facilityId,
		 * Integer.parseInt(propertiesBean.getProperty("engine.unique.id"))); if
		 * (taskToBeSetDirty != null) {
		 * taskToBeSetDirty.setStatus(TaskStatus.NONE);
		 * taskManager.updateTask(taskToBeSetDirty,
		 * Integer.parseInt(propertiesBean.getProperty("engine.unique.id"))); }
		 * } }
		 */
	}
@Override
public void closeTasksForEngine(int clientID) {
List<Task> tasks = schedulingPool.getTasksForEngine(clientID);
// switch all processing tasks to error, remove the engine queue association
log.debug("Switching PROCESSING tasks on engine {} to ERROR, the engine went down", clientID);
for(Task task: tasks) {
if(task.getStatus().equals(TaskStatus.PROCESSING)) {
log.debug("switching task {} to ERROR, the engine it was running on went down", task.getId());
schedulingPool.setTaskStatus(task, TaskStatus.ERROR);
}
try {
schedulingPool.setQueueForTask(task, null);
} catch (InternalErrorException e) {
log.error("Could not remove output queue for task {}: {}", task.getId(), e.getMessage());
}
}
}
@Override
public void onTaskComplete(int taskId, int clientID, String status_s,
String string) {
Task completedTask = schedulingPool.getTaskById(taskId);
if (completedTask == null) {
// eh? how would that be possible?
log.error("TASK id {} reported as complete, but we do not know it... (yet?)", taskId);
return;
}
TaskStatus status = TaskStatus.NONE;
if (status_s.equals("ERROR")) {
status = TaskStatus.ERROR;
} else if (status_s.equals("DONE")) {
status = TaskStatus.DONE;
} else {
log.error("Engine reported unexpected status {} for task id {}, setting to ERROR",
status_s, taskId);
status = TaskStatus.ERROR;
}
completedTask.setEndTime(new Date(System.currentTimeMillis()));
// if we are going to run this task again, make sure to generate up to
// date data
if (completedTask.getExecService().getExecServiceType().equals(ExecServiceType.SEND)) {
try {
schedulingPool.setQueueForTask(completedTask, null);
} catch (InternalErrorException e) {
log.error("Could not set destination queue for task {}: {}", completedTask.getId(), e.getMessage());
}
this.setAllGenerateDependenciesToNone(completedTask);
}
if (status.equals(TaskStatus.DONE)) {
// task completed successfully
// set destination list to null to refetch them later
completedTask.setDestinations(null);
schedulingPool.setTaskStatus(completedTask, TaskStatus.DONE);
completedTask.setRecurrence(0);
log.debug("TASK {} reported as DONE", completedTask.toString());
// for GEN tasks, signal SENDs that source data are updated
if(completedTask.getExecService().getExecServiceType().equals(ExecServiceType.GENERATE)) {
List<ExecService> dependantServices = dependenciesResolver.listDependantServices(completedTask.getExecService());
for (ExecService dependantService : dependantServices) {
Task dependantTask = schedulingPool.getTask(dependantService, completedTask.getFacility());
if (dependantTask != null && dependantService.getExecServiceType().equals(ExecServiceType.SEND)) {
dependantTask.setSourceUpdated(false);
}
if(completedTask.isPropagationForced() && dependantTask.isPropagationForced()) {
log.debug("Going to force schedule dependant task " + dependantTask.getId());
taskScheduler.scheduleTask(dependantTask);
}
}
}
completedTask.setPropagationForced(false);
} else {
if (string.isEmpty()) {
// weird - task is in error and no destinations reported as
// failed...
log.warn("TASK {} ended in ERROR state with no remaining destinations.",
completedTask.toString());
} else {
// task failed, some destinations remain
// resolve list of destinations
List<PerunBean> listOfBeans;
List<Destination> destinationList = new ArrayList<Destination>();
try {
listOfBeans = AuditParser.parseLog(string);
log.debug("Found list of destination beans: " + listOfBeans);
for (PerunBean bean : listOfBeans) {
destinationList.add((Destination) bean);
}
} catch (InternalErrorException e) {
log.error("Could not resolve destination from destination list");
}
if(completedTask.getDestinations() != null &&
!completedTask.getDestinations().isEmpty()) {
completedTask.setDestinations(destinationList);
}
}
schedulingPool.setTaskStatus(completedTask, TaskStatus.ERROR);
log.debug("Task set to ERROR state with remaining destinations: "
+ completedTask.getDestinations());
}
}
@Override
public void onTaskDestinationComplete(int clientID, String string) {
if(string == null || string.isEmpty()) {
log.error("Could not parse taskresult message from engine " + clientID);
return;
}
try {
List<PerunBean> listOfBeans = AuditParser.parseLog(string);
if(!listOfBeans.isEmpty()) {
TaskResult taskResult = (TaskResult)listOfBeans.get(0);
resultManager.insertNewTaskResult(taskResult, clientID);
} else {
log.error("No TaskResult bean found in message {} from engine {}", string, clientID);
}
} catch (Exception e) {
log.error("Could not save taskresult message {} from engine " + clientID, string);
log.debug("Error storing taskresult message: " + e.getMessage());
}
}
/*
* public TaskManager getTaskManager() { return taskManager; }
*
* public void setTaskManager(TaskManager taskManager) { this.taskManager =
* taskManager; }
*
* public TaskResultDao getTaskResultDao() { return taskResultDao; }
*
* public void setTaskResultDao(TaskResultDao taskResultDao) {
* this.taskResultDao = taskResultDao; }
*
* public EngineManager getEngineManager() { return engineManager; }
*
* public void setEngineManager(EngineManager engineManager) {
* this.engineManager = engineManager; }
*/
	// --- Plain dependency-injection accessors (Spring wiring) ---

	public DependenciesResolver getDependenciesResolver() {
		return dependenciesResolver;
	}
	public void setDependenciesResolver(
			DependenciesResolver dependenciesResolver) {
		this.dependenciesResolver = dependenciesResolver;
	}
	public TaskScheduler getTaskScheduler() {
		return taskScheduler;
	}
	public void setTaskScheduler(TaskScheduler taskScheduler) {
		this.taskScheduler = taskScheduler;
	}
	public Properties getDispatcherPropertiesBean() {
		return dispatcherPropertiesBean;
	}
	public void setDispatcherPropertiesBean(Properties propertiesBean) {
		this.dispatcherPropertiesBean = propertiesBean;
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator.scalar;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.primitives.Doubles;
import io.airlift.json.ObjectMapperProvider;
import io.airlift.slice.DynamicSliceOutput;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceOutput;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.function.LiteralParameter;
import io.prestosql.spi.function.LiteralParameters;
import io.prestosql.spi.function.OperatorType;
import io.prestosql.spi.function.ScalarFunction;
import io.prestosql.spi.function.ScalarOperator;
import io.prestosql.spi.function.SqlNullable;
import io.prestosql.spi.function.SqlType;
import io.prestosql.spi.type.SqlDecimal;
import io.prestosql.spi.type.StandardTypes;
import io.prestosql.spi.type.Type;
import io.prestosql.type.JsonPathType;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;
import static com.fasterxml.jackson.core.JsonFactory.Feature.CANONICALIZE_FIELD_NAMES;
import static com.fasterxml.jackson.core.JsonParser.NumberType;
import static com.fasterxml.jackson.core.JsonToken.END_ARRAY;
import static com.fasterxml.jackson.core.JsonToken.START_ARRAY;
import static com.fasterxml.jackson.core.JsonToken.START_OBJECT;
import static com.fasterxml.jackson.core.JsonToken.VALUE_FALSE;
import static com.fasterxml.jackson.core.JsonToken.VALUE_NUMBER_FLOAT;
import static com.fasterxml.jackson.core.JsonToken.VALUE_NUMBER_INT;
import static com.fasterxml.jackson.core.JsonToken.VALUE_STRING;
import static com.fasterxml.jackson.core.JsonToken.VALUE_TRUE;
import static com.fasterxml.jackson.databind.SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS;
import static io.airlift.slice.Slices.utf8Slice;
import static io.prestosql.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.prestosql.spi.type.Chars.padSpaces;
import static io.prestosql.util.JsonUtil.createJsonParser;
import static io.prestosql.util.JsonUtil.truncateIfNecessaryForErrorMessage;
import static java.lang.String.format;
/**
 * Scalar SQL functions over JSON values: parse/format, scalar tests, array
 * length / containment / indexed access, and JSON-path extraction. Each
 * function typically has a VARCHAR-input twin that delegates to the JSON-typed
 * variant. All functions are stateless and operate on UTF-8 {@link Slice}s;
 * malformed input generally yields SQL NULL rather than an error, except where
 * noted.
 */
public final class JsonFunctions
{
    // Field-name canonicalization is disabled: inputs are arbitrary user JSON,
    // so interning field names would only grow memory without reuse benefit.
    private static final JsonFactory JSON_FACTORY = new JsonFactory()
            .disable(CANONICALIZE_FIELD_NAMES);
    private static final JsonFactory MAPPING_JSON_FACTORY = new MappingJsonFactory()
            .disable(CANONICALIZE_FIELD_NAMES);
    // json_parse emits a canonical form: map keys serialized in sorted order.
    private static final ObjectMapper SORTED_MAPPER = new ObjectMapperProvider().get().configure(ORDER_MAP_ENTRIES_BY_KEYS, true);
    // Static utility class - not instantiable.
    private JsonFunctions() {}
    /** CAST(varchar AS JsonPath): compiles the path expression. */
    @ScalarOperator(OperatorType.CAST)
    @SqlType(JsonPathType.NAME)
    @LiteralParameters("x")
    public static JsonPath castVarcharToJsonPath(@SqlType("varchar(x)") Slice pattern)
    {
        return new JsonPath(pattern.toStringUtf8());
    }
    /** CAST(char AS JsonPath): pads to the declared char(x) length first. */
    @ScalarOperator(OperatorType.CAST)
    @LiteralParameters("x")
    @SqlType(JsonPathType.NAME)
    public static JsonPath castCharToJsonPath(@LiteralParameter("x") Long charLength, @SqlType("char(x)") Slice pattern)
    {
        return new JsonPath(padSpaces(pattern, charLength.intValue()).toStringUtf8());
    }
    /** is_json_scalar(varchar): delegates to the JSON-typed variant. */
    @ScalarFunction("is_json_scalar")
    @LiteralParameters("x")
    @SqlType(StandardTypes.BOOLEAN)
    public static boolean varcharIsJsonScalar(@SqlType("varchar(x)") Slice json)
    {
        return isJsonScalar(json);
    }
    /**
     * Returns true if the input is a single JSON scalar (number, string,
     * boolean or null), false for an array/object. Throws for invalid JSON or
     * trailing garbage - unlike most functions here, which return NULL.
     */
    @ScalarFunction
    @SqlType(StandardTypes.BOOLEAN)
    public static boolean isJsonScalar(@SqlType(StandardTypes.JSON) Slice json)
    {
        try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) {
            JsonToken nextToken = parser.nextToken();
            if (nextToken == null) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Invalid JSON value: " + truncateIfNecessaryForErrorMessage(json));
            }
            if (nextToken == START_ARRAY || nextToken == START_OBJECT) {
                parser.skipChildren();
                if (parser.nextToken() != null) {
                    // extra trailing token after json array/object
                    throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Invalid JSON value: " + truncateIfNecessaryForErrorMessage(json));
                }
                return false;
            }
            if (parser.nextToken() != null) {
                // extra trailing token after json scalar
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Invalid JSON value: " + truncateIfNecessaryForErrorMessage(json));
            }
            return true;
        }
        catch (IOException e) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Invalid JSON value: " + truncateIfNecessaryForErrorMessage(json));
        }
    }
    /** json_format: JSON is already stored as text, so this is the identity. */
    @ScalarFunction
    @SqlType(StandardTypes.VARCHAR)
    public static Slice jsonFormat(@SqlType(StandardTypes.JSON) Slice slice)
    {
        return slice;
    }
    /**
     * json_parse: validates the input and re-serializes it in canonical form
     * (sorted map keys). Throws INVALID_FUNCTION_ARGUMENT on malformed input.
     */
    @ScalarFunction
    @LiteralParameters("x")
    @SqlType(StandardTypes.JSON)
    public static Slice jsonParse(@SqlType("varchar(x)") Slice slice)
    {
        // cast(json_parse(x) AS t)` will be optimized into `$internal$json_string_to_array/map/row_cast` in ExpressionOptimizer
        // If you make changes to this function (e.g. use parse JSON string into some internal representation),
        // make sure `$internal$json_string_to_array/map/row_cast` is changed accordingly.
        try (JsonParser parser = createJsonParser(JSON_FACTORY, slice)) {
            byte[] in = slice.getBytes();
            SliceOutput dynamicSliceOutput = new DynamicSliceOutput(in.length);
            SORTED_MAPPER.writeValue((OutputStream) dynamicSliceOutput, SORTED_MAPPER.readValue(parser, Object.class));
            // nextToken() returns null if the input is parsed correctly,
            // but will throw an exception if there are trailing characters.
            parser.nextToken();
            return dynamicSliceOutput.slice();
        }
        catch (Exception e) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Cannot convert '%s' to JSON", slice.toStringUtf8()));
        }
    }
    /** json_array_length(varchar): delegates to the JSON-typed variant. */
    @SqlNullable
    @ScalarFunction("json_array_length")
    @LiteralParameters("x")
    @SqlType(StandardTypes.BIGINT)
    public static Long varcharJsonArrayLength(@SqlType("varchar(x)") Slice json)
    {
        return jsonArrayLength(json);
    }
    /**
     * Number of top-level elements in a JSON array; NULL if the input is not
     * an array or is malformed (truncated arrays also yield NULL).
     */
    @SqlNullable
    @ScalarFunction
    @SqlType(StandardTypes.BIGINT)
    public static Long jsonArrayLength(@SqlType(StandardTypes.JSON) Slice json)
    {
        try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) {
            if (parser.nextToken() != START_ARRAY) {
                return null;
            }
            long length = 0;
            while (true) {
                JsonToken token = parser.nextToken();
                if (token == null) {
                    return null;
                }
                if (token == END_ARRAY) {
                    return length;
                }
                // skipChildren() collapses nested arrays/objects to one element
                parser.skipChildren();
                length++;
            }
        }
        catch (IOException e) {
            return null;
        }
    }
    /** json_array_contains(varchar, boolean): delegates to the JSON variant. */
    @SqlNullable
    @ScalarFunction("json_array_contains")
    @LiteralParameters("x")
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean varcharJsonArrayContains(@SqlType("varchar(x)") Slice json, @SqlType(StandardTypes.BOOLEAN) boolean value)
    {
        return jsonArrayContains(json, value);
    }
    /**
     * True if the top-level JSON array contains the given boolean; NULL for
     * non-array or malformed input.
     */
    @SqlNullable
    @ScalarFunction
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean jsonArrayContains(@SqlType(StandardTypes.JSON) Slice json, @SqlType(StandardTypes.BOOLEAN) boolean value)
    {
        try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) {
            if (parser.nextToken() != START_ARRAY) {
                return null;
            }
            while (true) {
                JsonToken token = parser.nextToken();
                if (token == null) {
                    return null;
                }
                if (token == END_ARRAY) {
                    return false;
                }
                parser.skipChildren();
                if (((token == VALUE_TRUE) && value) ||
                        ((token == VALUE_FALSE) && (!value))) {
                    return true;
                }
            }
        }
        catch (IOException e) {
            return null;
        }
    }
    /** json_array_contains(varchar, bigint): delegates to the JSON variant. */
    @SqlNullable
    @ScalarFunction("json_array_contains")
    @LiteralParameters("x")
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean varcharJsonArrayContains(@SqlType("varchar(x)") Slice json, @SqlType(StandardTypes.BIGINT) long value)
    {
        return jsonArrayContains(json, value);
    }
    /**
     * True if the top-level JSON array contains the given integer. Only
     * integral JSON numbers that fit in int/long are compared; floats and
     * bignums never match. NULL for non-array or malformed input.
     */
    @SqlNullable
    @ScalarFunction
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean jsonArrayContains(@SqlType(StandardTypes.JSON) Slice json, @SqlType(StandardTypes.BIGINT) long value)
    {
        try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) {
            if (parser.nextToken() != START_ARRAY) {
                return null;
            }
            while (true) {
                JsonToken token = parser.nextToken();
                if (token == null) {
                    return null;
                }
                if (token == END_ARRAY) {
                    return false;
                }
                parser.skipChildren();
                if ((token == VALUE_NUMBER_INT) &&
                        ((parser.getNumberType() == NumberType.INT) || (parser.getNumberType() == NumberType.LONG)) &&
                        (parser.getLongValue() == value)) {
                    return true;
                }
            }
        }
        catch (IOException e) {
            return null;
        }
    }
    /** json_array_contains(varchar, double): delegates to the JSON variant. */
    @SqlNullable
    @ScalarFunction("json_array_contains")
    @LiteralParameters("x")
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean varcharJsonArrayContains(@SqlType("varchar(x)") Slice json, @SqlType(StandardTypes.DOUBLE) double value)
    {
        return jsonArrayContains(json, value);
    }
    /**
     * True if the top-level JSON array contains the given double. Only JSON
     * floating-point tokens are compared (integer tokens never match), and
     * non-finite search values short-circuit to false.
     */
    @SqlNullable
    @ScalarFunction
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean jsonArrayContains(@SqlType(StandardTypes.JSON) Slice json, @SqlType(StandardTypes.DOUBLE) double value)
    {
        if (!Doubles.isFinite(value)) {
            return false;
        }
        try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) {
            if (parser.nextToken() != START_ARRAY) {
                return null;
            }
            while (true) {
                JsonToken token = parser.nextToken();
                if (token == null) {
                    return null;
                }
                if (token == END_ARRAY) {
                    return false;
                }
                parser.skipChildren();
                // noinspection FloatingPointEquality
                if ((token == VALUE_NUMBER_FLOAT) && (parser.getDoubleValue() == value) &&
                        (Doubles.isFinite(parser.getDoubleValue()))) {
                    return true;
                }
            }
        }
        catch (IOException e) {
            return null;
        }
    }
    /** json_array_contains(varchar, varchar): delegates to the JSON variant. */
    @SqlNullable
    @ScalarFunction("json_array_contains")
    @LiteralParameters({"x", "y"})
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean varcharJsonArrayContains(@SqlType("varchar(x)") Slice json, @SqlType("varchar(y)") Slice value)
    {
        return jsonArrayContains(json, value);
    }
    /**
     * True if the top-level JSON array contains the given string (exact match
     * against JSON string tokens only). NULL for non-array/malformed input.
     */
    @SqlNullable
    @ScalarFunction
    @LiteralParameters("x")
    @SqlType(StandardTypes.BOOLEAN)
    public static Boolean jsonArrayContains(@SqlType(StandardTypes.JSON) Slice json, @SqlType("varchar(x)") Slice value)
    {
        String valueString = value.toStringUtf8();
        try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) {
            if (parser.nextToken() != START_ARRAY) {
                return null;
            }
            while (true) {
                JsonToken token = parser.nextToken();
                if (token == null) {
                    return null;
                }
                if (token == END_ARRAY) {
                    return false;
                }
                parser.skipChildren();
                if (token == VALUE_STRING && valueString.equals(parser.getValueAsString())) {
                    return true;
                }
            }
        }
        catch (IOException e) {
            return null;
        }
    }
    /** json_array_get(varchar, bigint): delegates to the JSON variant. */
    @SqlNullable
    @ScalarFunction("json_array_get")
    @LiteralParameters("x")
    @SqlType(StandardTypes.JSON)
    public static Slice varcharJsonArrayGet(@SqlType("varchar(x)") Slice json, @SqlType(StandardTypes.BIGINT) long index)
    {
        return jsonArrayGet(json, index);
    }
    /**
     * Returns the element at the given index of the top-level JSON array.
     * Negative indexes count from the end (-1 is the last element) and are
     * serviced via a sliding window of the last |index| elements. NULL if the
     * index is out of range, the input is not an array, or it is malformed.
     */
    @SqlNullable
    @ScalarFunction
    @SqlType(StandardTypes.JSON)
    public static Slice jsonArrayGet(@SqlType(StandardTypes.JSON) Slice json, @SqlType(StandardTypes.BIGINT) long index)
    {
        // this value cannot be converted to positive number
        if (index == Long.MIN_VALUE) {
            return null;
        }
        try (JsonParser parser = createJsonParser(MAPPING_JSON_FACTORY, json)) {
            if (parser.nextToken() != START_ARRAY) {
                return null;
            }
            List<String> tokens = null;
            if (index < 0) {
                // sliding window of the last |index| serialized elements
                tokens = new LinkedList<>();
            }
            long count = 0;
            while (true) {
                JsonToken token = parser.nextToken();
                if (token == null) {
                    return null;
                }
                if (token == END_ARRAY) {
                    if (tokens != null && count >= index * -1) {
                        return utf8Slice(tokens.get(0));
                    }
                    return null;
                }
                String arrayElement;
                if (token == START_OBJECT || token == START_ARRAY) {
                    // serialize nested structures back to their JSON text
                    arrayElement = parser.readValueAsTree().toString();
                }
                else {
                    arrayElement = parser.getValueAsString();
                }
                if (count == index) {
                    // JSON null yields a null string -> SQL NULL
                    return arrayElement == null ? null : utf8Slice(arrayElement);
                }
                if (tokens != null) {
                    tokens.add(arrayElement);
                    if (count >= index * -1) {
                        tokens.remove(0);
                    }
                }
                count++;
            }
        }
        catch (IOException e) {
            return null;
        }
    }
    /** json_extract_scalar(varchar, path): delegates to JsonExtract. */
    @ScalarFunction("json_extract_scalar")
    @SqlNullable
    @LiteralParameters("x")
    @SqlType("varchar(x)")
    public static Slice varcharJsonExtractScalar(@SqlType("varchar(x)") Slice json, @SqlType(JsonPathType.NAME) JsonPath jsonPath)
    {
        return JsonExtract.extract(json, jsonPath.getScalarExtractor());
    }
    /** json_extract_scalar(json, path): scalar-valued JSON-path lookup. */
    @ScalarFunction
    @SqlNullable
    @SqlType(StandardTypes.VARCHAR)
    public static Slice jsonExtractScalar(@SqlType(StandardTypes.JSON) Slice json, @SqlType(JsonPathType.NAME) JsonPath jsonPath)
    {
        return JsonExtract.extract(json, jsonPath.getScalarExtractor());
    }
    /** json_extract(varchar, path): delegates to JsonExtract. */
    @ScalarFunction("json_extract")
    @LiteralParameters("x")
    @SqlNullable
    @SqlType(StandardTypes.JSON)
    public static Slice varcharJsonExtract(@SqlType("varchar(x)") Slice json, @SqlType(JsonPathType.NAME) JsonPath jsonPath)
    {
        return JsonExtract.extract(json, jsonPath.getObjectExtractor());
    }
    /** json_extract(json, path): structural JSON-path lookup. */
    @ScalarFunction
    @SqlNullable
    @SqlType(StandardTypes.JSON)
    public static Slice jsonExtract(@SqlType(StandardTypes.JSON) Slice json, @SqlType(JsonPathType.NAME) JsonPath jsonPath)
    {
        return JsonExtract.extract(json, jsonPath.getObjectExtractor());
    }
    /** json_size(varchar, path): delegates to JsonExtract. */
    @ScalarFunction("json_size")
    @LiteralParameters("x")
    @SqlNullable
    @SqlType(StandardTypes.BIGINT)
    public static Long varcharJsonSize(@SqlType("varchar(x)") Slice json, @SqlType(JsonPathType.NAME) JsonPath jsonPath)
    {
        return JsonExtract.extract(json, jsonPath.getSizeExtractor());
    }
    /** json_size(json, path): size of the value addressed by the path. */
    @ScalarFunction
    @SqlNullable
    @SqlType(StandardTypes.BIGINT)
    public static Long jsonSize(@SqlType(StandardTypes.JSON) Slice json, @SqlType(JsonPathType.NAME) JsonPath jsonPath)
    {
        return JsonExtract.extract(json, jsonPath.getSizeExtractor());
    }
    /**
     * Reads a position from a Block as a Java object suitable for JSON
     * serialization; SqlDecimal is widened to BigDecimal so Jackson can
     * serialize it natively.
     */
    public static Object getJsonObjectValue(Type valueType, ConnectorSession session, Block block, int position)
    {
        Object objectValue = valueType.getObjectValue(session, block, position);
        if (objectValue instanceof SqlDecimal) {
            objectValue = ((SqlDecimal) objectValue).toBigDecimal();
        }
        return objectValue;
    }
}
| |
package jiraiyah.librarian.infrastructure;
import com.google.common.collect.Maps;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.AttributeKey;
import jiraiyah.librarian.interfaces.IDataInput;
import jiraiyah.librarian.interfaces.IDataOutput;
import net.minecraft.client.Minecraft;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.INetHandler;
import net.minecraft.network.NetHandlerPlayServer;
import net.minecraft.network.Packet;
import net.minecraft.network.PacketBuffer;
import net.minecraft.network.play.INetHandlerPlayClient;
import net.minecraft.network.play.INetHandlerPlayServer;
import net.minecraft.server.management.PlayerManager;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.ModContainer;
import net.minecraftforge.fml.common.network.*;
import net.minecraftforge.fml.common.network.handshake.NetworkDispatcher;
import net.minecraftforge.fml.common.network.internal.FMLProxyPacket;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import net.minecraftforge.fml.server.FMLServerHandler;
import org.apache.commons.lang3.ArrayUtils;
import java.util.EnumMap;
import java.util.List;
import java.util.zip.Deflater;
import java.util.zip.Inflater;
public class PacketCustom implements IDataInput, IDataOutput
{
    /**
     * Marker interface for packet handlers; the side-specific sub-interfaces
     * below define the actual callback signatures.
     */
    public static interface ICustomPacketHandler
    {
    }
    public interface IClientPacketHandler extends ICustomPacketHandler
    {
        // Invoked for packets arriving on the client side.
        public void handlePacket(PacketCustom packetCustom, Minecraft mc, INetHandlerPlayClient handler);
    }
    public interface IServerPacketHandler extends ICustomPacketHandler
    {
        // Invoked on the server for packets sent by the given player.
        public void handlePacket(PacketCustom packetCustom, EntityPlayerMP sender, INetHandlerPlayServer handler);
    }
    // Channel attribute under which the inbound handler registers itself so it
    // can be looked up later by assignHandler().
    public static AttributeKey<CustomInboundHandler> cclHandler = new AttributeKey<CustomInboundHandler>("ccl:handler");
    /**
     * Netty pipeline stage that receives FML proxy packets and dispatches them
     * to the handler registered for the side the packet came from.
     */
    @ChannelHandler.Sharable
    public static class CustomInboundHandler extends SimpleChannelInboundHandler<FMLProxyPacket>
    {
        // One dispatch target per logical side (CLIENT / SERVER).
        public EnumMap<Side, CustomHandler> handlers = Maps.newEnumMap(Side.class);
        @Override
        public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
            super.handlerAdded(ctx);
            // publish this instance on the channel so assignHandler() can find it
            ctx.channel().attr(cclHandler).set(this);
        }
        @Override
        protected void channelRead0(ChannelHandlerContext ctx, FMLProxyPacket msg) throws Exception {
            // Route by originating side; the channel attributes carry the net
            // handler and FML channel name needed for the dispatch.
            handlers.get(ctx.channel().attr(NetworkRegistry.CHANNEL_SOURCE).get())
                    .handle(ctx.channel().attr(NetworkRegistry.NET_HANDLER).get(),
                            ctx.channel().attr(NetworkRegistry.FML_CHANNEL).get(),
                            new PacketCustom(msg.payload()));
        }
    }
    // Internal side-agnostic dispatch target implemented by the two inbound
    // handler adapters below.
    private static interface CustomHandler
    {
        public void handle(INetHandler handler, String channel, PacketCustom packet) throws Exception;
    }
public static class ClientInboundHandler implements CustomHandler
{
private IClientPacketHandler handler;
public ClientInboundHandler(ICustomPacketHandler handler) {
this.handler = (IClientPacketHandler) handler;
}
@Override
public void handle(INetHandler netHandler, String channel, PacketCustom packet) throws Exception {
if (netHandler instanceof INetHandlerPlayClient)
handler.handlePacket(packet, Minecraft.getMinecraft(), (INetHandlerPlayClient) netHandler);
else
System.err.println("Invalid INetHandler for PacketCustom on channel: " + channel);
}
}
public static class ServerInboundHandler implements CustomHandler
{
private IServerPacketHandler handler;
public ServerInboundHandler(ICustomPacketHandler handler) {
this.handler = (IServerPacketHandler) handler;
}
@Override
public void handle(INetHandler netHandler, String channel, PacketCustom packet) throws Exception {
if (netHandler instanceof NetHandlerPlayServer)
handler.handlePacket(packet, ((NetHandlerPlayServer) netHandler).playerEntity, (INetHandlerPlayServer) netHandler);
else
System.err.println("Invalid INetHandler for PacketCustom on channel: " + channel);
}
}
    public static interface IHandshakeHandler
    {
        // Note: the "Recieved" spelling is part of the public API; renaming it
        // would break existing implementors.
        public void handshakeRecieved(NetHandlerPlayServer netHandler);
    }
    /**
     * Pipeline stage that notifies the given handler once the FML network
     * handshake completes on the server side; all other user events are
     * forwarded down the pipeline unchanged.
     */
    public static class HandshakeInboundHandler extends ChannelInboundHandlerAdapter
    {
        public IHandshakeHandler handler;
        public HandshakeInboundHandler(IHandshakeHandler handler) {
            this.handler = handler;
        }
        @Override
        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
            if (evt instanceof NetworkHandshakeEstablished) {
                // NOTE(review): the NetworkDispatcher is fetched from the
                // FML_MESSAGETARGETARGS channel attribute - confirm this is the
                // correct attribute for the targeted FML version.
                INetHandler netHandler = ((NetworkDispatcher) ctx.channel().attr(FMLOutboundHandler.FML_MESSAGETARGETARGS).get()).getNetHandler();
                if (netHandler instanceof NetHandlerPlayServer)
                    handler.handshakeRecieved((NetHandlerPlayServer) netHandler);
            } else
                ctx.fireUserEventTriggered(evt);
        }
    }
public static String channelName(Object channelKey)
{
if (channelKey instanceof String)
return (String) channelKey;
if (channelKey instanceof ModContainer) {
String s = ((ModContainer) channelKey).getModId();
if(s.length() > 20)
throw new IllegalArgumentException("Mod ID ("+s+") too long for use as channel (20 chars). Use a string identifier");
return s;
}
ModContainer mc = FMLCommonHandler.instance().findContainerFor(channelKey);
if (mc != null)
return mc.getModId();
throw new IllegalArgumentException("Invalid channel: " + channelKey);
}
public static FMLEmbeddedChannel getOrCreateChannel(String channelName, Side side)
{
if (!NetworkRegistry.INSTANCE.hasChannel(channelName, side))
NetworkRegistry.INSTANCE.newChannel(channelName, new CustomInboundHandler());
return NetworkRegistry.INSTANCE.getChannel(channelName, side);
}
public static void assignHandler(Object channelKey, ICustomPacketHandler handler)
{
String channelName = channelName(channelKey);
Side side = handler instanceof IServerPacketHandler ? Side.SERVER : Side.CLIENT;
FMLEmbeddedChannel channel = getOrCreateChannel(channelName, side);
channel.attr(cclHandler).get().handlers.put(side, side == Side.SERVER ? new ServerInboundHandler(handler) : new ClientInboundHandler(handler));
}
public static void assignHandshakeHandler(Object channelKey, IHandshakeHandler handler)
{
FMLEmbeddedChannel channel = getOrCreateChannel(channelName(channelKey), Side.SERVER);
channel.pipeline().addLast(new HandshakeInboundHandler(handler));
}
    // Backing buffer; replaced wholesale by decompress()/do_compress().
    private ByteBuf byteBuf;
    // Outgoing channel name; null for incoming packets (see incoming()).
    private String channel;
    // Packet discriminator in 1..0x7F; bit 0x80 marks the payload compressed.
    private int type;
    /**
     * Wraps an incoming payload. The first unsigned byte is the type; if the
     * compression bit (0x80) is set the remainder is inflated immediately, and
     * the flag is masked off afterwards.
     */
    public PacketCustom(ByteBuf payload) {
        byteBuf = payload;
        type = byteBuf.readUnsignedByte();
        if (type > 0x80)
            decompress();
        type &= 0x7F;
    }
    /**
     * Starts an outgoing packet on the given channel. Type must satisfy
     * 0 &lt; type &lt; 0x80 so the high bit stays free for the compression flag.
     */
    public PacketCustom(Object channelKey, int type) {
        if (type <= 0 || type >= 0x80)
            throw new IllegalArgumentException("Packet type: " + type + " is not within required 0 < t < 0x80");
        this.channel = channelName(channelKey);
        this.type = type;
        byteBuf = Unpooled.buffer();
        byteBuf.writeByte(type);
    }
    /**
     * Inflates the remainder of the buffer in place. Compressed payload layout
     * after the type byte: [int uncompressedLength][deflated bytes].
     */
    private void decompress() {
        Inflater inflater = new Inflater();
        try {
            int len = byteBuf.readInt();
            ByteBuf out = Unpooled.buffer(len);
            inflater.setInput(byteBuf.array(), byteBuf.readerIndex(), byteBuf.readableBytes());
            inflater.inflate(out.array());
            out.writerIndex(len);
            byteBuf = out;
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            inflater.end();
        }
    }
    /**
     * Deflates the payload (everything after the type byte) if that actually
     * saves space; otherwise leaves the buffer untouched. On success the
     * buffer is rebuilt as [type|0x80][int uncompressedLength][deflated bytes].
     */
    private void do_compress() {
        Deflater deflater = new Deflater();
        try {
            // skip the type byte - it is rewritten into the new header below
            byteBuf.readerIndex(1);
            int len = byteBuf.readableBytes();
            deflater.setInput(byteBuf.array(), byteBuf.readerIndex(), len);
            deflater.finish();
            ByteBuf out = Unpooled.buffer(len + 5);
            int clen = deflater.deflate(out.array(), 5, len);
            if (clen >= len - 5 || !deflater.finished())//not worth compressing, gets larger
                return;
            out.setByte(0, type | 0x80);
            out.setInt(1, len);
            out.writerIndex(clen + 5);
            byteBuf = out;
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            // rewind whichever buffer ended up current so it can be sent
            byteBuf.readerIndex(0);
            deflater.end();
        }
    }
    /** True for packets constructed from a received payload (no channel set). */
    public boolean incoming()
    {
        return channel == null;
    }
    /** Packet discriminator with the compression flag masked off. */
    public int getType()
    {
        return type & 0x7F;
    }
    /** Raw backing buffer - exposed for direct framework access. */
    public ByteBuf getByteBuf()
    {
        return byteBuf;
    }
    /**
     * Marks this outgoing packet for compression (applied when it is written
     * out). Only valid once, and only on outgoing packets.
     */
    public PacketCustom compress() {
        if (incoming())
            throw new IllegalStateException("Tried to compress an incoming packet");
        if ((type & 0x80) != 0)
            throw new IllegalStateException("Packet already compressed");
        type |= 0x80;
        return this;
    }
    // ---- Fluent writers: each appends to the buffer and returns this. ----
    public PacketCustom writeBoolean(boolean b)
    {
        byteBuf.writeBoolean(b);
        return this;
    }
    public PacketCustom writeByte(int b)
    {
        byteBuf.writeByte(b);
        return this;
    }
    public PacketCustom writeShort(int s)
    {
        byteBuf.writeShort(s);
        return this;
    }
    public PacketCustom writeInt(int i)
    {
        byteBuf.writeInt(i);
        return this;
    }
    public PacketCustom writeFloat(float f) {
        byteBuf.writeFloat(f);
        return this;
    }
    public PacketCustom writeDouble(double d) {
        byteBuf.writeDouble(d);
        return this;
    }
    public PacketCustom writeLong(long l) {
        byteBuf.writeLong(l);
        return this;
    }
    @Override
    public PacketCustom writeChar(char c) {
        byteBuf.writeChar(c);
        return this;
    }
    // Variable-length int, up to 5 bytes on the wire.
    public PacketCustom writeVarInt(int i) {
        ByteBufUtils.writeVarInt(byteBuf, i, 5);
        return this;
    }
    public PacketCustom writeVarShort(int s) {
        ByteBufUtils.writeVarShort(byteBuf, s);
        return this;
    }
    public PacketCustom writeByteArray(byte[] barray) {
        byteBuf.writeBytes(barray);
        return this;
    }
    // Length-prefixed UTF-8 string.
    public PacketCustom writeString(String s) {
        ByteBufUtils.writeUTF8String(byteBuf, s);
        return this;
    }
    public PacketCustom writeCoord(int x, int y, int z) {
        writeInt(x);
        writeInt(y);
        writeInt(z);
        return this;
    }
    // Packed BlockPos (single long), unlike the three-int overload above.
    public PacketCustom writeCoord(BlockPos coord) {
        writeLong(coord.toLong());
        return this;
    }
    public PacketCustom writeItemStack(ItemStack stack) {
        writeItemStack(stack, false);
        return this;
    }
    /**
     * Serializes an ItemStack: item id -1 encodes null; 'large' widens the
     * stack-size field from a byte to an int for oversized stacks.
     */
    public PacketCustom writeItemStack(ItemStack stack, boolean large) {
        if (stack == null) {
            writeShort(-1);
        } else {
            writeShort(Item.getIdFromItem(stack.getItem()));
            if (large)
                writeInt(stack.stackSize);
            else
                writeByte(stack.stackSize);
            writeShort(stack.getItemDamage());
            writeNBTTagCompound(stack.getTagCompound());//.stackTagCompound);
        }
        return this;
    }
    public PacketCustom writeNBTTagCompound(NBTTagCompound compound) {
        ByteBufUtils.writeTag(byteBuf, compound);
        return this;
    }
// ---------------------------------------------------------------------------
// Readers. These mirror the writers above and must be called in the same
// order the fields were written.
// ---------------------------------------------------------------------------
public boolean readBoolean() {
return byteBuf.readBoolean();
}
public short readUByte() {
return byteBuf.readUnsignedByte();
}
public int readUShort() {
return byteBuf.readUnsignedShort();
}
public byte readByte() {
return byteBuf.readByte();
}
public short readShort() {
return byteBuf.readShort();
}
public int readInt() {
return byteBuf.readInt();
}
public float readFloat() {
return byteBuf.readFloat();
}
public double readDouble() {
return byteBuf.readDouble();
}
public long readLong() {
return byteBuf.readLong();
}
public char readChar() {
return byteBuf.readChar();
}
@Override
public int readVarShort() {
return ByteBufUtils.readVarShort(byteBuf);
}
@Override
public int readVarInt() {
return ByteBufUtils.readVarInt(byteBuf, 5);
}
// Decodes the 12-byte int triple written by writeCoord(int, int, int).
public BlockPos readCoord() {
return new BlockPos(readInt(), readInt(), readInt());
}
// Caller supplies the length; writeByteArray does not prefix it on the wire.
public byte[] readByteArray(int length) {
byte[] barray = new byte[length];
byteBuf.readBytes(barray, 0, length);
return barray;
}
public String readString() {
return ByteBufUtils.readUTF8String(byteBuf);
}
public ItemStack readItemStack() {
return readItemStack(false);
}
// Inverse of writeItemStack(stack, large); returns null when the id short is -1.
public ItemStack readItemStack(boolean large) {
ItemStack item = null;
short itemID = readShort();
if (itemID >= 0) {
int stackSize = large ? readInt() : readByte();
short damage = readShort();
item = new ItemStack(Item.getItemById(itemID), stackSize, damage);
item.setTagCompound(readNBTTagCompound());
}
return item;
}
public NBTTagCompound readNBTTagCompound() {
return ByteBufUtils.readTag(byteBuf);
}
/**
 * Converts this outgoing packet into an FML proxy packet, compressing the
 * payload first when it was flagged via compress() or exceeds 32000 bytes.
 *
 * @throws IllegalStateException if called on an incoming packet
 */
public FMLProxyPacket toPacket() {
    if (incoming()) {
        throw new IllegalStateException("Tried to write an incoming packet");
    }
    boolean flaggedForCompression = (type & 0x80) != 0;
    if (flaggedForCompression || byteBuf.readableBytes() > 32000) {
        do_compress();
    }
    // FML's packet impl exposes the whole backing array, so hand it a trimmed copy.
    return new FMLProxyPacket(new PacketBuffer(byteBuf.copy()), channel);
}
// ---------------------------------------------------------------------------
// Dispatch helpers. Instance variants convert this packet via toPacket();
// static variants send an already-built packet. Server-side unless annotated
// @SideOnly(Side.CLIENT).
// ---------------------------------------------------------------------------
public void sendToPlayer(EntityPlayer player) {
sendToPlayer(toPacket(), player);
}
// A null player broadcasts to every connected client.
public static void sendToPlayer(Packet packet, EntityPlayer player) {
if (player == null)
sendToClients(packet);
else
((EntityPlayerMP) player).playerNetServerHandler.sendPacket(packet);
}
public void sendToClients() {
sendToClients(toPacket());
}
public static void sendToClients(Packet packet) {
FMLServerHandler.instance().getServer().getPlayerList().sendPacketToAllPlayers(packet);
}
public void sendPacketToAllAround(double x, double y, double z, double range, int dim) {
sendToAllAround(toPacket(), x, y, z, range, dim);
}
public static void sendToAllAround(Packet packet, double x, double y, double z, double range, int dim) {
FMLServerHandler.instance().getServer().getPlayerList().sendToAllNearExcept(null, x, y, z, range, dim, packet);//.sendToAllNear();
}
public void sendToDimension(int dim) {
sendToDimension(toPacket(), dim);
}
public static void sendToDimension(Packet packet, int dim) {
FMLServerHandler.instance().getServer().getPlayerList().sendPacketToAllPlayersInDimension(packet, dim);
}
public void sendToChunk(World world, int chunkX, int chunkZ) {
sendToChunk(toPacket(), world, chunkX, chunkZ);
}
// Sends to every player currently watching the given chunk.
public static void sendToChunk(Packet packet, World world, int chunkX, int chunkZ) {
PlayerManager playerManager = ((WorldServer)world).getPlayerChunkMap();
for (EntityPlayerMP player : (List<EntityPlayerMP>) FMLServerHandler.instance().getServer().getPlayerList().getPlayerList())
if(playerManager.isPlayerWatchingChunk(player, chunkX, chunkZ))
sendToPlayer(packet, player);
/* Commented until forge accepts access tranformer request
PlayerInstance p = ((WorldServer) world).getPlayerManager().getOrCreateChunkWatcher(chunkX, chunkZ, false);
if (p != null)
p.sendToAllPlayersWatchingChunk(packet);*/
}
public void sendToOps() {
sendToOps(toPacket());
}
// Sends only to players present in the server's op list.
public static void sendToOps(Packet packet) {
for (EntityPlayerMP player : (List<EntityPlayerMP>) FMLServerHandler.instance().getServer().getPlayerList().getPlayerList())
if (ArrayUtils.contains(FMLServerHandler.instance().getServer().getPlayerList().getOppedPlayers().getKeys(), player.getName()))
sendToPlayer(packet, player);
}
// Client-side only: queues this packet for delivery to the server.
@SideOnly(Side.CLIENT)
public void sendToServer() {
sendToServer(toPacket());
}
@SideOnly(Side.CLIENT)
public static void sendToServer(Packet packet) {
Minecraft.getMinecraft().getNetHandler().addToSendQueue(packet);
}
}
| |
/**
* Copyright 2011-2016 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.bridge.api;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import java.io.IOException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExternalResource;
import com.asakusafw.bridge.broker.ResourceBroker;
import com.asakusafw.bridge.broker.ResourceBrokerContext;
import com.asakusafw.runtime.core.Report.Delegate;
import com.asakusafw.runtime.core.Report.FailedException;
import com.asakusafw.runtime.core.Report.Level;
import com.asakusafw.runtime.core.ResourceConfiguration;
/**
* Test for {@link Report}.
*/
public class ReportTest {
/**
* setup/cleanup the test case.
*/
@Rule
public final ResourceBrokerContext brokerContext = new ResourceBrokerContext(true);
/**
* initializes tracer.
*/
@Rule
public final ExternalResource initializer = new ExternalResource() {
@Override
protected void before() throws Throwable {
reset();
}
@Override
protected void after() {
reset();
}
private void reset() {
Tracer.lastLevel = null;
Tracer.lastMessage = null;
Tracer.lastThrowable = null;
}
};
private void configure(Class<? extends Delegate> aClass) {
ResourceConfiguration conf = new MapConfiguration();
conf.set(ReportAdapter.CLASS_DELEGATE, aClass.getName());
ResourceBroker.put(ResourceConfiguration.class, conf);
}
/**
* info.
*/
@Test
public void trace_info() {
configure(Tracer.class);
Report.info("testing");
assertThat(Tracer.lastLevel, is(Level.INFO));
assertThat(Tracer.lastMessage, is("testing"));
assertThat(Tracer.lastThrowable, is(nullValue()));
}
/**
* info w/ exception.
*/
@Test
public void trace_info_exception() {
configure(Tracer.class);
Report.info("testing", new UnsupportedOperationException());
assertThat(Tracer.lastLevel, is(Level.INFO));
assertThat(Tracer.lastMessage, is("testing"));
assertThat(Tracer.lastThrowable, is(instanceOf(UnsupportedOperationException.class)));
}
/**
* warn.
*/
@Test
public void trace_warn() {
configure(Tracer.class);
Report.warn("testing");
assertThat(Tracer.lastLevel, is(Level.WARN));
assertThat(Tracer.lastMessage, is("testing"));
assertThat(Tracer.lastThrowable, is(nullValue()));
}
/**
* warn w/ exception.
*/
@Test
public void trace_warn_exception() {
configure(Tracer.class);
Report.warn("testing", new UnsupportedOperationException());
assertThat(Tracer.lastLevel, is(Level.WARN));
assertThat(Tracer.lastMessage, is("testing"));
assertThat(Tracer.lastThrowable, is(instanceOf(UnsupportedOperationException.class)));
}
/**
* error.
*/
@Test
public void trace_error() {
configure(Tracer.class);
Report.error("testing");
assertThat(Tracer.lastLevel, is(Level.ERROR));
assertThat(Tracer.lastMessage, is("testing"));
assertThat(Tracer.lastThrowable, is(nullValue()));
}
/**
* error w/ exception.
*/
@Test
public void trace_error_exception() {
configure(Tracer.class);
Report.error("testing", new UnsupportedOperationException());
assertThat(Tracer.lastLevel, is(Level.ERROR));
assertThat(Tracer.lastMessage, is("testing"));
assertThat(Tracer.lastThrowable, is(instanceOf(UnsupportedOperationException.class)));
}
/**
* info.
*/
@Test(expected = FailedException.class)
public void fail_info() {
configure(Raiser.class);
Report.info("testing");
}
/**
* info w/ exception.
*/
@Test(expected = FailedException.class)
public void fail_info_exception() {
configure(Raiser.class);
Report.info("testing", new UnsupportedOperationException());
}
/**
* warn.
*/
@Test(expected = FailedException.class)
public void fail_warn() {
configure(Raiser.class);
Report.warn("testing");
}
/**
* warn w/ exception.
*/
@Test(expected = FailedException.class)
public void fail_warn_exception() {
configure(Raiser.class);
Report.warn("testing", new UnsupportedOperationException());
}
/**
* error.
*/
@Test(expected = FailedException.class)
public void fail_error() {
configure(Raiser.class);
Report.error("testing");
}
/**
* error w/ exception.
*/
@Test(expected = FailedException.class)
public void fail_error_exception() {
configure(Raiser.class);
Report.error("testing", new UnsupportedOperationException());
}
/**
* Traces report API.
*/
public static final class Tracer extends Delegate {
static Level lastLevel;
static String lastMessage;
static Throwable lastThrowable;
@Override
public void report(Level level, String message) throws IOException {
report(level, message, null);
}
@Override
public void report(Level level, String message, Throwable throwable) throws IOException {
lastLevel = level;
lastMessage = message;
lastThrowable = throwable;
}
}
/**
* Raises error.
*/
public static final class Raiser extends Delegate {
@Override
public void report(Level level, String message) throws IOException {
report(level, message, null);
}
@Override
public void report(Level level, String message, Throwable throwable) throws IOException {
throw new IOException(message, throwable);
}
}
}
| |
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.text.method;
import android.graphics.Rect;
import android.text.Editable;
import android.text.GetChars;
import android.text.Spannable;
import android.text.Spanned;
import android.text.SpannedString;
import android.text.TextUtils;
import android.view.View;
/**
* This transformation method causes the characters in the {@link #getOriginal}
* array to be replaced by the corresponding characters in the
* {@link #getReplacement} array.
*/
public abstract class ReplacementTransformationMethod
implements TransformationMethod
{
/**
 * Returns the list of characters that are to be replaced by other
 * characters when displayed.
 */
protected abstract char[] getOriginal();
/**
 * Returns a parallel array of replacement characters for the ones
 * that are to be replaced: replacement[i] is shown wherever original[i]
 * occurs in the source text.
 */
protected abstract char[] getReplacement();
/**
 * Returns a CharSequence that will mirror the contents of the
 * source CharSequence but with the characters in {@link #getOriginal}
 * replaced by ones from {@link #getReplacement}.
 * For non-editable text the replacement may be flattened eagerly; editable
 * or spannable text gets a live wrapper that substitutes at read time.
 */
public CharSequence getTransformation(CharSequence source, View v) {
char[] original = getOriginal();
char[] replacement = getReplacement();
/*
 * Short circuit for faster display if the text will never change.
 */
if (!(source instanceof Editable)) {
/*
 * Check whether the text does not contain any of the
 * source characters so can be used unchanged.
 */
boolean doNothing = true;
int n = original.length;
for (int i = 0; i < n; i++) {
if (TextUtils.indexOf(source, original[i]) >= 0) {
doNothing = false;
break;
}
}
if (doNothing) {
return source;
}
if (!(source instanceof Spannable)) {
/*
 * The text contains some of the source characters,
 * but they can be flattened out now instead of
 * at display time.
 */
if (source instanceof Spanned) {
// flatten but keep the spans attached to the original text
return new SpannedString(new SpannedReplacementCharSequence(
(Spanned) source,
original, replacement));
} else {
// plain text: materialise the replaced string once
return new ReplacementCharSequence(source,
original,
replacement).toString();
}
}
}
// Editable/Spannable text can change later, so return a live view that
// performs the substitution on every read instead of a snapshot.
if (source instanceof Spanned) {
return new SpannedReplacementCharSequence((Spanned) source,
original, replacement);
} else {
return new ReplacementCharSequence(source, original, replacement);
}
}
public void onFocusChanged(View view, CharSequence sourceText,
boolean focused, int direction,
Rect previouslyFocusedRect) {
// This callback isn't used.
}
/**
 * Live view of a CharSequence that substitutes characters from mOriginal
 * with the parallel entries of mReplacement on every read.
 */
private static class ReplacementCharSequence
implements CharSequence, GetChars {
private char[] mOriginal, mReplacement;
public ReplacementCharSequence(CharSequence source, char[] original,
char[] replacement) {
mSource = source;
mOriginal = original;
mReplacement = replacement;
}
public int length() {
return mSource.length();
}
public char charAt(int i) {
char c = mSource.charAt(i);
// linear scan; if a char appears more than once in mOriginal the LAST
// matching replacement wins (each match overwrites c)
int n = mOriginal.length;
for (int j = 0; j < n; j++) {
if (c == mOriginal[j]) {
c = mReplacement[j];
}
}
return c;
}
public CharSequence subSequence(int start, int end) {
char[] c = new char[end - start];
getChars(start, end, c, 0);
return new String(c);
}
public String toString() {
char[] c = new char[length()];
getChars(0, length(), c, 0);
return new String(c);
}
// Copies from the source, then substitutes in place within dest[off..offend).
public void getChars(int start, int end, char[] dest, int off) {
TextUtils.getChars(mSource, start, end, dest, off);
int offend = end - start + off;
int n = mOriginal.length;
for (int i = off; i < offend; i++) {
char c = dest[i];
for (int j = 0; j < n; j++) {
if (c == mOriginal[j]) {
dest[i] = mReplacement[j];
}
}
}
}
private CharSequence mSource;
}
/**
 * Same substitution view, but also implements Spanned by delegating every
 * span query to the wrapped source.
 */
private static class SpannedReplacementCharSequence
extends ReplacementCharSequence
implements Spanned
{
public SpannedReplacementCharSequence(Spanned source, char[] original,
char[] replacement) {
super(source, original, replacement);
mSpanned = source;
}
public CharSequence subSequence(int start, int end) {
return new SpannedString(this).subSequence(start, end);
}
public <T> T[] getSpans(int start, int end, Class<T> type) {
return mSpanned.getSpans(start, end, type);
}
public int getSpanStart(Object tag) {
return mSpanned.getSpanStart(tag);
}
public int getSpanEnd(Object tag) {
return mSpanned.getSpanEnd(tag);
}
public int getSpanFlags(Object tag) {
return mSpanned.getSpanFlags(tag);
}
public int nextSpanTransition(int start, int end, Class type) {
return mSpanned.nextSpanTransition(start, end, type);
}
private Spanned mSpanned;
}
}
| |
/**
* $Revision $
* $Date $
*
* Copyright (C) 2005-2010 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.community.util;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jivesoftware.util.*;
/**
 * Per-user date/time formatting helpers plus static calendar arithmetic.
 * Instance formatters are created lazily and cached (FastDateFormat is
 * thread-safe) and honour the configured locale/time zone; the static
 * helpers operate on the JVM default time zone unless noted.
 */
public class DateUtils
{
    private static final Logger log = LoggerFactory.getLogger(DateUtils.class);

    /** Fallback pattern used when the "date.defaultPattern" property is unset. */
    public static final String DEFAULT_DATE_PATTERN = "MM/dd/yyyy";

    private TimeZone timeZone;

    // Lazily-initialised formatter cache.
    private FastDateFormat dateFormat;
    private FastDateFormat shortDateFormat;
    private FastDateFormat mediumDateFormat;
    private FastDateFormat longDateFormat;
    private FastDateFormat fullDateFormat;
    private FastDateFormat timeFormat;
    private FastDateFormat shortTimeFormat;
    private FastDateFormat mediumTimeFormat;
    private FastDateFormat longTimeFormat;
    private FastDateFormat fullTimeFormat;
    private Locale locale;

    // NOTE(review): 0x4f1a00 == 5,184,000 seconds == 60 days, yet the name
    // suggests a single day — confirm what consumers expect before changing.
    public static final long DURATION_DAY_S = 0x4f1a00L;

    /**
     * Creates a helper bound to the globally configured locale and time zone.
     * The request parameter is currently unused.
     */
    public DateUtils(HttpServletRequest request)
    {
        locale = JiveGlobals.getLocale();
        timeZone = JiveGlobals.getTimeZone();
    }

    /** Creates a helper bound to an explicit locale and time zone. */
    public DateUtils(Locale locale, TimeZone timeZone)
    {
        this.locale = locale;
        this.timeZone = timeZone;
    }

    /** Creates a helper bound to the globally configured locale and time zone. */
    public DateUtils()
    {
        locale = JiveGlobals.getLocale();
        timeZone = JiveGlobals.getTimeZone();
    }

    public TimeZone getTimeZone()
    {
        return timeZone;
    }

    /** Formats with the default date-time format; empty string for null. */
    public String formatDate(Date date)
    {
        return date == null ? "" : getDateFormat().format(date);
    }

    public String getFullFormatDate()
    {
        return getFullFormatDate(new Date());
    }

    public String getFullFormatDate(Date date)
    {
        return date == null ? "" : getFullDateFormat().format(date);
    }

    public String getLongFormatDate()
    {
        return getLongFormatDate(new Date());
    }

    public String getLongFormatDate(Date date)
    {
        return date == null ? "" : getLongDateFormat().format(date);
    }

    public String getMediumFormatDate()
    {
        return getMediumFormatDate(new Date());
    }

    public String getMediumFormatDate(Date date)
    {
        return date == null ? "" : getMediumDateFormat().format(date);
    }

    public String getShortFormatDate()
    {
        return getShortFormatDate(new Date());
    }

    public String getShortFormatDate(Date date)
    {
        return date == null ? "" : getShortDateFormat().format(date);
    }

    /** Returns the configured date pattern, falling back to {@link #DEFAULT_DATE_PATTERN}. */
    public static String getDatePattern()
    {
        return JiveGlobals.getProperty("date.defaultPattern", DEFAULT_DATE_PATTERN);
    }

    public FastDateFormat getDateFormat()
    {
        if (dateFormat == null)
            dateFormat = FastDateFormat.getDateTimeInstance(FastDateFormat.MEDIUM, FastDateFormat.SHORT, getTimeZone(), getLocale());
        return dateFormat;
    }

    public FastDateFormat getFullDateFormat()
    {
        if (fullDateFormat == null)
            fullDateFormat = FastDateFormat.getDateInstance(FastDateFormat.FULL, getTimeZone(), getLocale());
        return fullDateFormat;
    }

    public FastDateFormat getLongDateFormat()
    {
        if (longDateFormat == null)
            longDateFormat = FastDateFormat.getDateInstance(FastDateFormat.LONG, getTimeZone(), getLocale());
        return longDateFormat;
    }

    public FastDateFormat getMediumDateFormat()
    {
        if (mediumDateFormat == null)
            mediumDateFormat = FastDateFormat.getDateInstance(FastDateFormat.MEDIUM, getTimeZone(), getLocale());
        return mediumDateFormat;
    }

    public FastDateFormat getShortDateFormat()
    {
        if (shortDateFormat == null)
            shortDateFormat = FastDateFormat.getDateInstance(FastDateFormat.SHORT, getTimeZone(), getLocale());
        return shortDateFormat;
    }

    public FastDateFormat getTimeFormat()
    {
        if (timeFormat == null)
            timeFormat = FastDateFormat.getTimeInstance(FastDateFormat.MEDIUM, getTimeZone(), getLocale());
        return timeFormat;
    }

    public FastDateFormat getFullTimeFormat()
    {
        if (fullTimeFormat == null)
            fullTimeFormat = FastDateFormat.getTimeInstance(FastDateFormat.FULL, getTimeZone(), getLocale());
        return fullTimeFormat;
    }

    public FastDateFormat getLongTimeFormat()
    {
        if (longTimeFormat == null)
            longTimeFormat = FastDateFormat.getTimeInstance(FastDateFormat.LONG, getTimeZone(), getLocale());
        return longTimeFormat;
    }

    public FastDateFormat getMediumTimeFormat()
    {
        if (mediumTimeFormat == null)
            mediumTimeFormat = FastDateFormat.getTimeInstance(FastDateFormat.MEDIUM, getTimeZone(), getLocale());
        return mediumTimeFormat;
    }

    public FastDateFormat getShortTimeFormat()
    {
        if (shortTimeFormat == null)
            shortTimeFormat = FastDateFormat.getTimeInstance(FastDateFormat.SHORT, getTimeZone(), getLocale());
        return shortTimeFormat;
    }

    public String getFullFormatTime(Date date)
    {
        return date == null ? "" : getFullTimeFormat().format(date);
    }

    public String getLongFormatTime(Date date)
    {
        return date == null ? "" : getLongTimeFormat().format(date);
    }

    public String getMediumFormatTime(Date date)
    {
        return date == null ? "" : getMediumTimeFormat().format(date);
    }

    public String getShortFormatTime(Date date)
    {
        return date == null ? "" : getShortTimeFormat().format(date);
    }

    /** Start of the given day in this instance's configured time zone. */
    public Date toUserStartOfDay(Date date)
    {
        Calendar calendar = Calendar.getInstance(getTimeZone());
        calendar.setTime(date);
        toStartOfDay(calendar);
        return calendar.getTime();
    }

    /** End of the given day (23:59:59.999) in this instance's configured time zone. */
    public Date toUserEndOfDay(Date date)
    {
        Calendar calendar = Calendar.getInstance(getTimeZone());
        calendar.setTime(date);
        toEndOfDay(calendar);
        return calendar.getTime();
    }

    private Locale getLocale()
    {
        return locale;
    }

    public static Date roundDate(Date date, int seconds)
    {
        return new Date(roundDate(date.getTime(), seconds));
    }

    /** Truncates the timestamp down to a multiple of the given number of seconds. */
    public static long roundDate(long date, int seconds)
    {
        // multiply in long space; (long)(1000 * seconds) could overflow first
        return date - date % (1000L * seconds);
    }

    /** The largest supported timestamp (999,999,999,999,999 ms). */
    public static Date getMaxDate()
    {
        return new Date(0x38d7ea4c67fffL);
    }

    public static Date daysLater(int days, Calendar from)
    {
        from.add(Calendar.DAY_OF_YEAR, days);
        return from.getTime();
    }

    public static Date daysLater(int days, long from)
    {
        Calendar date = Calendar.getInstance();
        date.setTimeInMillis(from);
        return daysLater(days, date);
    }

    public static Date daysBefore(int days, long from)
    {
        Calendar date = Calendar.getInstance();
        date.setTimeInMillis(from);
        return daysLater(-days, date);
    }

    public static Date daysBefore(int days, Calendar from)
    {
        from.add(Calendar.DAY_OF_YEAR, -days);
        return from.getTime();
    }

    public static Date daysBefore(int days)
    {
        return daysBefore(days, System.currentTimeMillis());
    }

    public static Date daysLater(int days)
    {
        return daysLater(days, System.currentTimeMillis());
    }

    /** Midnight today in the default time zone. */
    public static Date today()
    {
        Calendar date = Calendar.getInstance();
        toMidnight(date);
        return date.getTime();
    }

    public static Date now()
    {
        return Calendar.getInstance().getTime();
    }

    // toMidnight and toStartOfDay are synonyms; keep both public entry points
    // but share one implementation.
    private static void toMidnight(Calendar date)
    {
        toStartOfDay(date);
    }

    public static Date toMidnight(Date date)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        toMidnight(calendar);
        return calendar.getTime();
    }

    private static void toStartOfDay(Calendar date)
    {
        date.set(Calendar.HOUR_OF_DAY, 0);
        date.set(Calendar.MINUTE, 0);
        date.set(Calendar.SECOND, 0);
        date.set(Calendar.MILLISECOND, 0);
    }

    public static Date toStartOfDay(Date date)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        toStartOfDay(calendar);
        return calendar.getTime();
    }

    private static void toEndOfDay(Calendar date)
    {
        date.set(Calendar.HOUR_OF_DAY, 23);
        date.set(Calendar.MINUTE, 59);
        date.set(Calendar.SECOND, 59);
        date.set(Calendar.MILLISECOND, 999);
    }

    public static Date toEndOfDay(Date date)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        toEndOfDay(calendar);
        return calendar.getTime();
    }

    public static Date hoursLater(int i, Date from)
    {
        return later(i, Calendar.HOUR_OF_DAY, from);
    }

    public static Date minutesLater(int i, Date from)
    {
        return later(i, Calendar.MINUTE, from);
    }

    public static Date secondsLater(int i, Date from)
    {
        return later(i, Calendar.SECOND, from);
    }

    /** Adds i units of the given Calendar field to from. */
    private static Date later(int i, int field, Date from)
    {
        Calendar date = toCalendar(from);
        date.add(field, i);
        return date.getTime();
    }

    public static Date hoursBefore(int i, Date from)
    {
        return before(i, Calendar.HOUR_OF_DAY, from);
    }

    public static Date minutesBefore(int i, Date from)
    {
        return before(i, Calendar.MINUTE, from);
    }

    public static Date secondsBefore(int i, Date from)
    {
        return before(i, Calendar.SECOND, from);
    }

    /** Subtracts i units of the given Calendar field from from. */
    private static Date before(int i, int field, Date from)
    {
        Calendar date = toCalendar(from);
        date.add(field, -i);
        return date.getTime();
    }

    private static Calendar toCalendar(Date date)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(date.getTime());
        return calendar;
    }

    public static Date yesterday()
    {
        return daysLater(-1, today().getTime());
    }

    public static Date tomorrow()
    {
        return daysLater(1, today().getTime());
    }

    public String displayFriendly(long time)
    {
        return displayFriendly(time, 2);
    }

    public String displayFriendly(Date date)
    {
        return displayFriendly(date, 2);
    }

    public String displayFriendly(long time, int limit)
    {
        return displayFriendly(new Date(time), limit);
    }

    /**
     * NOTE(review): the limit parameter is ignored and the raw Date.toString()
     * is returned — this looks like a stub; confirm before relying on it.
     */
    public String displayFriendly(Date date, int limit)
    {
        return date.toString();
    }

    /**
     * Parses a date string using the pattern supplied by {@code DateValidator}.
     *
     * @param value the text to parse, may be null
     * @return the parsed date, or null when value is null or unparseable
     */
    public static Date parseDate(String value)
    {
        Date date = null;
        if (value != null)
        {
            SimpleDateFormat sdf = new SimpleDateFormat(new DateValidator().getPattern());
            try
            {
                date = sdf.parse(value);
            }
            catch (ParseException e)
            {
                // parameterized logging instead of manual StringBuilder concatenation
                log.info("Unable to parse user profile date: {}. Date will not be indexed.", value);
            }
        }
        return date;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.impl.logsegment;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import io.netty.util.ReferenceCountUtil;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import org.apache.bookkeeper.client.AsyncCallback;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BookKeeper;
import org.apache.bookkeeper.client.LedgerEntry;
import org.apache.bookkeeper.client.LedgerHandle;
import org.apache.bookkeeper.common.concurrent.FutureUtils;
import org.apache.bookkeeper.common.util.OrderedScheduler;
import org.apache.bookkeeper.common.util.SafeRunnable;
import org.apache.bookkeeper.stats.Counter;
import org.apache.bookkeeper.stats.StatsLogger;
import org.apache.distributedlog.DistributedLogConfiguration;
import org.apache.distributedlog.Entry;
import org.apache.distributedlog.LogSegmentMetadata;
import org.apache.distributedlog.exceptions.BKTransmitException;
import org.apache.distributedlog.exceptions.DLIllegalStateException;
import org.apache.distributedlog.exceptions.DLInterruptedException;
import org.apache.distributedlog.exceptions.EndOfLogSegmentException;
import org.apache.distributedlog.exceptions.ReadCancelledException;
import org.apache.distributedlog.injector.AsyncFailureInjector;
import org.apache.distributedlog.logsegment.LogSegmentEntryReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* BookKeeper ledger based log segment entry reader.
*/
public class BKLogSegmentEntryReader implements SafeRunnable, LogSegmentEntryReader, AsyncCallback.OpenCallback {
private static final Logger logger = LoggerFactory.getLogger(BKLogSegmentEntryReader.class);
/**
 * A slot in the read-ahead cache tracking the read of a single ledger entry.
 *
 * <p>The slot completes successfully when the entry arrives, completes
 * exceptionally on unrecoverable bookkeeper errors, and reschedules itself
 * (with linear backoff) on transient failures. All mutable state is guarded
 * by {@code this}.
 */
private class CacheEntry implements SafeRunnable, AsyncCallback.ReadCallback,
        AsyncCallback.ReadLastConfirmedAndEntryCallback {

    protected final long entryId;
    private boolean done;
    private LedgerEntry entry;
    private int rc;

    private CacheEntry(long entryId) {
        this.entryId = entryId;
        this.entry = null;
        // placeholder result code until a real outcome arrives
        this.rc = BKException.Code.UnexpectedConditionException;
        this.done = false;
    }

    long getEntryId() {
        return entryId;
    }

    synchronized boolean isDone() {
        return done;
    }

    /** Releases the cached entry's buffer, if one is held. */
    synchronized void release() {
        if (null != this.entry) {
            this.entry.getEntryBuffer().release();
            this.entry = null;
        }
    }

    /** Releases the buffer of an entry that will not be cached. */
    void release(LedgerEntry entry) {
        if (null != entry) {
            entry.getEntryBuffer().release();
        }
    }

    /** Completes this slot with the given entry, caching it for readers. */
    void complete(LedgerEntry entry) {
        // the reader is already closed: drop the entry and release its buffer.
        // BUGFIX: must return here — previously execution fell through and
        // cached the entry that was just released, so a later getEntry()
        // would retain a buffer whose refcount had already reached zero.
        if (isClosed()) {
            release(entry);
            return;
        }
        synchronized (this) {
            if (done) {
                return;
            }
            this.rc = BKException.Code.OK;
            this.entry = entry;
        }
        setDone(true);
    }

    void completeExceptionally(int rc) {
        synchronized (this) {
            if (done) {
                return;
            }
            this.rc = rc;
        }
        setDone(false);
    }

    /** Marks the slot done, then notifies the reader outside the lock. */
    void setDone(boolean success) {
        synchronized (this) {
            this.done = true;
        }
        onReadEntryDone(success);
    }

    synchronized boolean isSuccess() {
        return BKException.Code.OK == rc;
    }

    synchronized LedgerEntry getEntry() {
        // retain reference for the caller
        this.entry.getEntryBuffer().retain();
        return this.entry;
    }

    synchronized int getRc() {
        return rc;
    }

    @Override
    public void readComplete(int rc,
                             LedgerHandle lh,
                             Enumeration<LedgerEntry> entries,
                             Object ctx) {
        // fault-injection hook used by tests to simulate corrupt entries
        if (failureInjector.shouldInjectCorruption(entryId, entryId)) {
            rc = BKException.Code.DigestMatchException;
        }
        processReadEntries(rc, lh, entries, ctx);
    }

    /** Handles the result of a plain (non long-poll) entry read. */
    void processReadEntries(int rc,
                            LedgerHandle lh,
                            Enumeration<LedgerEntry> entries,
                            Object ctx) {
        if (isDone()) {
            return;
        }
        if (!checkReturnCodeAndHandleFailure(rc, false)) {
            return;
        }
        LedgerEntry entry = null;
        while (entries.hasMoreElements()) {
            // we asked for exactly one entry; more than one is a protocol error
            if (null != entry) {
                completeExceptionally(BKException.Code.UnexpectedConditionException);
                return;
            }
            entry = entries.nextElement();
        }
        if (null == entry || entry.getEntryId() != entryId) {
            completeExceptionally(BKException.Code.UnexpectedConditionException);
            return;
        }
        complete(entry);
    }

    @Override
    public void readLastConfirmedAndEntryComplete(int rc,
                                                  long entryId,
                                                  LedgerEntry entry,
                                                  Object ctx) {
        if (failureInjector.shouldInjectCorruption(this.entryId, this.entryId)) {
            rc = BKException.Code.DigestMatchException;
        }
        processReadEntry(rc, entryId, entry, ctx);
    }

    /** Handles the result of a long-poll read; re-issues on timeout. */
    void processReadEntry(int rc,
                          long entryId,
                          LedgerEntry entry,
                          Object ctx) {
        if (isDone()) {
            return;
        }
        if (!checkReturnCodeAndHandleFailure(rc, true)) {
            return;
        }
        if (null != entry && this.entryId == entryId) {
            complete(entry);
            return;
        }
        // the long poll is timeout or interrupted; we will retry it again.
        issueRead(this);
    }

    /**
     * Check return code and retry if needed.
     *
     * @param rc the return code
     * @param isLongPoll is it a long poll request
     * @return is the request successful or not
     */
    boolean checkReturnCodeAndHandleFailure(int rc, boolean isLongPoll) {
        if (BKException.Code.OK == rc) {
            numReadErrorsUpdater.set(BKLogSegmentEntryReader.this, 0);
            return true;
        }
        if (BKException.Code.BookieHandleNotAvailableException == rc
                || (isLongPoll && BKException.Code.NoSuchLedgerExistsException == rc)) {
            // transient failure: linear backoff with the consecutive-error count, capped
            int numErrors = Math.max(1, numReadErrorsUpdater.incrementAndGet(BKLogSegmentEntryReader.this));
            int nextReadBackoffTime = Math.min(numErrors * readAheadWaitTime, maxReadBackoffTime);
            scheduler.scheduleOrdered(
                    getSegment().getLogSegmentId(),
                    this,
                    nextReadBackoffTime,
                    TimeUnit.MILLISECONDS);
        } else {
            completeExceptionally(rc);
        }
        return false;
    }

    @Override
    public void safeRun() {
        issueRead(this);
    }
}
/**
 * A pending {@link #readNext(int)} request: accumulates up to {@code numEntries}
 * entries drained from the readahead cache and completes the caller's future with them.
 */
private class PendingReadRequest {
    // maximum number of entries this request will accept
    private final int numEntries;
    // entries collected so far, in the order they were read
    private final List<Entry.Reader> entries;
    // future handed back to the caller of readNext()
    private final CompletableFuture<List<Entry.Reader>> promise;

    PendingReadRequest(int numEntries) {
        this.numEntries = numEntries;
        // presize to the requested count (the original special-cased only numEntries == 1);
        // clamp to 1 so a non-positive request can't make the ArrayList constructor throw
        this.entries = new ArrayList<Entry.Reader>(Math.max(numEntries, 1));
        this.promise = new CompletableFuture<List<Entry.Reader>>();
    }

    CompletableFuture<List<Entry.Reader>> getPromise() {
        return promise;
    }

    void completeExceptionally(Throwable throwable) {
        FutureUtils.completeExceptionally(promise, throwable);
    }

    void addEntry(Entry.Reader entry) {
        entries.add(entry);
    }

    /** Complete the caller's future and let the reader refill the consumed cache slots. */
    void complete() {
        FutureUtils.complete(promise, entries);
        onEntriesConsumed(entries.size());
    }

    boolean hasReadEntries() {
        return entries.size() > 0;
    }

    boolean hasReadEnoughEntries() {
        return entries.size() >= numEntries;
    }
}
// --- collaborators & immutable configuration ---
private final BookKeeper bk;
private final DistributedLogConfiguration conf;
private final OrderedScheduler scheduler;
// log segment sequence number, stamped onto every Entry.Reader built by this reader
private final long lssn;
private final long startSequenceId;
private final boolean envelopeEntries;
private final boolean deserializeRecordSet;
// prefetch targets: keep up to numPrefetchEntries reads in flight, cache at most maxPrefetchEntries
private final int numPrefetchEntries;
private final int maxPrefetchEntries;
// state
// non-null once asyncClose() has been called; doubles as the "closed" flag
private CompletableFuture<Void> closePromise = null;
private LogSegmentMetadata metadata;
// current ledger handle; replaced when an inprogress segment completes (see openComplete)
private LedgerHandle lh;
// every handle ever opened by this reader; all are closed in asyncClose()
private final List<LedgerHandle> openLedgerHandles;
// the long-poll read currently in flight, if any (reissued on segment completion)
private CacheEntry outstandingLongPoll;
// next entry id to prefetch
private long nextEntryId;
// first exception encountered; only the first one is ever recorded (CAS from null)
private static final AtomicReferenceFieldUpdater<BKLogSegmentEntryReader, Throwable> lastExceptionUpdater =
AtomicReferenceFieldUpdater.newUpdater(BKLogSegmentEntryReader.class, Throwable.class, "lastException");
private volatile Throwable lastException = null;
// number of pending processReadRequests() schedules; only the 0 -> 1 transition submits a task
private static final AtomicLongFieldUpdater<BKLogSegmentEntryReader> scheduleCountUpdater =
AtomicLongFieldUpdater.newUpdater(BKLogSegmentEntryReader.class, "scheduleCount");
private volatile long scheduleCount = 0L;
private volatile boolean hasCaughtupOnInprogress = false;
private final CopyOnWriteArraySet<StateChangeListener> stateChangeListeners =
new CopyOnWriteArraySet<StateChangeListener>();
// read retries
private int readAheadWaitTime;
private final int maxReadBackoffTime;
// consecutive transient read errors, used to scale the retry backoff
private static final AtomicIntegerFieldUpdater<BKLogSegmentEntryReader> numReadErrorsUpdater =
AtomicIntegerFieldUpdater.newUpdater(BKLogSegmentEntryReader.class, "numReadErrors");
private volatile int numReadErrors = 0;
private final boolean skipBrokenEntries;
// readahead cache
int cachedEntries = 0;
int numOutstandingEntries = 0;
final LinkedBlockingQueue<CacheEntry> readAheadEntries;
// request queue
final LinkedList<PendingReadRequest> readQueue;
// failure injector
private final AsyncFailureInjector failureInjector;
// Stats
private final Counter skippedBrokenEntriesCounter;
/**
 * Create an entry reader over a single log segment ledger.
 *
 * @param metadata metadata of the log segment to read
 * @param lh ledger handle already opened for this segment
 * @param startEntryId first entry id to read (negative values are clamped to 0)
 * @param bk bookkeeper client
 * @param scheduler ordered scheduler used for retries and request processing
 * @param conf configuration (prefetch sizes, backoff, envelope settings)
 * @param statsLogger stats logger for reader counters
 * @param failureInjector testing hook to inject read corruption
 */
BKLogSegmentEntryReader(LogSegmentMetadata metadata,
                        LedgerHandle lh,
                        long startEntryId,
                        BookKeeper bk,
                        OrderedScheduler scheduler,
                        DistributedLogConfiguration conf,
                        StatsLogger statsLogger,
                        AsyncFailureInjector failureInjector) {
this.metadata = metadata;
this.lssn = metadata.getLogSegmentSequenceNumber();
this.startSequenceId = metadata.getStartSequenceId();
this.envelopeEntries = metadata.getEnvelopeEntries();
this.deserializeRecordSet = conf.getDeserializeRecordSetOnReads();
this.lh = lh;
this.nextEntryId = Math.max(startEntryId, 0);
this.bk = bk;
this.conf = conf;
this.numPrefetchEntries = conf.getNumPrefetchEntriesPerLogSegment();
this.maxPrefetchEntries = conf.getMaxPrefetchEntriesPerLogSegment();
this.scheduler = scheduler;
this.openLedgerHandles = Lists.newArrayList();
this.openLedgerHandles.add(lh);
this.outstandingLongPoll = null;
// create the readahead queue
this.readAheadEntries = new LinkedBlockingQueue<CacheEntry>();
// create the read request queue
this.readQueue = new LinkedList<PendingReadRequest>();
// read backoff settings
this.readAheadWaitTime = conf.getReadAheadWaitTime();
this.maxReadBackoffTime = 4 * conf.getReadAheadWaitTime();
// other read settings
this.skipBrokenEntries = conf.getReadAheadSkipBrokenEntries();
// Failure Injection
this.failureInjector = failureInjector;
// Stats
this.skippedBrokenEntriesCounter = statsLogger.getCounter("skipped_broken_entries");
}
// Testing hook: expose the in-flight long-poll read, if any.
@VisibleForTesting
public synchronized CacheEntry getOutstandingLongPoll() {
return outstandingLongPoll;
}
// Testing hook: expose the readahead cache.
@VisibleForTesting
LinkedBlockingQueue<CacheEntry> getReadAheadEntries() {
return this.readAheadEntries;
}
// Current ledger handle (may change when an inprogress segment completes).
synchronized LedgerHandle getLh() {
return lh;
}
@Override
public synchronized LogSegmentMetadata getSegment() {
return metadata;
}
// Testing hook: next entry id to be prefetched.
@VisibleForTesting
synchronized long getNextEntryId() {
return nextEntryId;
}
// Start the reader by kicking off the first batch of prefetch reads.
@Override
public void start() {
prefetchIfNecessary();
}
@Override
public boolean hasCaughtUpOnInprogress() {
return hasCaughtupOnInprogress;
}
@Override
public LogSegmentEntryReader registerListener(StateChangeListener listener) {
stateChangeListeners.add(listener);
return this;
}
@Override
public LogSegmentEntryReader unregisterListener(StateChangeListener listener) {
stateChangeListeners.remove(listener);
return this;
}
// Notify all registered listeners that the reader caught up with the inprogress segment.
private void notifyCaughtupOnInprogress() {
for (StateChangeListener listener : stateChangeListeners) {
listener.onCaughtupOnInprogress();
}
}
//
// Process on Log Segment Metadata Updates
//
/**
 * React to a log segment metadata update: the only transition acted upon is this
 * reader's inprogress segment becoming completed, in which case its ledger is
 * re-opened to pick up the final (closed) state. All other updates are ignored.
 */
@Override
public synchronized void onLogSegmentMetadataUpdated(LogSegmentMetadata segment) {
if (metadata == segment
|| LogSegmentMetadata.COMPARATOR.compare(metadata, segment) == 0
|| !(metadata.isInProgress() && !segment.isInProgress())) {
return;
}
// segment is closed from inprogress, then re-open the log segment
// (openComplete(...) is the callback; the new metadata rides along as ctx)
bk.asyncOpenLedger(
segment.getLogSegmentId(),
BookKeeper.DigestType.CRC32,
conf.getBKDigestPW().getBytes(UTF_8),
this,
segment);
}
/**
 * Callback for the {@code asyncOpenLedger} issued from {@link #onLogSegmentMetadataUpdated}:
 * swaps this reader over to the freshly opened handle of the now-completed segment,
 * reissues any outstanding long poll, and wakes up waiting readers.
 */
@Override
public void openComplete(int rc, LedgerHandle lh, Object ctx) {
LogSegmentMetadata segment = (LogSegmentMetadata) ctx;
if (BKException.Code.OK != rc) {
// fail current reader or retry opening the reader
failOrRetryOpenLedger(rc, segment);
return;
}
// switch to new ledger handle if the log segment is moved to completed.
CacheEntry longPollRead = null;
synchronized (this) {
if (isClosed()) {
// reader was closed concurrently: close the just-opened handle and bail out
lh.asyncClose(new AsyncCallback.CloseCallback() {
@Override
public void closeComplete(int rc, LedgerHandle lh, Object ctx) {
logger.debug("Close the open ledger {} since the log segment reader is already closed",
lh.getId());
}
}, null);
return;
}
this.metadata = segment;
this.lh = lh;
this.openLedgerHandles.add(lh);
longPollRead = outstandingLongPoll;
}
if (null != longPollRead) {
// reissue the long poll read when the log segment state is changed
issueRead(longPollRead);
}
// notify readers
notifyReaders();
}
/**
 * Handle a failure to re-open the completed segment's ledger: fail the reader if it is
 * already caught up (fresh metadata is required to make progress), otherwise retry
 * the open later with the ZK retry backoff.
 */
private void failOrRetryOpenLedger(int rc, final LogSegmentMetadata segment) {
if (isClosed()) {
return;
}
if (isBeyondLastAddConfirmed()) {
// if the reader is already caught up, let's fail the reader immediately
// as we need to pull the latest metadata of this log segment.
completeExceptionally(new BKTransmitException("Failed to open ledger for reading log segment "
+ getSegment(), rc),
true);
return;
}
// the reader is still catching up, retry opening the log segment later
scheduler.scheduleOrdered(
segment.getLogSegmentId(),
() -> onLogSegmentMetadataUpdated(segment),
conf.getZKRetryBackoffStartMillis(),
TimeUnit.MILLISECONDS);
}
//
// Change the state of this reader
//
/**
 * Check whether the reader has recorded an error; if so, fail every queued read
 * request with that error.
 *
 * @return true if the reader is in error state, false otherwise
 */
private boolean checkClosedOrInError() {
    final Throwable error = lastExceptionUpdater.get(this);
    if (error == null) {
        return false;
    }
    cancelAllPendingReads(error);
    return true;
}
/**
 * Set the reader into error state with return code <i>rc</i>.
 *
 * <p>Only the first exception is ever recorded (compare-and-set from null); later
 * calls keep the original cause.
 *
 * @param throwable exception indicating the error
 * @param isBackground is the reader set exception by background reads or foreground reads
 */
private void completeExceptionally(Throwable throwable, boolean isBackground) {
lastExceptionUpdater.compareAndSet(this, null, throwable);
if (isBackground) {
// background failures must wake up waiting readers so they observe the error
notifyReaders();
}
}
/**
 * Notify the readers with the state change.
 *
 * <p>Simply (re)schedules request processing so pending reads re-evaluate the
 * reader's state and the readahead cache.
 */
private void notifyReaders() {
processReadRequests();
}
/**
 * Fail every queued read request with the given exception.
 *
 * <p>The queue is snapshotted and cleared under its own lock; the requests are then
 * completed outside the lock so caller callbacks never run while it is held.
 *
 * @param throwExc exception used to fail each pending request
 */
private void cancelAllPendingReads(Throwable throwExc) {
    List<PendingReadRequest> requestsToCancel;
    synchronized (readQueue) {
        // copy-constructor replaces the Guava newArrayListWithExpectedSize + addAll pair
        requestsToCancel = new ArrayList<PendingReadRequest>(readQueue);
        readQueue.clear();
    }
    for (PendingReadRequest request : requestsToCancel) {
        request.completeExceptionally(throwExc);
    }
}
/**
 * Drain the readahead cache, releasing the buffer held by every cached entry.
 */
private void releaseAllCachedEntries() {
    synchronized (this) {
        for (CacheEntry cached = readAheadEntries.poll();
             cached != null;
             cached = readAheadEntries.poll()) {
            cached.release();
        }
    }
}
//
// Background Read Operations
//
/**
 * Bookkeeping after a prefetch read completes: decrement the outstanding count,
 * wake up waiting readers, and — on success only — keep prefetching.
 */
private void onReadEntryDone(boolean success) {
// we successfully read an entry
synchronized (this) {
--numOutstandingEntries;
}
// notify reader that there is entry ready
notifyReaders();
// stop prefetch if we already encountered exceptions
if (success) {
prefetchIfNecessary();
}
}
/**
 * Bookkeeping after a read request consumed entries from the cache: shrink the
 * cached count and refill via prefetch.
 */
private void onEntriesConsumed(int numEntries) {
synchronized (this) {
cachedEntries -= numEntries;
}
prefetchIfNecessary();
}
/**
 * Issue reads for upcoming entries until either the cache holds
 * {@code maxPrefetchEntries} or {@code numPrefetchEntries} reads are outstanding.
 *
 * <p>Cache entries are created and queued under the reader lock; the BookKeeper
 * reads themselves are issued outside it.
 */
private void prefetchIfNecessary() {
List<CacheEntry> entriesToFetch;
synchronized (this) {
if (cachedEntries >= maxPrefetchEntries) {
return;
}
// we don't have enough entries, do prefetch
int numEntriesToFetch = numPrefetchEntries - numOutstandingEntries;
if (numEntriesToFetch <= 0) {
return;
}
entriesToFetch = new ArrayList<CacheEntry>(numEntriesToFetch);
for (int i = 0; i < numEntriesToFetch; i++) {
if (cachedEntries >= maxPrefetchEntries) {
break;
}
// stop at the last entry readable right now: LAC for a closed ledger,
// LAC + 1 (long-pollable) for an inprogress one
if ((isLedgerClosed() && nextEntryId > getLastAddConfirmed())
|| (!isLedgerClosed() && nextEntryId > getLastAddConfirmed() + 1)) {
break;
}
CacheEntry entry = new CacheEntry(nextEntryId);
entriesToFetch.add(entry);
readAheadEntries.add(entry);
++numOutstandingEntries;
++cachedEntries;
++nextEntryId;
}
}
// issue the reads outside the lock
for (CacheEntry entry : entriesToFetch) {
issueRead(entry);
}
}
/**
 * Issue (or reissue) the read for the given cache entry, choosing between a simple
 * read (entry id is at or below LAC) and a long-poll read (entry beyond LAC on an
 * inprogress ledger).
 */
private void issueRead(CacheEntry cacheEntry) {
if (isClosed()) {
return;
}
if (isLedgerClosed()) {
if (isNotBeyondLastAddConfirmed(cacheEntry.getEntryId())) {
issueSimpleRead(cacheEntry);
return;
} else {
// Reach the end of stream
notifyReaders();
}
} else { // the ledger is still in progress
if (isNotBeyondLastAddConfirmed(cacheEntry.getEntryId())) {
issueSimpleRead(cacheEntry);
} else {
issueLongPollRead(cacheEntry);
}
}
}
// Read a single known-to-exist entry; processReadEntries(...) is the callback.
private void issueSimpleRead(CacheEntry cacheEntry) {
getLh().asyncReadEntries(cacheEntry.entryId, cacheEntry.entryId, cacheEntry, null);
}
/**
 * Long-poll for an entry beyond the current LAC on an inprogress ledger;
 * readLastConfirmedAndEntryComplete(...) is the callback.
 */
private void issueLongPollRead(CacheEntry cacheEntry) {
// register the read as outstanding reads
synchronized (this) {
this.outstandingLongPoll = cacheEntry;
}
// the first long poll means the reader has consumed everything written so far
if (!hasCaughtupOnInprogress) {
hasCaughtupOnInprogress = true;
notifyCaughtupOnInprogress();
}
getLh().asyncReadLastConfirmedAndEntry(
cacheEntry.entryId,
conf.getReadLACLongPollTimeout(),
false,
cacheEntry,
null);
}
//
// Foreground Read Operations
//
/**
 * Convert a raw BookKeeper {@code LedgerEntry} into an {@code Entry.Reader}, stamping
 * it with this segment's sequence info and envelope/record-set settings.
 *
 * @throws IOException if the entry buffer cannot be deserialized
 */
Entry.Reader processReadEntry(LedgerEntry entry) throws IOException {
return Entry.newBuilder()
.setLogSegmentInfo(lssn, startSequenceId)
.setEntryId(entry.getEntryId())
.setEnvelopeEntry(envelopeEntries)
.deserializeRecordSet(deserializeRecordSet)
.setEntry(entry.getEntryBuffer())
.buildReader();
}
/**
 * Queue a request for up to {@code numEntries} entries. Processing is only kicked off
 * when the queue was previously empty — a non-empty queue means processing is already
 * scheduled and will pick this request up in order.
 */
@Override
public CompletableFuture<List<Entry.Reader>> readNext(int numEntries) {
final PendingReadRequest readRequest = new PendingReadRequest(numEntries);
if (checkClosedOrInError()) {
// reader already failed: surface the recorded error immediately
readRequest.completeExceptionally(lastExceptionUpdater.get(this));
} else {
boolean wasQueueEmpty;
synchronized (readQueue) {
wasQueueEmpty = readQueue.isEmpty();
readQueue.add(readRequest);
}
if (wasQueueEmpty) {
processReadRequests();
}
}
return readRequest.getPromise();
}
/**
 * Schedule {@link #safeRun()} on the segment-ordered executor. The schedule counter
 * ensures at most one task is queued at a time: only the 0 -> 1 transition submits.
 */
private void processReadRequests() {
if (isClosed()) {
// the reader is already closed.
return;
}
long prevCount = scheduleCountUpdater.getAndIncrement(this);
if (0 == prevCount) {
scheduler.executeOrdered(getSegment().getLogSegmentId(), this);
}
}
/**
 * The core function to propagate fetched entries to read requests.
 *
 * <p>Runs on the segment-ordered executor, so at most one instance executes at a time.
 * It repeatedly: peeks the oldest pending request, fills it from the readahead cache,
 * and completes it when at least one entry is available; it exits when the queue is
 * empty, the reader errors out, or the schedule counter drains to zero.
 */
@Override
public void safeRun() {
long scheduleCountLocal = scheduleCountUpdater.get(this);
while (true) {
PendingReadRequest nextRequest = null;
synchronized (readQueue) {
nextRequest = readQueue.peek();
}
// if read queue is empty, nothing to read, return
if (null == nextRequest) {
scheduleCountUpdater.set(this, 0L);
return;
}
// if the oldest pending promise is interrupted then we must
// mark the reader in error and abort all pending reads since
// we don't know the last consumed read
if (null == lastExceptionUpdater.get(this)) {
if (nextRequest.getPromise().isCancelled()) {
completeExceptionally(new DLInterruptedException("Interrupted on reading log segment "
+ getSegment() + " : " + nextRequest.getPromise().isCancelled()), false);
}
}
// if the reader is in error state, stop read
if (checkClosedOrInError()) {
return;
}
// read entries from readahead cache to satisfy next read request
readEntriesFromReadAheadCache(nextRequest);
// check if we can satisfy the read request
if (nextRequest.hasReadEntries()) {
PendingReadRequest request;
synchronized (readQueue) {
request = readQueue.poll();
}
// the polled request must be the one we just filled; anything else means
// the queue was mutated unexpectedly and the reader cannot continue safely
if (null != request && nextRequest == request) {
request.complete();
} else {
DLIllegalStateException ise = new DLIllegalStateException("Unexpected condition at reading from "
+ getSegment());
nextRequest.completeExceptionally(ise);
if (null != request) {
request.completeExceptionally(ise);
}
completeExceptionally(ise, false);
}
} else {
// nothing available yet: drain one scheduled wake-up; exit once none remain
if (0 == scheduleCountLocal) {
return;
}
scheduleCountLocal = scheduleCountUpdater.decrementAndGet(this);
}
}
}
/**
 * Move completed entries from the readahead cache into the given request, stopping at
 * the first incomplete entry, at end-of-segment, or once the request is full.
 *
 * <p>Broken (digest-mismatch) entries are skipped when {@code skipBrokenEntries} is
 * enabled; any other read failure permanently fails the reader.
 */
private void readEntriesFromReadAheadCache(PendingReadRequest nextRequest) {
while (!nextRequest.hasReadEnoughEntries()) {
CacheEntry entry;
boolean hitEndOfLogSegment;
synchronized (this) {
entry = readAheadEntries.peek();
hitEndOfLogSegment = (null == entry) && isEndOfLogSegment();
}
// reach end of log segment
if (hitEndOfLogSegment) {
completeExceptionally(new EndOfLogSegmentException(getSegment().getZNodeName()), false);
return;
}
if (null == entry) {
return;
}
// entry is not complete yet.
if (!entry.isDone()) {
// we already reached end of the log segment
if (isEndOfLogSegment(entry.getEntryId())) {
completeExceptionally(new EndOfLogSegmentException(getSegment().getZNodeName()), false);
}
return;
}
if (entry.isSuccess()) {
CacheEntry removedEntry = readAheadEntries.poll();
try {
// peek and poll must observe the same head entry; a mismatch indicates
// unexpected concurrent consumption of the cache
if (entry != removedEntry) {
DLIllegalStateException ise =
new DLIllegalStateException("Unexpected condition at reading from "
+ getSegment());
completeExceptionally(ise, false);
return;
}
try {
// the reference is retained on `entry.getEntry()`.
// Entry.Reader is responsible for releasing it.
nextRequest.addEntry(processReadEntry(entry.getEntry()));
} catch (IOException e) {
completeExceptionally(e, false);
return;
}
} finally {
ReferenceCountUtil.safeRelease(removedEntry);
}
} else if (skipBrokenEntries && BKException.Code.DigestMatchException == entry.getRc()) {
// skip this entry and move forward
skippedBrokenEntriesCounter.inc();
CacheEntry removedEntry = readAheadEntries.poll();
removedEntry.release();
continue;
} else {
completeExceptionally(new BKTransmitException("Encountered issue on reading entry " + entry.getEntryId()
+ " @ log segment " + getSegment(), entry.getRc()), false);
return;
}
}
}
//
// State Management
//
// True when the ledger is closed and the next entry to prefetch is past its LAC.
private synchronized boolean isEndOfLogSegment() {
return isEndOfLogSegment(nextEntryId);
}
// True when the ledger is closed and the given entry id is past its LAC.
private boolean isEndOfLogSegment(long entryId) {
return isLedgerClosed() && entryId > getLastAddConfirmed();
}
@Override
public synchronized boolean isBeyondLastAddConfirmed() {
return isBeyondLastAddConfirmed(nextEntryId);
}
private boolean isBeyondLastAddConfirmed(long entryId) {
return entryId > getLastAddConfirmed();
}
private boolean isNotBeyondLastAddConfirmed(long entryId) {
return entryId <= getLastAddConfirmed();
}
private boolean isLedgerClosed() {
return getLh().isClosed();
}
@Override
public long getLastAddConfirmed() {
return getLh().getLastAddConfirmed();
}
// The reader counts as closed once asyncClose() has created the close promise.
synchronized boolean isClosed() {
return null != closePromise;
}
/**
 * Close the reader: fail pending and subsequent reads with a
 * {@code ReadCancelledException}, release cached entry buffers, and close every
 * ledger handle this reader opened. Idempotent — repeated calls return the same future.
 */
@Override
public CompletableFuture<Void> asyncClose() {
final CompletableFuture<Void> closeFuture;
ReadCancelledException exception;
LedgerHandle[] lhsToClose;
synchronized (this) {
if (null != closePromise) {
return closePromise;
}
closeFuture = closePromise = new CompletableFuture<Void>();
lhsToClose = openLedgerHandles.toArray(new LedgerHandle[openLedgerHandles.size()]);
// set the exception to cancel pending and subsequent reads
exception = new ReadCancelledException(getSegment().getZNodeName(), "Reader was closed");
completeExceptionally(exception, false);
}
// release the cached entries
releaseAllCachedEntries();
// cancel all pending reads
cancelAllPendingReads(exception);
// close all the open ledger
FutureUtils.proxyTo(
BKUtils.closeLedgers(lhsToClose),
closeFuture
);
return closeFuture;
}
}
| |
package com.example.android.sunshine.app.data;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.test.AndroidTestCase;
import java.util.HashSet;
public class TestDb extends AndroidTestCase {
public static final String LOG_TAG = TestDb.class.getSimpleName();
// Since we want each test to start with a clean slate
void deleteTheDatabase() {
mContext.deleteDatabase(WeatherDbHelper.DATABASE_NAME);
}
/*
This function gets called before each test is executed to delete the database. This makes
sure that we always have a clean test.
*/
public void setUp() {
deleteTheDatabase();
}
/*
Students: Uncomment this test once you've written the code to create the Location
table. Note that you will have to have chosen the same column names that I did in
my solution for this test to compile, so if you haven't yet done that, this is
a good time to change your column names to match mine.
Note that this only tests that the Location table has the correct columns, since we
give you the code for the weather table. This test does not look at the
*/
public void testCreateDb() throws Throwable {
// build a HashSet of all of the table names we wish to look for
// Note that there will be another table in the DB that stores the
// Android metadata (db version information)
final HashSet<String> tableNameHashSet = new HashSet<String>();
tableNameHashSet.add(WeatherContract.LocationEntry.TABLE_NAME);
tableNameHashSet.add(WeatherContract.WeatherEntry.TABLE_NAME);
mContext.deleteDatabase(WeatherDbHelper.DATABASE_NAME);
SQLiteDatabase db = new WeatherDbHelper(
this.mContext).getWritableDatabase();
assertEquals(true, db.isOpen());
// have we created the tables we want?
Cursor c = db.rawQuery("SELECT name FROM sqlite_master WHERE type='table'", null);
assertTrue("Error: This means that the database has not been created correctly",
c.moveToFirst());
// verify that the tables have been created
do {
tableNameHashSet.remove(c.getString(0));
} while( c.moveToNext() );
// if this fails, it means that your database doesn't contain both the location entry
// and weather entry tables
assertTrue("Error: Your database was created without both the location entry and weather entry tables",
tableNameHashSet.isEmpty());
// now, do our tables contain the correct columns?
c = db.rawQuery("PRAGMA table_info(" + WeatherContract.LocationEntry.TABLE_NAME + ")",
null);
assertTrue("Error: This means that we were unable to query the database for table information.",
c.moveToFirst());
// Build a HashSet of all of the column names we want to look for
final HashSet<String> locationColumnHashSet = new HashSet<String>();
locationColumnHashSet.add(WeatherContract.LocationEntry._ID);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_CITY_NAME);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_COORD_LAT);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_COORD_LONG);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING);
int columnNameIndex = c.getColumnIndex("name");
do {
String columnName = c.getString(columnNameIndex);
locationColumnHashSet.remove(columnName);
} while(c.moveToNext());
// if this fails, it means that your database doesn't contain all of the required location
// entry columns
assertTrue("Error: The database doesn't contain all of the required location entry columns",
locationColumnHashSet.isEmpty());
db.close();
}
/*
Students: Here is where you will build code to test that we can insert and query the
location database. We've done a lot of work for you. You'll want to look in TestUtilities
where you can uncomment out the "createNorthPoleLocationValues" function. You can
also make use of the ValidateCurrentRecord function from within TestUtilities.
*/
public void testLocationTable() {
insertLocation();
}
/*
Students: Here is where you will build code to test that we can insert and query the
database. We've done a lot of work for you. You'll want to look in TestUtilities
where you can use the "createWeatherValues" function. You can
also make use of the validateCurrentRecord function from within TestUtilities.
*/
public void testWeatherTable() {
// First insert the location, and then use the locationRowId to insert
// the weather. Make sure to cover as many failure cases as you can.
long locationRowId = insertLocation();
// Make sure we have a valid row ID.
assertFalse("Error: Location Not Inserted Correctly", locationRowId == -1L);
// Instead of rewriting all of the code we've already written in testLocationTable
// we can move this code to insertLocation and then call insertLocation from both
// tests. Why move it? We need the code to return the ID of the inserted location
// and our testLocationTable can only return void because it's a test.
// First step: Get reference to writable database
WeatherDbHelper dbHelper = new WeatherDbHelper(mContext);
SQLiteDatabase db = dbHelper.getWritableDatabase();
// Create ContentValues of what you want to insert
// (you can use the createWeatherValues TestUtilities function if you wish)
ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId);
// Insert ContentValues into database and get a row ID back
long weatherRowId = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, weatherValues);
assertTrue(weatherRowId != -1);
// Query the database and receive a Cursor back
Cursor weatherCursor = db.query(
WeatherContract.WeatherEntry.TABLE_NAME, // Table to Query
null, // leaving "columns" null just returns all the columns.
null, // cols for "where" clause
null, // values for "where" clause
null, // columns to group by
null, // columns to filter by row groups
null // sort order
);
// Move the cursor to a valid database row
assertTrue( "Error: No Records returned from location query", weatherCursor.moveToFirst());
// Validate data in resulting Cursor with the original ContentValues
// (you can use the validateCurrentRecord function in TestUtilities to validate the
// query if you like)
TestUtilities.validateCurrentRecord("testInsertReadDb weatherEntry failed to validate",
weatherCursor, weatherValues);
assertFalse( "Error: More than one record returned from weather query",
weatherCursor.moveToNext() );
// Finally, close the cursor and database
weatherCursor.close();
dbHelper.close();
}
/*
Students: This is a helper method for the testWeatherTable quiz. You can move your
code from testLocationTable to here so that you can call this code from both
testWeatherTable and testLocationTable.
*/
public long insertLocation() {
// First step: Get reference to writable database
// If there's an error in those massive SQL table creation Strings,
// errors will be thrown here when you try to get a writable database.
WeatherDbHelper dbHelper = new WeatherDbHelper(mContext);
SQLiteDatabase db = dbHelper.getWritableDatabase();
// Create ContentValues of what you want to insert
ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
// Insert ContentValues into database and get a row ID back
long locationRowId;
locationRowId = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, testValues);
assertTrue(locationRowId != -1);
// Query the database and receive a Cursor back
Cursor cursor = db.query(
WeatherContract.LocationEntry.TABLE_NAME, // Table to Query
null, // all columns
null, // Columns for the "where" clause
null, // Values for the "where" clause
null, // columns to group by
null, // columns to filter by row groups
null // sort order
);
// Move the cursor to a valid database row
assertTrue( "Error: No Records returned from location query", cursor.moveToFirst() );
// Validate data in resulting Cursor with the original ContentValues
// (you can use the validateCurrentRecord function in TestUtilities to validate the
// query if you like)
TestUtilities.validateCurrentRecord("Error: Location Query Validation Failed", cursor, testValues);
assertFalse( "Error: More than one record returned from location query", cursor.moveToNext() );
// Finally, close the cursor and database
cursor.close();
db.close();
return locationRowId;
}
}
| |
/******************************************************************************
* Copyright (c) 2006, 2010 VMware Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html and the Apache License v2.0
* is available at http://www.opensource.org/licenses/apache2.0.php.
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
* VMware Inc.
*****************************************************************************/
package org.eclipse.gemini.blueprint.util;
import java.util.Collections;
import java.util.Dictionary;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.gemini.blueprint.util.internal.MapBasedDictionary;
import org.eclipse.gemini.blueprint.util.internal.ServiceReferenceBasedMap;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
/**
* Utility class for retrieving OSGi service references. This class offers a unified filter-based access for OSGi
* services as well as translation of checked exceptions {@link InvalidSyntaxException} into unchecked ones.
*
* <p/>
*
* This classes uses {@link OsgiFilterUtils} underneath to allow multiple class names to be used for service reference
* lookup.
*
* @see OsgiFilterUtils
* @author Costin Leau
*
*/
public abstract class OsgiServiceReferenceUtils {
/** Shared logger for service-reference lookup diagnostics. */
private static final Log log = LogFactory.getLog(OsgiServiceReferenceUtils.class);
/**
 * Returns a reference to the <em>best matching</em> service for the given class names.
 *
 * <p/> Delegates to {@link #getServiceReference(BundleContext, String[], String)} with no
 * additional filter.
 *
 * @param bundleContext OSGi bundle context
 * @param classes array of fully qualified class names
 * @return reference to the <em>best matching</em> service
 */
public static ServiceReference getServiceReference(BundleContext bundleContext, String[] classes) {
return getServiceReference(bundleContext, classes, null);
}
/**
 * Returns a reference to the <em>best matching</em> service for the given class and OSGi filter.
 *
 * @param bundleContext OSGi bundle context
 * @param clazz fully qualified class name (can be <code>null</code>)
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return reference to the <em>best matching</em> service
 */
public static ServiceReference getServiceReference(BundleContext bundleContext, String clazz, String filter) {
ServiceReference[] refs = getServiceReferences(bundleContext, clazz, filter);
// pick the best service
return getServiceReference(refs);
}
/**
 * Returns the <em>best matching</em> reference among the given candidates, using the
 * standard OSGi ordering: highest service ranking wins, ties broken by lowest service id.
 *
 * @param references candidate service references (may be <code>null</code> or empty)
 * @return the best candidate, or <code>null</code> if none were supplied
 */
public static ServiceReference getServiceReference(ServiceReference... references) {
    if (ObjectUtils.isEmpty(references)) {
        return null;
    }
    ServiceReference best = references[0];
    if (references.length > 1) {
        long bestId = getServiceId(best);
        int bestRanking = getServiceRanking(best);
        // scan the remaining candidates for a better match
        for (int i = 1; i < references.length; i++) {
            ServiceReference candidate = references[i];
            int candidateRanking = getServiceRanking(candidate);
            long candidateId = getServiceId(candidate);
            boolean higherRank = candidateRanking > bestRanking;
            boolean sameRankLowerId = (candidateRanking == bestRanking) && (candidateId < bestId);
            if (higherRank || sameRankLowerId) {
                best = candidate;
                bestId = candidateId;
                bestRanking = candidateRanking;
            }
        }
    }
    return best;
}
/**
 * Returns a reference to the <em>best matching</em> service for the given classes and OSGi filter.
 *
 * @param bundleContext OSGi bundle context
 * @param classes array of fully qualified class names
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return reference to the <em>best matching</em> service
 */
public static ServiceReference getServiceReference(BundleContext bundleContext, String[] classes, String filter) {
// use #getServiceReference(BundleContext, String, String) method to
// speed the service lookup process by
// giving one class as a hint to the OSGi implementation
String clazz = (ObjectUtils.isEmpty(classes) ? null : classes[0]);
return getServiceReference(bundleContext, clazz, OsgiFilterUtils.unifyFilter(classes, filter));
}
/**
 * Returns a reference to the <em>best matching</em> service for the given OSGi filter.
 *
 * @param bundleContext OSGi bundle context
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return reference to the <em>best matching</em> service
 */
public static ServiceReference getServiceReference(BundleContext bundleContext, String filter) {
// no class hint available: search by filter only
return getServiceReference(bundleContext, (String) null, filter);
}
/**
 * Returns references to <em>all</em> services matching the given class names.
 *
 * @param bundleContext OSGi bundle context
 * @param classes array of fully qualified class names
 * @return non-<code>null</code> array of references to matching services
 */
public static ServiceReference[] getServiceReferences(BundleContext bundleContext, String[] classes) {
return getServiceReferences(bundleContext, classes, null);
}
/**
 * Returns references to <em>all</em> services matching the given class name and OSGi filter.
 *
 * @param bundleContext OSGi bundle context
 * @param clazz fully qualified class name (can be <code>null</code>)
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return non-<code>null</code> array of references to matching services
 * @throws IllegalArgumentException if the filter expression is syntactically invalid
 */
public static ServiceReference[] getServiceReferences(BundleContext bundleContext, String clazz, String filter) {
    Assert.notNull(bundleContext, "bundleContext should be not null");
    try {
        ServiceReference[] refs = bundleContext.getServiceReferences(clazz, filter);
        // the OSGi API returns null for "no match"; normalize to an empty array
        return (refs == null ? new ServiceReference[0] : refs);
    } catch (InvalidSyntaxException ise) {
        // translate the checked OSGi exception into an unchecked one, preserving the
        // cause via the (String, Throwable) constructor instead of the old cast+initCause
        throw new IllegalArgumentException("invalid filter: " + ise.getFilter(), ise);
    }
}
/**
 * Returns references to <em>all</em> services matching the given class names and OSGi filter.
 *
 * @param bundleContext OSGi bundle context
 * @param classes array of fully qualified class names
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return non-<code>null</code> array of references to matching services
 */
public static ServiceReference[] getServiceReferences(BundleContext bundleContext, String[] classes, String filter) {
// use #getServiceReferences(BundleContext, String, String) method to
// speed the service lookup process by
// giving one class as a hint to the OSGi implementation
// additionally this allows type filtering
String clazz = (ObjectUtils.isEmpty(classes) ? null : classes[0]);
return getServiceReferences(bundleContext, clazz, OsgiFilterUtils.unifyFilter(classes, filter));
}
/**
 * Returns references to <em>all</em> services matching the OSGi filter.
 *
 * @param bundleContext OSGi bundle context
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return non-<code>null</code> array of references to matching services
 */
public static ServiceReference[] getServiceReferences(BundleContext bundleContext, String filter) {
// no class hint available: search by filter only
return getServiceReferences(bundleContext, (String) null, filter);
}
/**
 * Returns the service id ({@link Constants#SERVICE_ID}) of the given service reference.
 *
 * @param reference OSGi service reference
 * @return service id
 */
public static long getServiceId(ServiceReference reference) {
    Assert.notNull(reference);
    // SERVICE_ID is always registered as a Long by the framework
    Long id = (Long) reference.getProperty(Constants.SERVICE_ID);
    return id.longValue();
}
/**
 * Returns the service ranking ({@link Constants#SERVICE_RANKING}) of the given service reference.
 *
 * @param reference OSGi service reference
 * @return service ranking, or 0 when the property is missing or not an Integer
 */
public static int getServiceRanking(ServiceReference reference) {
    Assert.notNull(reference);
    Object ranking = reference.getProperty(Constants.SERVICE_RANKING);
    // instanceof already yields false for null, so the original's extra
    // "ranking != null &&" test was redundant; fall back to rank 0 when the
    // property is not supplied or has the wrong type
    return (ranking instanceof Integer ? ((Integer) ranking).intValue() : 0);
}
/**
 * Returns the advertised class names ({@link Constants#OBJECTCLASS}) of the given service reference.
 *
 * @param reference OSGi service reference
 * @return service advertised class names
 */
public static String[] getServiceObjectClasses(ServiceReference reference) {
Assert.notNull(reference);
// OBJECTCLASS is registered as a String[] by the framework, so the cast is safe
return (String[]) reference.getProperty(Constants.OBJECTCLASS);
}
/**
 * Returns a {@link Map} containing the properties available for the given service reference. This method takes a
 * snapshot of the properties; future changes to the service properties will not be reflected in the returned
 * dictionary.
 *
 * <p/> Note that the returned type implements the {@link java.util.Map} interface also.
 *
 * @param reference OSGi service reference
 * @return a <code>Dictionary</code> containing the service reference properties taken as a snapshot
 */
public static Dictionary getServicePropertiesSnapshot(ServiceReference reference) {
// wrap the read-only snapshot map in a Dictionary adapter
return new MapBasedDictionary(getServicePropertiesSnapshotAsMap(reference));
}
/**
 * Returns a {@link Map} containing the properties available for the given service reference. This method takes a
 * snapshot of the properties; future changes to the service properties will not be reflected in the returned
 * map.
 *
 * @param reference OSGi service reference
 * @return a read-only <code>Map</code> containing the service reference properties taken as a snapshot
 */
public static Map getServicePropertiesSnapshotAsMap(ServiceReference reference) {
    Assert.notNull(reference);
    String[] keys = reference.getPropertyKeys();
    // LinkedHashMap keeps the properties in the order reported by the reference
    Map snapshot = new LinkedHashMap(keys.length);
    int i = 0;
    while (i < keys.length) {
        String key = keys[i];
        snapshot.put(key, reference.getProperty(key));
        i++;
    }
    // expose the snapshot as read-only
    return Collections.unmodifiableMap(snapshot);
}
/**
 * Returns a {@link Dictionary} containing the properties available for the given service reference. The returned
 * object will reflect any updates made to the <code>ServiceReference</code> through the owning
 * <code>ServiceRegistration</code>.
 *
 *
 * <p/> Note that the returned type implements the {@link java.util.Map} interface also.
 *
 * @param reference OSGi service reference
 * @return a <code>Dictionary</code> containing the latest service reference properties
 */
public static Dictionary getServiceProperties(ServiceReference reference) {
// live view: reads go through to the reference on every access
return new MapBasedDictionary(getServicePropertiesAsMap(reference));
}
/**
 * Returns a {@link Map} containing the properties available for the given service reference. The returned object
 * will reflect any updates made to the <code>ServiceReference</code> through the owning
 * <code>ServiceRegistration</code>. Consider using {@link #getServiceProperties(ServiceReference)} which returns an
 * object that extends {@link Dictionary} as well as implements the {@link Map} interface.
 *
 * @param reference OSGi service reference
 * @return a <code>Map</code> containing the latest service reference properties
 * @see #getServiceProperties(ServiceReference)
 */
public static Map getServicePropertiesAsMap(ServiceReference reference) {
Assert.notNull(reference);
// live view backed directly by the reference (no snapshot is taken)
return new ServiceReferenceBasedMap(reference);
}
/**
 * Checks if the given filter matches at least one OSGi service or not.
 *
 * @param bundleContext OSGi bundle context
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return true if the filter matches at least one OSGi service, false otherwise
 */
public static boolean isServicePresent(BundleContext bundleContext, String filter) {
// present iff the lookup returns a non-empty reference array
return !ObjectUtils.isEmpty(getServiceReferences(bundleContext, filter));
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.util.registry;
import com.intellij.icons.AllIcons;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationEx;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.ShadowAction;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.*;
import com.intellij.ui.table.JBTable;
import com.intellij.util.PlatformIcons;
import com.intellij.util.ui.UIUtil;
import com.intellij.xml.util.XmlStringUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableColumn;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.image.BufferedImage;
import java.util.*;
import java.util.List;
/**
 * Modal "Registry" dialog: presents every {@link Registry} value in a
 * three-column table (restart marker, key, value) and lets the user edit,
 * revert or restore values. Recently changed keys are persisted and sorted
 * to the top of the table.
 *
 * <p>Fix over the previous revision: {@code MyRenderer} dereferenced the row's
 * {@code RegistryValue} before its null check, making the guard useless; the
 * renderer now bails out early instead.</p>
 *
 * @author Kirill Kalishev
 * @author Konstantin Bulenkov
 */
public class RegistryUi implements Disposable {
  /** Properties key under which recently edited registry keys are persisted. */
  private static final String RECENT_PROPERTIES_KEY = "RegistryRecentKeys";

  private final JBTable myTable;
  private final JTextArea myDescriptionLabel;
  private final JPanel myContent = new JPanel();

  /** Icon shown in column 0 for values whose change requires an IDE restart. */
  private static final Icon RESTART_ICON = PlatformIcons.CHECK_ICON;
  private final RestoreDefaultsAction myRestoreDefaultsAction;
  private final MyTableModel myModel;

  public RegistryUi() {
    myContent.setLayout(new BorderLayout(UIUtil.DEFAULT_HGAP, UIUtil.DEFAULT_VGAP));

    myModel = new MyTableModel();
    myTable = new JBTable(myModel);
    myTable.setCellSelectionEnabled(true);
    myTable.setEnableAntialiasing(true);
    final MyRenderer r = new MyRenderer();

    // column 0: restart-required marker, sized to fit the icon
    final TableColumn c0 = myTable.getColumnModel().getColumn(0);
    c0.setCellRenderer(r);
    c0.setMaxWidth(RESTART_ICON.getIconWidth() + 12);
    c0.setMinWidth(RESTART_ICON.getIconWidth() + 12);
    c0.setHeaderValue(null);

    final TableColumn c1 = myTable.getColumnModel().getColumn(1);
    c1.setCellRenderer(r);
    c1.setHeaderValue("Key");

    final TableColumn c2 = myTable.getColumnModel().getColumn(2);
    c2.setCellRenderer(r);
    c2.setHeaderValue("Value");
    c2.setCellEditor(new MyEditor());
    myTable.setStriped(true);

    myDescriptionLabel = new JTextArea(3, 50);
    myDescriptionLabel.setWrapStyleWord(true);
    myDescriptionLabel.setLineWrap(true);
    myDescriptionLabel.setEditable(false);
    final JScrollPane label = ScrollPaneFactory.createScrollPane(myDescriptionLabel);
    final JPanel descriptionPanel = new JPanel(new BorderLayout());
    descriptionPanel.add(label, BorderLayout.CENTER);
    descriptionPanel.setBorder(IdeBorderFactory.createTitledBorder("Description", false));

    myContent.add(ScrollPaneFactory.createScrollPane(myTable), BorderLayout.CENTER);
    myContent.add(descriptionPanel, BorderLayout.SOUTH);

    // show the selected value's description (with a restart note if needed)
    myTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(@NotNull ListSelectionEvent e) {
        if (e.getValueIsAdjusting()) return;
        final int selected = myTable.getSelectedRow();
        if (selected != -1) {
          final RegistryValue value = myModel.getRegistryValue(selected);
          String desc = value.getDescription();
          if (value.isRestartRequired()) {
            String required = " Requires IDE restart.";
            if (desc.endsWith(".")) {
              desc += required;
            } else {
              desc += "." + required;
            }
          }
          myDescriptionLabel.setText(desc);
        } else {
          myDescriptionLabel.setText(null);
        }
      }
    });

    myRestoreDefaultsAction = new RestoreDefaultsAction();

    final DefaultActionGroup tbGroup = new DefaultActionGroup();
    tbGroup.add(new EditAction());
    tbGroup.add(new RevertAction());

    final ActionToolbar tb = ActionManager.getInstance().createActionToolbar("Registry", tbGroup, true);
    tb.setTargetComponent(myTable);
    myContent.add(tb.getComponent(), BorderLayout.NORTH);

    final TableSpeedSearch search = new TableSpeedSearch(myTable);
    search.setComparator(new SpeedSearchComparator(false));
    // SPACE toggles boolean values directly from the table
    myTable.addKeyListener(new KeyAdapter() {
      @Override
      public void keyPressed(@NotNull KeyEvent e) {
        if (e.getKeyCode() == KeyEvent.VK_SPACE) {
          int row = myTable.getSelectedRow();
          if (row != -1) {
            RegistryValue rv = myModel.getRegistryValue(row);
            if (rv.isBoolean()) {
              rv.setValue(!rv.asBoolean());
              keyChanged(rv.getKey());
              for (int i : new int[]{0, 1, 2}) myModel.fireTableCellUpdated(row, i);
              invalidateActions();
              if (search.isPopupActive()) search.hidePopup();
            }
          }
        }
      }
    });
  }

  /** Toolbar action: resets the selected value back to its default. */
  private class RevertAction extends AnAction {
    private RevertAction() {
      new ShadowAction(this, ActionManager.getInstance().getAction("EditorDelete"), myTable);
    }

    @Override
    public void update(AnActionEvent e) {
      e.getPresentation().setEnabled(!myTable.isEditing() && myTable.getSelectedRow() >= 0);
      e.getPresentation().setText("Revert to Default");
      e.getPresentation().setIcon(AllIcons.General.Reset);

      if (e.getPresentation().isEnabled()) {
        // only enabled for values that actually differ from their default
        final RegistryValue rv = myModel.getRegistryValue(myTable.getSelectedRow());
        e.getPresentation().setEnabled(rv.isChangedFromDefault());
      }
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      final RegistryValue rv = myModel.getRegistryValue(myTable.getSelectedRow());
      rv.resetToDefault();
      myModel.fireTableCellUpdated(myTable.getSelectedRow(), 0);
      myModel.fireTableCellUpdated(myTable.getSelectedRow(), 1);
      myModel.fireTableCellUpdated(myTable.getSelectedRow(), 2);
      invalidateActions();
    }
  }

  /** Toolbar action: starts in-place editing of the selected value cell. */
  private class EditAction extends AnAction {
    private EditAction() {
      new ShadowAction(this, ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE), myTable);
    }

    @Override
    public void update(AnActionEvent e) {
      e.getPresentation().setEnabled(!myTable.isEditing() && myTable.getSelectedRow() >= 0);
      e.getPresentation().setText("Edit");
      e.getPresentation().setIcon(AllIcons.Actions.EditSource);
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      startEditingAtSelection();
    }
  }

  /** Begins editing the "Value" cell of the selected row and focuses the editor. */
  private void startEditingAtSelection() {
    myTable.editCellAt(myTable.getSelectedRow(), 2);
    if (myTable.isEditing()) {
      IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
        IdeFocusManager.getGlobalInstance().requestFocus(myTable.getEditorComponent(), true);
      });
    }
  }

  /**
   * Table model over all registry values, sorted so that changed values come
   * first, then recently edited keys (in recency order), then the rest
   * alphabetically (case-insensitive).
   */
  private static class MyTableModel extends AbstractTableModel {
    private final List<RegistryValue> myAll;

    private MyTableModel() {
      myAll = Registry.getAll();
      final List<String> recent = getRecent();

      Collections.sort(myAll, (o1, o2) -> {
        final String key1 = o1.getKey();
        boolean changed1 = o1.isChangedFromDefault();
        boolean changed2 = o2.isChangedFromDefault();
        if (changed1 && !changed2) return -1;
        if (!changed1 && changed2) return 1;

        final String key2 = o2.getKey();
        final int i1 = recent.indexOf(key1);
        final int i2 = recent.indexOf(key2);
        final boolean c1 = i1 != -1;
        final boolean c2 = i2 != -1;
        if (c1 && !c2) return -1;
        if (!c1 && c2) return 1;
        if (c1 && c2) return i1 - i2;
        return key1.compareToIgnoreCase(key2);
      });
    }

    public void fireChanged() {
      fireTableDataChanged();
    }

    @Override
    public int getRowCount() {
      return myAll.size();
    }

    @Override
    public int getColumnCount() {
      return 3;
    }

    @Override
    public Object getValueAt(int rowIndex, int columnIndex) {
      RegistryValue value = getRegistryValue(rowIndex);
      switch (columnIndex) {
        case 0:
          // rendered as an icon; the model itself has no text for this column
          return "";
        case 1:
          return value.getKey();
        case 2:
          return value.asString();
        default:
          return value;
      }
    }

    private RegistryValue getRegistryValue(final int rowIndex) {
      return myAll.get(rowIndex);
    }

    @Override
    public boolean isCellEditable(int rowIndex, int columnIndex) {
      // only the "Value" column is editable
      return columnIndex == 2;
    }
  }

  /** Reads the list of recently edited keys persisted under {@link #RECENT_PROPERTIES_KEY}. */
  private static List<String> getRecent() {
    String value = PropertiesComponent.getInstance().getValue(RECENT_PROPERTIES_KEY);
    return StringUtil.isEmpty(value) ? new ArrayList<>(0) : StringUtil.split(value, "=");
  }

  /** Moves the given key to the front of the persisted recently-edited list. */
  private static void keyChanged(String key) {
    final List<String> recent = getRecent();
    recent.remove(key);
    recent.add(0, key);
    PropertiesComponent.getInstance().setValue(RECENT_PROPERTIES_KEY, StringUtil.join(recent, "="), "");
  }

  /**
   * Shows the modal Registry dialog.
   *
   * @return true when the dialog was closed via its default (Close/OK) path
   */
  public boolean show() {
    DialogWrapper dialog = new DialogWrapper(true) {
      {
        setTitle("Registry");
        setModal(true);
        init();
        invalidateActions();
      }

      private AbstractAction myCloseAction;

      @Nullable
      @Override
      protected JComponent createNorthPanel() {
        // warn non-internal users about the risks of editing the registry
        if (!ApplicationManager.getApplication().isInternal()) {
          JLabel warningLabel = new JLabel(XmlStringUtil.wrapInHtml("<b>Changing these values may cause unwanted behavior of " +
            ApplicationNamesInfo.getInstance().getFullProductName() + ". Please do not change these unless you have been asked.</b>"));
          warningLabel.setIcon(UIUtil.getWarningIcon());
          warningLabel.setForeground(JBColor.RED);
          return warningLabel;
        }
        return null;
      }

      @Override
      protected JComponent createCenterPanel() {
        return myContent;
      }

      @Override
      protected void dispose() {
        super.dispose();
        Disposer.dispose(RegistryUi.this);
      }

      @Override
      protected String getDimensionServiceKey() {
        return "Registry";
      }

      @Override
      public JComponent getPreferredFocusedComponent() {
        return myTable;
      }

      @NotNull
      @Override
      protected Action[] createActions() {
        return new Action[]{myRestoreDefaultsAction, myCloseAction};
      }

      @Override
      protected void createDefaultActions() {
        super.createDefaultActions();
        myCloseAction = new AbstractAction("Close") {
          @Override
          public void actionPerformed(@NotNull ActionEvent e) {
            processClose();
            doOKAction();
          }
        };
        myCloseAction.putValue(DialogWrapper.DEFAULT_ACTION, true);
      }

      @Override
      public void doCancelAction() {
        // commit any in-progress cell edit before closing
        final TableCellEditor cellEditor = myTable.getCellEditor();
        if (cellEditor != null) {
          cellEditor.stopCellEditing();
        }
        processClose();
        super.doCancelAction();
      }
    };

    return dialog.showAndGet();
  }

  /** On close: if any changed value requires a restart, offer to restart/shutdown now. */
  private void processClose() {
    if (Registry.getInstance().isRestartNeeded()) {
      ApplicationEx app = (ApplicationEx) ApplicationManager.getApplication();
      String message = "You need to restart " + ApplicationNamesInfo.getInstance().getFullProductName() + " for the changes to take effect";
      String action = app.isRestartCapable() ? "Restart" : "Shutdown";

      int r = Messages.showOkCancelDialog(myContent, message, "Restart Required", action + " Now", action + " Later", Messages.getQuestionIcon());
      if (r == Messages.OK) {
        ApplicationManager.getApplication().invokeLater(() -> app.restart(true), ModalityState.NON_MODAL);
      }
    }
  }

  /** Asks for confirmation, then reverts every registry value to its default. */
  private void restoreDefaults() {
    String message = "Are you sure you want to revert registry settings to default values?";
    int r = Messages.showYesNoDialog(myContent, message, "Revert To Defaults", Messages.getQuestionIcon());
    if (r == Messages.YES) {
      Registry.getInstance().restoreDefaults();
      myModel.fireChanged();
      invalidateActions();
    }
  }

  /** Keeps "Restore Defaults" enabled only while some value differs from its default. */
  private void invalidateActions() {
    myRestoreDefaultsAction.setEnabled(!Registry.getInstance().isInDefaultState());
  }

  @Override
  public void dispose() { }

  /** Renders the restart marker, key and value columns; booleans render as checkboxes. */
  private static class MyRenderer implements TableCellRenderer {
    private final JLabel myLabel = new JLabel();

    @NotNull
    @Override
    public Component getTableCellRendererComponent(@NotNull JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
      final RegistryValue v = ((MyTableModel)table.getModel()).getRegistryValue(row);
      myLabel.setIcon(null);
      myLabel.setText(null);
      myLabel.setHorizontalAlignment(SwingConstants.LEFT);

      // FIX: the previous revision computed the foreground color (dereferencing
      // v.isChangedFromDefault()) BEFORE its "v != null" check, so the guard
      // could never prevent an NPE. Bail out early so the check is effective.
      if (v == null) return myLabel;

      Color fg = isSelected ? table.getSelectionForeground() : v.isChangedFromDefault() ? JBColor.blue : table.getForeground();
      Color bg = isSelected ? table.getSelectionBackground() : table.getBackground();

      switch (column) {
        case 0:
          myLabel.setIcon(v.isRestartRequired() ? RESTART_ICON : null);
          myLabel.setHorizontalAlignment(SwingConstants.CENTER);
          break;
        case 1:
          myLabel.setText(v.getKey());
          break;
        case 2:
          if (v.asColor(null) != null) {
            myLabel.setIcon(createColoredIcon(v.asColor(null)));
          } else if (v.isBoolean()) {
            final JCheckBox box = new JCheckBox();
            box.setSelected(v.asBoolean());
            box.setBackground(bg);
            return box;
          } else {
            myLabel.setText(v.asString());
          }
      }

      myLabel.setOpaque(true);
      // changed-from-default values render bold
      myLabel.setFont(myLabel.getFont().deriveFont(v.isChangedFromDefault() ? Font.BOLD : Font.PLAIN));
      myLabel.setForeground(fg);
      myLabel.setBackground(bg);
      return myLabel;
    }
  }

  /** Cache of 16x16 solid-color icons used for color-valued registry entries. */
  private static final Map<Color, Icon> icons_cache = new HashMap<>();

  private static Icon createColoredIcon(Color color) {
    Icon icon = icons_cache.get(color);
    if (icon != null) return icon;

    final BufferedImage image = GraphicsEnvironment.getLocalGraphicsEnvironment()
      .getDefaultScreenDevice().getDefaultConfiguration()
      .createCompatibleImage(16, 16, Transparency.TRANSLUCENT);
    final Graphics g = image.getGraphics();
    g.setColor(color);
    g.fillRect(0, 0, 16, 16);
    g.dispose();

    icon = new ImageIcon(image);
    icons_cache.put(color, icon);
    return icon;
  }

  /**
   * Cell editor for the "Value" column: color chooser for color values,
   * checkbox for booleans, plain text field for everything else.
   */
  private class MyEditor extends AbstractCellEditor implements TableCellEditor {
    private final JTextField myField = new JTextField();
    private final JCheckBox myCheckBox = new JCheckBox();
    private RegistryValue myValue;

    @Override
    @Nullable
    public Component getTableCellEditorComponent(JTable table, Object value, boolean isSelected, int row, int column) {
      myValue = ((MyTableModel)table.getModel()).getRegistryValue(row);
      if (myValue.asColor(null) != null) {
        // colors are edited via a modal chooser; no in-table editor component
        final Color color = ColorChooser.chooseColor(table, "Choose color", myValue.asColor(Color.WHITE));
        if (color != null) {
          myValue.setValue(color.getRed() + "," + color.getGreen() + "," + color.getBlue());
          keyChanged(myValue.getKey());
        }
        return null;
      } else if (myValue.isBoolean()) {
        myCheckBox.setSelected(myValue.asBoolean());
        myCheckBox.setBackground(table.getBackground());
        return myCheckBox;
      } else {
        myField.setText(myValue.asString());
        myField.setBorder(null);
        myField.selectAll();
        return myField;
      }
    }

    @Override
    public boolean stopCellEditing() {
      if (myValue != null) {
        if (myValue.isBoolean()) {
          myValue.setValue(myCheckBox.isSelected());
        } else {
          myValue.setValue(myField.getText().trim());
        }
        keyChanged(myValue.getKey());
      }
      invalidateActions();
      return super.stopCellEditing();
    }

    @Override
    public Object getCellEditorValue() {
      return myValue;
    }
  }

  /** Dialog button that reverts all registry values after confirmation. */
  private class RestoreDefaultsAction extends AbstractAction {
    public RestoreDefaultsAction() {
      super("Restore Defaults");
    }

    @Override
    public void actionPerformed(@NotNull ActionEvent e) {
      restoreDefaults();
    }
  }
}
| |
package com.planet_ink.coffee_mud.Abilities.Thief;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2006-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Thief_MarkTrapped: a thief skill that places a visible "trapped" ambiance
 * mark on an item, exit, or room -- or removes an existing mark placed by the
 * same invoker. Each skill instance keeps at most the five most recent marks;
 * placing a sixth strips the oldest one (see marked()).
 */
public class Thief_MarkTrapped extends ThiefSkill
{
@Override
public String ID()
{
return "Thief_MarkTrapped";
}
private final static String localizedName = CMLib.lang().L("Mark Trapped");
@Override
public String name()
{
return localizedName;
}
// the mark can sit on items, exits or whole rooms
@Override
protected int canAffectCode()
{
return Ability.CAN_ITEMS|Ability.CAN_EXITS|Ability.CAN_ROOMS;
}
@Override
protected int canTargetCode()
{
return Ability.CAN_ITEMS|Ability.CAN_EXITS|Ability.CAN_ROOMS;
}
@Override
public int abstractQuality()
{
return Ability.QUALITY_INDIFFERENT;
}
private static final String[] triggerStrings =I(new String[] {"MARKTRAPPED"});
@Override
public String[] triggerStrings()
{
return triggerStrings;
}
@Override
public int classificationCode()
{
return Ability.ACODE_THIEF_SKILL|Ability.DOMAIN_DETRAP;
}
@Override
public int usageType()
{
return USAGE_MOVEMENT|USAGE_MANA;
}
// backing storage for abilityCode()/setAbilityCode()
public int code=0;
// most recently marked targets, capped at five entries (see marked())
public LinkedList<Physical> lastMarked = new LinkedList<Physical>();
@Override
public int abilityCode()
{
return code;
}
@Override
public void setAbilityCode(final int newCode)
{
code=newCode;
}
@Override
protected boolean ignoreCompounding()
{
return true;
}
// adds the visible "trapped" tag to the affected object's displayed stats
@Override
public void affectPhyStats(final Physical host, final PhyStats stats)
{
super.affectPhyStats(host,stats);
stats.addAmbiance("^Wtrapped^?");
}
/**
 * Records P as the newest mark. When five marks already exist, the oldest
 * one is dropped; if that old mark was placed by this same invoker, its
 * effect is uninvoked and removed from the target as well.
 */
public void marked(final Physical P)
{
synchronized(lastMarked)
{
if(lastMarked.size()>=5)
{
final Physical P2=lastMarked.removeFirst();
final Ability A=P2.fetchEffect(ID());
// only strip marks this same invoker placed
if((A!=null)&&(A.invoker()==invoker()))
{
A.unInvoke();
P2.delEffect(A);
P2.recoverPhyStats();
}
}
lastMarked.add(P);
}
}
@Override
public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
{
if((commands.size()<1)&&(givenTarget==null))
{
mob.tell(L("What item would you like to mark as trapped?"));
return false;
}
// target resolution order: a direction name (marks that exit), then
// "room"/"here" (marks the room itself), then any unworn item in scope
final int dir=CMLib.directions().getGoodDirectionCode(CMParms.combine(commands,0));
Physical item=givenTarget;
if((dir>=0)
&&(item==null)
&&(mob.location()!=null)
&&(mob.location().getExitInDir(dir)!=null)
&&(mob.location().getRoomInDir(dir)!=null))
item=mob.location().getExitInDir(dir);
if((item==null)
&&(CMParms.combine(commands,0).equalsIgnoreCase("room")
||CMParms.combine(commands,0).equalsIgnoreCase("here")))
item=mob.location();
if(item==null)
item=getAnyTarget(mob,commands,givenTarget,Wearable.FILTER_UNWORNONLY,false,true);
if(item==null)
return false;
if((!auto)&&(item instanceof MOB))
{
mob.tell(L("Umm.. you can't mark @x1 as trapped.",item.name()));
return false;
}
if(item instanceof Item)
{
// adjusted skill level (plus expertise) caps how heavy an item can be marked
if((!auto)
&&(item.phyStats().weight()>((adjustedLevel(mob,asLevel)*2)+(getXLEVELLevel(mob)*10))))
{
mob.tell(L("You aren't good enough to effectively mark anything that large."));
return false;
}
}
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
final boolean success=proficiencyCheck(mob,0,auto);
if(success)
{
CMMsg msg;
final Ability A=item.fetchEffect(ID());
// invoking on an already-marked target toggles the mark off instead
if((A!=null)&&((givenTarget==null)||(auto)))
msg=CMClass.getMsg(mob,item,null,CMMsg.MSG_THIEF_ACT,L("<S-NAME> remove(s) the mark on <T-NAME>."),CMMsg.MSG_THIEF_ACT,null,CMMsg.MSG_THIEF_ACT,null);
else
msg=CMClass.getMsg(mob,item,this,CMMsg.MSG_THIEF_ACT,L("<S-NAME> mark(s) <T-NAME> as trapped."),CMMsg.MSG_THIEF_ACT,null,CMMsg.MSG_THIEF_ACT,null);
if(mob.location().okMessage(mob,msg))
{
mob.location().send(mob,msg);
if(A!=null)
{
if((givenTarget==null)||(auto))
{
A.unInvoke();
item.delEffect(A);
}
}
else
{
marked(item);
this.beneficialAffect(mob, item, asLevel, 900); // approx an hour
}
item.recoverPhyStats();
}
}
else
beneficialVisualFizzle(mob,item,L("<S-NAME> attempt(s) to mark <T-NAME> as trapped, but fail(s)."));
return success;
}
}
| |
/******************************************************************
*
* CyberUPnP for Java
*
* Copyright (C) Satoshi Konno 2002-2003
*
* File: UPnP.java
*
* Revision;
*
* 11/18/02
* - first revision.
* 05/13/03
* - Added support for IPv6 and loopback address.
* 12/26/03
* - Added support for XML Parser
* 06/18/03
* - Added INMPR03 and INMPR03_VERSION.
* 04/14/06
* - Added some functions about time-to-live, and the default value is 4.
* 05/11/09
* - Changed loadDefaultXMLParser() to load org.cybergarage.xml.parser.XmlPullParser at first.
*
******************************************************************/
package org.cybergarage.upnp;
import org.cybergarage.net.HostInterface;
import org.cybergarage.soap.SOAP;
import org.cybergarage.upnp.ssdp.SSDP;
import org.cybergarage.util.Debug;
import org.cybergarage.xml.Parser;
/**
 * Global configuration and utility holder for the CyberLink UPnP stack:
 * enable/disable flags for address selection, UUID/BootId/ConfigId helpers,
 * XML-parser plumbing and the SSDP time-to-live setting.
 */
public class UPnP
{
    ////////////////////////////////////////////////
    // Constants
    ////////////////////////////////////////////////

    /**
     * Name of the system properties used to identifies the default XML Parser.<br>
     * The value of the properties MUST BE the fully qualified class name of<br>
     * XML Parser which CyberLink should use.
     *
     * <p>NOTE: the misspelling ("PROPERTTY") is kept intentionally -- this is a
     * public API constant that external code may already reference.</p>
     */
    public final static String XML_CLASS_PROPERTTY="cyberlink.upnp.xml.parser";

    public final static String NAME = "CyberLinkJava";
    public final static String VERSION = "3.0";

    public final static int SERVER_RETRY_COUNT = 100;
    public final static int DEFAULT_EXPIRED_DEVICE_EXTRA_TIME = 60;

    /**
     * Builds the SERVER header value, e.g. {@code "Linux/5.x UPnP/1.0 CyberLinkJava/3.0"}.
     *
     * @return server name string composed of OS name/version and stack name/version
     */
    public final static String getServerName()
    {
        String osName = System.getProperty("os.name");
        String osVer = System.getProperty("os.version");
        return osName + "/" + osVer + " UPnP/1.0 " + NAME + "/" + VERSION;
    }

    public final static String INMPR03 = "INMPR03";
    public final static String INMPR03_VERSION = "1.0";
    public final static int INMPR03_DISCOVERY_OVER_WIRELESS_COUNT = 4;

    public final static String XML_DECLARATION = "<?xml version=\"1.0\" encoding=\"utf-8\"?>";

    // largest allowed CONFIGID.UPNP.ORG value (2^24 - 1)
    public final static int CONFIGID_UPNP_ORG_MAX = 16777215;

    ////////////////////////////////////////////////
    // Enable / Disable
    ////////////////////////////////////////////////

    public final static int USE_ONLY_IPV6_ADDR = 1;
    public final static int USE_LOOPBACK_ADDR = 2;
    public final static int USE_IPV6_LINK_LOCAL_SCOPE = 3;
    public final static int USE_IPV6_SUBNET_SCOPE = 4;
    public final static int USE_IPV6_ADMINISTRATIVE_SCOPE = 5;
    public final static int USE_IPV6_SITE_LOCAL_SCOPE = 6;
    public final static int USE_IPV6_GLOBAL_SCOPE = 7;
    public final static int USE_SSDP_SEARCHRESPONSE_MULTIPLE_INTERFACES = 8;
    public final static int USE_ONLY_IPV4_ADDR = 9;

    /**
     * Turns on the given option flag. Address-family flags are forwarded to
     * {@link HostInterface}; IPv6 scope flags select the SSDP multicast address.
     *
     * @param value one of the USE_* constants
     */
    public final static void setEnable(int value)
    {
        switch (value) {
        case USE_ONLY_IPV6_ADDR:
            {
                HostInterface.USE_ONLY_IPV6_ADDR = true;
            }
            break;
        case USE_ONLY_IPV4_ADDR:
            {
                HostInterface.USE_ONLY_IPV4_ADDR = true;
            }
            break;
        case USE_LOOPBACK_ADDR:
            {
                HostInterface.USE_LOOPBACK_ADDR = true;
            }
            break;
        case USE_IPV6_LINK_LOCAL_SCOPE:
            {
                SSDP.setIPv6Address(SSDP.IPV6_LINK_LOCAL_ADDRESS);
            }
            break;
        case USE_IPV6_SUBNET_SCOPE:
            {
                SSDP.setIPv6Address(SSDP.IPV6_SUBNET_ADDRESS);
            }
            break;
        case USE_IPV6_ADMINISTRATIVE_SCOPE:
            {
                SSDP.setIPv6Address(SSDP.IPV6_ADMINISTRATIVE_ADDRESS);
            }
            break;
        case USE_IPV6_SITE_LOCAL_SCOPE:
            {
                SSDP.setIPv6Address(SSDP.IPV6_SITE_LOCAL_ADDRESS);
            }
            break;
        case USE_IPV6_GLOBAL_SCOPE:
            {
                SSDP.setIPv6Address(SSDP.IPV6_GLOBAL_ADDRESS);
            }
            break;
        }
    }

    /**
     * Turns off the given option flag. Only the address-family flags can be
     * disabled; IPv6 scope selections have no "off" state here.
     *
     * @param value one of the USE_* constants
     */
    public final static void setDisable(int value)
    {
        switch (value) {
        case USE_ONLY_IPV6_ADDR:
            {
                HostInterface.USE_ONLY_IPV6_ADDR = false;
            }
            break;
        case USE_ONLY_IPV4_ADDR:
            {
                HostInterface.USE_ONLY_IPV4_ADDR = false;
            }
            break;
        case USE_LOOPBACK_ADDR:
            {
                HostInterface.USE_LOOPBACK_ADDR = false;
            }
            break;
        }
    }

    /**
     * Reports whether the given address-family flag is currently enabled.
     * Unknown flags (including the IPv6 scope constants) report false.
     *
     * @param value one of the USE_* constants
     * @return current state of the flag, or false when not queryable
     */
    public final static boolean isEnabled(int value)
    {
        switch (value) {
        case USE_ONLY_IPV6_ADDR:
            {
                return HostInterface.USE_ONLY_IPV6_ADDR;
            }
        case USE_ONLY_IPV4_ADDR:
            {
                return HostInterface.USE_ONLY_IPV4_ADDR;
            }
        case USE_LOOPBACK_ADDR:
            {
                return HostInterface.USE_LOOPBACK_ADDR;
            }
        }
        return false;
    }

    ////////////////////////////////////////////////
    // UUID
    ////////////////////////////////////////////////

    /**
     * Formats the low 16 bits of the seed as exactly four lowercase hex digits
     * (one group of the pseudo-UUID built by {@link #createUUID()}).
     */
    private static final String toUUID(int seed)
    {
        // %04x zero-pads to four lowercase hex digits -- equivalent to the
        // original Integer.toString(..., 16) plus manual "0"-padding loop,
        // without the redundant (int) cast and string concatenation
        return String.format("%04x", seed & 0xFFFF);
    }

    /**
     * Creates a time/random-based pseudo-UUID of four 16-bit hex groups.
     * NOTE(review): this is not an RFC 4122 UUID; the 0xA000/0xE000 masks only
     * approximate version/variant bits -- kept as-is for compatibility.
     */
    public static final String createUUID()
    {
        long time1 = System.currentTimeMillis();
        long time2 = (long)((double)System.currentTimeMillis() * Math.random());
        return
            toUUID((int)(time1 & 0xFFFF)) + "-" +
            toUUID((int)((time1 >> 32) | 0xA000) & 0xFFFF) + "-" +
            toUUID((int)(time2 & 0xFFFF)) + "-" +
            toUUID((int)((time2 >> 32) | 0xE000) & 0xFFFF);
    }

    ////////////////////////////////////////////////
    // BootId
    ////////////////////////////////////////////////

    /** Returns a BOOTID.UPNP.ORG value: the current time in whole seconds. */
    public static final int createBootId()
    {
        return (int)(System.currentTimeMillis() / 1000L);
    }

    ////////////////////////////////////////////////
    // ConfigId
    ////////////////////////////////////////////////

    /**
     * Derives a CONFIGID.UPNP.ORG value from a description document by summing
     * its characters' code points, kept below {@link #CONFIGID_UPNP_ORG_MAX}.
     * (Method name misspelling is preserved -- it is public API.)
     *
     * @param configXml description XML, may be null (yields 0)
     * @return config id in the range [0, CONFIGID_UPNP_ORG_MAX)
     */
    public static final int caluculateConfigId(String configXml)
    {
        if (configXml == null)
            return 0;
        int configId = 0;
        int configLen = configXml.length();
        for (int n=0; n<configLen; n++) {
            // NOTE(review): codePointAt is indexed by char, so supplementary
            // characters contribute twice -- preserved for compatibility
            configId += configXml.codePointAt(n);
            if (configId < CONFIGID_UPNP_ORG_MAX)
                continue;
            configId = configId % CONFIGID_UPNP_ORG_MAX;
        }
        return configId;
    }

    ////////////////////////////////////////////////
    // XML Parser
    ////////////////////////////////////////////////

    private static Parser xmlParser;

    /** Installs the XML parser for the whole stack (also forwarded to SOAP). */
    public final static void setXMLParser(Parser parser)
    {
        xmlParser = parser;
        SOAP.setXMLParser(parser);
    }

    /**
     * Returns the configured XML parser, lazily loading a default one on first
     * use.
     *
     * @throws RuntimeException when no parser is set and none can be loaded
     */
    public final static Parser getXMLParser()
    {
        if(xmlParser == null){
            xmlParser = loadDefaultXMLParser();
            if(xmlParser == null)
                // fixed typo in the error message ("laod" -> "load")
                throw new RuntimeException("No XML parser defined. And unable to load any. \n" +
                        "Try to invoke UPnP.setXMLParser before UPnP.getXMLParser");
            SOAP.setXMLParser(xmlParser);
        }
        return xmlParser;
    }

    /**
     * This method loads the default XML Parser using the following behavior:
     * - First if present loads the parsers specified by the system property {@link UPnP#XML_CLASS_PROPERTTY}<br>
     * - Second by a fall-back technique, it tries to load the XMLParser from one<br>
     * of the following classes: {@link JaxpParser}, {@link kXML2Parser}, {@link XercesParser}
     *
     * @return {@link Parser} which has been loaded successfully or null otherwise
     *
     * @since 1.8.0
     */
    private static Parser loadDefaultXMLParser() {
        Parser parser = null;
        String[] parserClass = new String[]{
                System.getProperty(XML_CLASS_PROPERTTY),
                "org.cybergarage.xml.parser.XmlPullParser",
                "org.cybergarage.xml.parser.JaxpParser",
                "org.cybergarage.xml.parser.kXML2Parser",
                "org.cybergarage.xml.parser.XercesParser"
        };
        for (int i = 0; i < parserClass.length; i++) {
            if(parserClass[i]==null)
                continue;
            try {
                // broad Throwable catch is deliberate: any class-loading or
                // instantiation failure should just fall through to the next parser
                parser = (Parser) Class.forName(parserClass[i]).newInstance();
                return parser;
            } catch (Throwable e) {
                Debug.warning("Unable to load "+parserClass[i]+" as XMLParser due to "+e);
            }
        }
        return null;
    }

    ////////////////////////////////////////////////
    // TTL
    ////////////////////////////////////////////////

    public final static int DEFAULT_TTL = 4;

    private static int timeToLive = DEFAULT_TTL;

    /** Sets the multicast time-to-live used by the stack. */
    public final static void setTimeToLive(int value)
    {
        timeToLive = value;
    }

    public final static int getTimeToLive()
    {
        return timeToLive;
    }

    ////////////////////////////////////////////////
    // Initialize
    ////////////////////////////////////////////////

    static
    {
        ////////////////////////////
        // Interface Option
        ////////////////////////////

        //setXMLParser(new kXML2Parser());

        ////////////////////////////
        // TimeToLive
        ////////////////////////////

        setTimeToLive(DEFAULT_TTL);

        ////////////////////////////
        // Debug Option
        ////////////////////////////

        //Debug.on();
    }

    /** Dummy function whose only purpose is to trigger the static initializer. */
    public final static void initialize()
    {
        // Dummy function to call UPnP.static
    }
}
| |
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.httpd.rpc.project;
import com.google.gerrit.common.ChangeHooks;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.common.data.AccessSection;
import com.google.gerrit.common.data.GlobalCapability;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.extensions.api.changes.AddReviewerInput;
import com.google.gerrit.extensions.restapi.ResourceNotFoundException;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetAncestor;
import com.google.gerrit.reviewdb.client.PatchSetInfo;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.client.RevId;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.ChangeUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.account.GroupBackend;
import com.google.gerrit.server.change.ChangeResource;
import com.google.gerrit.server.change.ChangesCollection;
import com.google.gerrit.server.change.PostReviewers;
import com.google.gerrit.server.config.AllProjectsNameProvider;
import com.google.gerrit.server.git.MetaDataUpdate;
import com.google.gerrit.server.git.ProjectConfig;
import com.google.gerrit.server.group.SystemGroupBackend;
import com.google.gerrit.server.index.ChangeIndexer;
import com.google.gerrit.server.mail.CreateChangeSender;
import com.google.gerrit.server.patch.PatchSetInfoFactory;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectControl;
import com.google.gerrit.server.project.SetParent;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.assistedinject.Assisted;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.revwalk.RevCommit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Saves an edited project access (permissions) configuration for review:
 * commits the updated {@code project.config} to a new ref, wraps it in a new
 * change on {@code refs/meta/config}, and adds the project owners (and, when
 * the parent project is being changed, the server administrators) as
 * reviewers.
 */
public class ReviewProjectAccess extends ProjectAccessHandler<Change.Id> {
  private static final Logger log =
      LoggerFactory.getLogger(ReviewProjectAccess.class);
  /** Assisted-injection factory for creating handler instances. */
  interface Factory {
    ReviewProjectAccess create(
        @Assisted("projectName") Project.NameKey projectName,
        @Nullable @Assisted ObjectId base,
        @Assisted List<AccessSection> sectionList,
        @Nullable @Assisted("parentProjectName") Project.NameKey parentProjectName,
        @Nullable @Assisted String message);
  }
  private final ReviewDb db;
  private final IdentifiedUser user;
  private final PatchSetInfoFactory patchSetInfoFactory;
  private final Provider<PostReviewers> reviewersProvider;
  private final ChangeIndexer indexer;
  private final ChangeHooks hooks;
  private final CreateChangeSender.Factory createChangeSenderFactory;
  private final ProjectCache projectCache;
  private final ChangesCollection changes;
  @Inject
  ReviewProjectAccess(final ProjectControl.Factory projectControlFactory,
      GroupBackend groupBackend,
      MetaDataUpdate.User metaDataUpdateFactory, ReviewDb db,
      IdentifiedUser user, PatchSetInfoFactory patchSetInfoFactory,
      Provider<PostReviewers> reviewersProvider,
      ChangeIndexer indexer,
      ChangeHooks hooks,
      CreateChangeSender.Factory createChangeSenderFactory,
      ProjectCache projectCache,
      AllProjectsNameProvider allProjects,
      ChangesCollection changes,
      Provider<SetParent> setParent,
      @Assisted("projectName") Project.NameKey projectName,
      @Nullable @Assisted ObjectId base,
      @Assisted List<AccessSection> sectionList,
      @Nullable @Assisted("parentProjectName") Project.NameKey parentProjectName,
      @Nullable @Assisted String message) {
    // The trailing "false" asks the superclass not to apply the update
    // directly; this handler creates a review change instead.
    super(projectControlFactory, groupBackend, metaDataUpdateFactory,
        allProjects, setParent, projectName, base, sectionList,
        parentProjectName, message, false);
    this.db = db;
    this.user = user;
    this.patchSetInfoFactory = patchSetInfoFactory;
    this.reviewersProvider = reviewersProvider;
    this.indexer = indexer;
    this.hooks = hooks;
    this.createChangeSenderFactory = createChangeSenderFactory;
    this.projectCache = projectCache;
    this.changes = changes;
  }
  /**
   * Commits the config edit to a new ref and opens a change for it.
   *
   * @return the ID of the newly created change, or {@code null} if the commit
   *     is identical to the base revision (nothing to review)
   */
  @Override
  protected Change.Id updateProjectConfig(ProjectConfig config,
      MetaDataUpdate md, boolean parentProjectUpdate) throws IOException,
      OrmException {
    Change.Id changeId = new Change.Id(db.nextChangeId());
    PatchSet ps =
        new PatchSet(new PatchSet.Id(changeId, Change.INITIAL_PATCH_SET_ID));
    RevCommit commit = config.commitToNewRef(md, ps.getRefName());
    // If the edit produced exactly the base commit there is nothing to review.
    if (commit.getId().equals(base)) {
      return null;
    }
    // The change lives on the project's refs/meta/config branch.
    Change change = new Change(
        new Change.Key("I" + commit.name()),
        changeId,
        user.getAccountId(),
        new Branch.NameKey(
            config.getProject().getNameKey(),
            RefNames.REFS_CONFIG),
        TimeUtil.nowTs());
    ps.setCreatedOn(change.getCreatedOn());
    ps.setUploader(change.getOwner());
    ps.setRevision(new RevId(commit.name()));
    PatchSetInfo info = patchSetInfoFactory.get(commit, ps.getId());
    change.setCurrentPatchSet(info);
    ChangeUtil.updated(change);
    // Insert ancestors, patch set and change atomically. The rollback() in
    // the finally block is a no-op once commit() has succeeded, and undoes
    // any partial inserts if an exception escapes before it.
    db.changes().beginTransaction(changeId);
    try {
      insertAncestors(ps.getId(), commit);
      db.patchSets().insert(Collections.singleton(ps));
      db.changes().insert(Collections.singleton(change));
      db.commit();
    } finally {
      db.rollback();
    }
    indexer.index(db, change);
    hooks.doPatchsetCreatedHook(change, ps, db);
    // Email failures are logged but do not abort the change creation.
    try {
      CreateChangeSender cm =
          createChangeSenderFactory.create(change);
      cm.setFrom(change.getOwner());
      cm.setPatchSet(ps, info);
      cm.send();
    } catch (Exception err) {
      log.error("Cannot send email for new change " + change.getId(), err);
    }
    ChangeResource rsrc;
    try {
      rsrc = changes.parse(changeId);
    } catch (ResourceNotFoundException e) {
      // The change was just created, so a lookup failure is unexpected;
      // surface it as an IOException to the caller.
      throw new IOException(e);
    }
    addProjectOwnersAsReviewers(rsrc);
    if (parentProjectUpdate) {
      addAdministratorsAsReviewers(rsrc);
    }
    return changeId;
  }
  /** Records one PatchSetAncestor row per parent of the config commit. */
  private void insertAncestors(PatchSet.Id id, RevCommit src)
      throws OrmException {
    final int cnt = src.getParentCount();
    List<PatchSetAncestor> toInsert = new ArrayList<>(cnt);
    for (int p = 0; p < cnt; p++) {
      PatchSetAncestor a;
      // Ancestor positions are 1-based.
      a = new PatchSetAncestor(new PatchSetAncestor.Id(id, p + 1));
      a.setAncestorRevision(new RevId(src.getParent(p).name()));
      toInsert.add(a);
    }
    db.patchSetAncestors().insert(toInsert);
  }
  /** Adds the built-in "Project Owners" group as a reviewer, best effort. */
  private void addProjectOwnersAsReviewers(ChangeResource rsrc) {
    final String projectOwners =
        groupBackend.get(SystemGroupBackend.PROJECT_OWNERS).getName();
    try {
      AddReviewerInput input = new AddReviewerInput();
      input.reviewer = projectOwners;
      reviewersProvider.get().apply(rsrc, input);
    } catch (Exception e) {
      // one of the owner groups is not visible to the user and that is why it
      // can't be added as reviewer
    }
  }
  /**
   * Adds every group holding the ADMINISTRATE_SERVER capability as a
   * reviewer; failures for individual groups are ignored (best effort).
   */
  private void addAdministratorsAsReviewers(ChangeResource rsrc) {
    List<PermissionRule> adminRules =
        projectCache.getAllProjects().getConfig()
            .getAccessSection(AccessSection.GLOBAL_CAPABILITIES)
            .getPermission(GlobalCapability.ADMINISTRATE_SERVER).getRules();
    for (PermissionRule r : adminRules) {
      try {
        AddReviewerInput input = new AddReviewerInput();
        input.reviewer = r.getGroup().getUUID().get();
        reviewersProvider.get().apply(rsrc, input);
      } catch (Exception e) {
        // ignore
      }
    }
  }
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.datetimepicker.date;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat;
import android.text.format.DateFormat;
import android.text.format.DateUtils;
import android.text.format.Time;
import android.util.SparseArray;
import android.view.MotionEvent;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import de.bennir.DVBViewerController.R;
import com.android.datetimepicker.Utils;
import com.android.datetimepicker.date.SimpleMonthAdapter.CalendarDay;
import com.googlecode.eyesfree.utils.TouchExplorationHelper;
import java.security.InvalidParameterException;
import java.util.Calendar;
import java.util.Formatter;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
/**
* A calendar-like view displaying a specified month and the appropriate selectable day numbers
* within the specified month.
*/
public class SimpleMonthView extends View {
    private static final String TAG = "SimpleMonthView";
    /**
     * These params can be passed into the view to control how it appears.
     * {@link #VIEW_PARAMS_WEEK} is the only required field, though the default
     * values are unlikely to fit most layouts correctly.
     */
    /**
     * This sets the height of this week in pixels
     */
    public static final String VIEW_PARAMS_HEIGHT = "height";
    /**
     * This specifies the month this view represents, as defined by
     * {@link Time#month} [0-11].
     */
    public static final String VIEW_PARAMS_MONTH = "month";
    /**
     * This specifies the year this view represents, e.g. 2013.
     */
    public static final String VIEW_PARAMS_YEAR = "year";
    /**
     * This sets one of the days in this view as selected {@link Time#SUNDAY}
     * through {@link Time#SATURDAY}.
     */
    public static final String VIEW_PARAMS_SELECTED_DAY = "selected_day";
    /**
     * Which day the week should start on. {@link Time#SUNDAY} through
     * {@link Time#SATURDAY}.
     */
    public static final String VIEW_PARAMS_WEEK_START = "week_start";
    /**
     * How many days to display at a time. Days will be displayed starting with
     * {@link #mWeekStart}.
     */
    public static final String VIEW_PARAMS_NUM_DAYS = "num_days";
    /**
     * Which month is currently in focus, as defined by {@link Time#month}
     * [0-11].
     */
    public static final String VIEW_PARAMS_FOCUS_MONTH = "focus_month";
    /**
     * If this month should display week numbers. false if 0, true otherwise.
     */
    public static final String VIEW_PARAMS_SHOW_WK_NUM = "show_wk_num";
    protected static int DEFAULT_HEIGHT = 32;
    protected static int MIN_HEIGHT = 10;
    protected static final int DEFAULT_SELECTED_DAY = -1;
    protected static final int DEFAULT_WEEK_START = Calendar.SUNDAY;
    protected static final int DEFAULT_NUM_DAYS = 7;
    protected static final int DEFAULT_SHOW_WK_NUM = 0;
    protected static final int DEFAULT_FOCUS_MONTH = -1;
    protected static final int DEFAULT_NUM_ROWS = 6;
    protected static final int MAX_NUM_ROWS = 6;
    private static final int SELECTED_CIRCLE_ALPHA = 60;
    protected static int DAY_SEPARATOR_WIDTH = 1;
    protected static int MINI_DAY_NUMBER_TEXT_SIZE;
    protected static int MONTH_LABEL_TEXT_SIZE;
    protected static int MONTH_DAY_LABEL_TEXT_SIZE;
    protected static int MONTH_HEADER_SIZE;
    protected static int DAY_SELECTED_CIRCLE_SIZE;
    // used for scaling to the device density
    protected static float mScale = 0;
    // affects the padding on the sides of this view
    protected int mPadding = 0;
    private String mDayOfWeekTypeface;
    private String mMonthTitleTypeface;
    protected Paint mMonthNumPaint;
    protected Paint mMonthTitlePaint;
    protected Paint mMonthTitleBGPaint;
    protected Paint mSelectedCirclePaint;
    protected Paint mMonthDayLabelPaint;
    private final Formatter mFormatter;
    private final StringBuilder mStringBuilder;
    // The Julian day of the first day displayed by this item
    protected int mFirstJulianDay = -1;
    // The month of the first day in this week
    protected int mFirstMonth = -1;
    // The month of the last day in this week
    protected int mLastMonth = -1;
    protected int mMonth;
    protected int mYear;
    // Quick reference to the width of this view, matches parent
    protected int mWidth;
    // The height this view should draw at in pixels, set by height param
    protected int mRowHeight = DEFAULT_HEIGHT;
    // If this view contains the today
    protected boolean mHasToday = false;
    // Which day is selected [0-6] or -1 if no day is selected
    protected int mSelectedDay = -1;
    // Which day is today [0-6] or -1 if no day is today
    protected int mToday = DEFAULT_SELECTED_DAY;
    // Which day of the week to start on [0-6]
    protected int mWeekStart = DEFAULT_WEEK_START;
    // How many days to display
    protected int mNumDays = DEFAULT_NUM_DAYS;
    // The number of days + a spot for week number if it is displayed
    protected int mNumCells = mNumDays;
    // The left edge of the selected day
    protected int mSelectedLeft = -1;
    // The right edge of the selected day
    protected int mSelectedRight = -1;
    private final Calendar mCalendar;
    private final Calendar mDayLabelCalendar;
    private final MonthViewNodeProvider mNodeProvider;
    private int mNumRows = DEFAULT_NUM_ROWS;
    // Optional listener for handling day click actions
    private OnDayClickListener mOnDayClickListener;
    // Whether to prevent setting the accessibility delegate
    private boolean mLockAccessibilityDelegate;
    protected int mDayTextColor;
    protected int mTodayNumberColor;
    protected int mMonthTitleColor;
    protected int mMonthTitleBGColor;

    public SimpleMonthView(Context context) {
        super(context);
        Resources res = context.getResources();
        mDayLabelCalendar = Calendar.getInstance();
        mCalendar = Calendar.getInstance();
        mDayOfWeekTypeface = res.getString(R.string.day_of_week_label_typeface);
        mMonthTitleTypeface = res.getString(R.string.sans_serif);
        mDayTextColor = res.getColor(R.color.date_picker_text_normal);
        mTodayNumberColor = res.getColor(R.color.blue);
        mMonthTitleColor = res.getColor(R.color.white);
        mMonthTitleBGColor = res.getColor(R.color.circle_background);
        mStringBuilder = new StringBuilder(50);
        mFormatter = new Formatter(mStringBuilder, Locale.getDefault());
        MINI_DAY_NUMBER_TEXT_SIZE = res.getDimensionPixelSize(R.dimen.day_number_size);
        MONTH_LABEL_TEXT_SIZE = res.getDimensionPixelSize(R.dimen.month_label_size);
        MONTH_DAY_LABEL_TEXT_SIZE = res.getDimensionPixelSize(R.dimen.month_day_label_text_size);
        MONTH_HEADER_SIZE = res.getDimensionPixelOffset(R.dimen.month_list_item_header_height);
        DAY_SELECTED_CIRCLE_SIZE = res
                .getDimensionPixelSize(R.dimen.day_number_select_circle_radius);
        mRowHeight = (res.getDimensionPixelOffset(R.dimen.date_picker_view_animator_height)
                - MONTH_HEADER_SIZE) / MAX_NUM_ROWS;
        // Set up accessibility components.
        mNodeProvider = new MonthViewNodeProvider(context, this);
        ViewCompat.setAccessibilityDelegate(this, mNodeProvider.getAccessibilityDelegate());
        ViewCompat.setImportantForAccessibility(this, ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_YES);
        mLockAccessibilityDelegate = true;
        // Sets up any standard paints that will be used
        initView();
    }

    @Override
    public void setAccessibilityDelegate(AccessibilityDelegate delegate) {
        // Workaround for a JB MR1 issue where accessibility delegates on
        // top-level ListView items are overwritten.
        if (!mLockAccessibilityDelegate) {
            super.setAccessibilityDelegate(delegate);
        }
    }

    public void setOnDayClickListener(OnDayClickListener listener) {
        mOnDayClickListener = listener;
    }

    @Override
    public boolean onHoverEvent(MotionEvent event) {
        // First right-of-refusal goes the touch exploration helper.
        if (mNodeProvider.onHover(this, event)) {
            return true;
        }
        return super.onHoverEvent(event);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_UP:
                final CalendarDay day = getDayFromLocation(event.getX(), event.getY());
                if (day != null) {
                    onDayClick(day);
                }
                break;
        }
        return true;
    }

    /**
     * Sets up the text and style properties for painting. Override this if you
     * want to use a different paint.
     */
    protected void initView() {
        mMonthTitlePaint = new Paint();
        mMonthTitlePaint.setFakeBoldText(true);
        mMonthTitlePaint.setAntiAlias(true);
        mMonthTitlePaint.setTextSize(MONTH_LABEL_TEXT_SIZE);
        mMonthTitlePaint.setTypeface(Typeface.create(mMonthTitleTypeface, Typeface.BOLD));
        mMonthTitlePaint.setColor(mDayTextColor);
        mMonthTitlePaint.setTextAlign(Align.CENTER);
        mMonthTitlePaint.setStyle(Style.FILL);
        mMonthTitleBGPaint = new Paint();
        mMonthTitleBGPaint.setFakeBoldText(true);
        mMonthTitleBGPaint.setAntiAlias(true);
        mMonthTitleBGPaint.setColor(mMonthTitleBGColor);
        mMonthTitleBGPaint.setTextAlign(Align.CENTER);
        mMonthTitleBGPaint.setStyle(Style.FILL);
        mSelectedCirclePaint = new Paint();
        mSelectedCirclePaint.setFakeBoldText(true);
        mSelectedCirclePaint.setAntiAlias(true);
        mSelectedCirclePaint.setColor(mTodayNumberColor);
        mSelectedCirclePaint.setTextAlign(Align.CENTER);
        mSelectedCirclePaint.setStyle(Style.FILL);
        mSelectedCirclePaint.setAlpha(SELECTED_CIRCLE_ALPHA);
        mMonthDayLabelPaint = new Paint();
        mMonthDayLabelPaint.setAntiAlias(true);
        mMonthDayLabelPaint.setTextSize(MONTH_DAY_LABEL_TEXT_SIZE);
        mMonthDayLabelPaint.setColor(mDayTextColor);
        mMonthDayLabelPaint.setTypeface(Typeface.create(mDayOfWeekTypeface, Typeface.NORMAL));
        mMonthDayLabelPaint.setStyle(Style.FILL);
        mMonthDayLabelPaint.setTextAlign(Align.CENTER);
        mMonthDayLabelPaint.setFakeBoldText(true);
        mMonthNumPaint = new Paint();
        mMonthNumPaint.setAntiAlias(true);
        mMonthNumPaint.setTextSize(MINI_DAY_NUMBER_TEXT_SIZE);
        mMonthNumPaint.setStyle(Style.FILL);
        mMonthNumPaint.setTextAlign(Align.CENTER);
        mMonthNumPaint.setFakeBoldText(false);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        drawMonthTitle(canvas);
        drawMonthDayLabels(canvas);
        drawMonthNums(canvas);
    }

    private int mDayOfWeekStart = 0;

    /**
     * Sets all the parameters for displaying this week. The only required
     * parameters are the month and year. Other parameters have a default value
     * and will only update if a new value is included, except for focus month,
     * which will always default to no focus month if no value is passed in. See
     * {@link #VIEW_PARAMS_HEIGHT} for more info on parameters.
     *
     * @param params A map of the new parameters, see
     *            {@link #VIEW_PARAMS_HEIGHT}
     */
    public void setMonthParams(HashMap<String, Integer> params) {
        // BUG FIX: the original used &&, which only threw when BOTH keys were
        // missing; with exactly one missing, params.get(...) below would NPE
        // on auto-unboxing instead of raising this descriptive exception.
        if (!params.containsKey(VIEW_PARAMS_MONTH) || !params.containsKey(VIEW_PARAMS_YEAR)) {
            throw new InvalidParameterException("You must specify the month and year for this view");
        }
        setTag(params);
        // We keep the current value for any params not present
        if (params.containsKey(VIEW_PARAMS_HEIGHT)) {
            mRowHeight = params.get(VIEW_PARAMS_HEIGHT);
            if (mRowHeight < MIN_HEIGHT) {
                mRowHeight = MIN_HEIGHT;
            }
        }
        if (params.containsKey(VIEW_PARAMS_SELECTED_DAY)) {
            mSelectedDay = params.get(VIEW_PARAMS_SELECTED_DAY);
        }
        // Allocate space for caching the day numbers and focus values
        mMonth = params.get(VIEW_PARAMS_MONTH);
        mYear = params.get(VIEW_PARAMS_YEAR);
        // Figure out what day today is
        final Time today = new Time(Time.getCurrentTimezone());
        today.setToNow();
        mHasToday = false;
        mToday = -1;
        mCalendar.set(Calendar.MONTH, mMonth);
        mCalendar.set(Calendar.YEAR, mYear);
        mCalendar.set(Calendar.DAY_OF_MONTH, 1);
        mDayOfWeekStart = mCalendar.get(Calendar.DAY_OF_WEEK);
        if (params.containsKey(VIEW_PARAMS_WEEK_START)) {
            mWeekStart = params.get(VIEW_PARAMS_WEEK_START);
        } else {
            mWeekStart = mCalendar.getFirstDayOfWeek();
        }
        mNumCells = Utils.getDaysInMonth(mMonth, mYear);
        for (int i = 0; i < mNumCells; i++) {
            final int day = i + 1;
            if (sameDay(day, today)) {
                mHasToday = true;
                mToday = day;
            }
        }
        mNumRows = calculateNumRows();
        // Invalidate cached accessibility information.
        mNodeProvider.invalidateParent();
    }

    /** Resets the row count so the view can be recycled for another month. */
    public void reuse() {
        mNumRows = DEFAULT_NUM_ROWS;
        requestLayout();
    }

    /** Number of week rows needed to show all days, given the start offset. */
    private int calculateNumRows() {
        int offset = findDayOffset();
        int dividend = (offset + mNumCells) / mNumDays;
        int remainder = (offset + mNumCells) % mNumDays;
        return (dividend + (remainder > 0 ? 1 : 0));
    }

    /** True if {@code day} in the displayed month/year equals today's date. */
    private boolean sameDay(int day, Time today) {
        return mYear == today.year &&
                mMonth == today.month &&
                day == today.monthDay;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), mRowHeight * mNumRows
                + MONTH_HEADER_SIZE);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        mWidth = w;
        // Invalidate cached accessibility information.
        mNodeProvider.invalidateParent();
    }

    /** Formats the header text, e.g. "March 2013", for the shown month. */
    private String getMonthAndYearString() {
        int flags = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_YEAR
                | DateUtils.FORMAT_NO_MONTH_DAY;
        mStringBuilder.setLength(0);
        long millis = mCalendar.getTimeInMillis();
        return DateUtils.formatDateRange(getContext(), mFormatter, millis, millis, flags,
                Time.getCurrentTimezone()).toString();
    }

    private void drawMonthTitle(Canvas canvas) {
        int x = (mWidth + 2 * mPadding) / 2;
        int y = (MONTH_HEADER_SIZE - MONTH_DAY_LABEL_TEXT_SIZE) / 2 + (MONTH_LABEL_TEXT_SIZE / 3);
        canvas.drawText(getMonthAndYearString(), x, y, mMonthTitlePaint);
    }

    private void drawMonthDayLabels(Canvas canvas) {
        int y = MONTH_HEADER_SIZE - (MONTH_DAY_LABEL_TEXT_SIZE / 2);
        int dayWidthHalf = (mWidth - mPadding * 2) / (mNumDays * 2);
        for (int i = 0; i < mNumDays; i++) {
            int calendarDay = (i + mWeekStart) % mNumDays;
            int x = (2 * i + 1) * dayWidthHalf + mPadding;
            mDayLabelCalendar.set(Calendar.DAY_OF_WEEK, calendarDay);
            canvas.drawText(mDayLabelCalendar.getDisplayName(Calendar.DAY_OF_WEEK, Calendar.SHORT,
                    Locale.getDefault()).toUpperCase(Locale.getDefault()), x, y,
                    mMonthDayLabelPaint);
        }
    }

    /**
     * Draws the week and month day numbers for this week. Override this method
     * if you need different placement.
     *
     * @param canvas The canvas to draw on
     */
    protected void drawMonthNums(Canvas canvas) {
        int y = (((mRowHeight + MINI_DAY_NUMBER_TEXT_SIZE) / 2) - DAY_SEPARATOR_WIDTH)
                + MONTH_HEADER_SIZE;
        int dayWidthHalf = (mWidth - mPadding * 2) / (mNumDays * 2);
        int j = findDayOffset();
        for (int dayNumber = 1; dayNumber <= mNumCells; dayNumber++) {
            int x = (2 * j + 1) * dayWidthHalf + mPadding;
            if (mSelectedDay == dayNumber) {
                canvas.drawCircle(x, y - (MINI_DAY_NUMBER_TEXT_SIZE / 3), DAY_SELECTED_CIRCLE_SIZE,
                        mSelectedCirclePaint);
            }
            // Today's number is highlighted; all others use the normal color.
            if (mHasToday && mToday == dayNumber) {
                mMonthNumPaint.setColor(mTodayNumberColor);
            } else {
                mMonthNumPaint.setColor(mDayTextColor);
            }
            canvas.drawText(String.format("%d", dayNumber), x, y, mMonthNumPaint);
            j++;
            if (j == mNumDays) {
                j = 0;
                y += mRowHeight;
            }
        }
    }

    /** Column offset [0..mNumDays-1] of day 1, relative to mWeekStart. */
    private int findDayOffset() {
        return (mDayOfWeekStart < mWeekStart ? (mDayOfWeekStart + mNumDays) : mDayOfWeekStart)
                - mWeekStart;
    }

    /**
     * Calculates the day that the given x position is in, accounting for week
     * number.
     *
     * @param x The x position of the touch event
     * @return A time object for the tapped day or null if the position wasn't
     *         in a day
     */
    public CalendarDay getDayFromLocation(float x, float y) {
        int dayStart = mPadding;
        if (x < dayStart || x > mWidth - mPadding) {
            return null;
        }
        // Selection is (x - start) / (pixels/day) == (x -s) * day / pixels
        int row = (int) (y - MONTH_HEADER_SIZE) / mRowHeight;
        int column = (int) ((x - dayStart) * mNumDays / (mWidth - dayStart - mPadding));
        int day = column - findDayOffset() + 1;
        day += row * mNumDays;
        if (day < 1 || day > mNumCells) {
            return null;
        }
        return new CalendarDay(mYear, mMonth, day);
    }

    /**
     * Called when the user clicks on a day. Handles callbacks to the
     * {@link OnDayClickListener} if one is set.
     *
     * @param day A time object representing the day that was clicked
     */
    private void onDayClick(CalendarDay day) {
        if (mOnDayClickListener != null) {
            mOnDayClickListener.onDayClick(this, day);
        }
        // This is a no-op if accessibility is turned off.
        mNodeProvider.sendEventForItem(day, AccessibilityEvent.TYPE_VIEW_CLICKED);
    }

    /**
     * @return The date that has accessibility focus, or {@code null} if no date
     *         has focus
     */
    public CalendarDay getAccessibilityFocus() {
        return mNodeProvider.getFocusedItem();
    }

    /**
     * Clears accessibility focus within the view. No-op if the view does not
     * contain accessibility focus.
     */
    public void clearAccessibilityFocus() {
        mNodeProvider.clearFocusedItem();
    }

    /**
     * Attempts to restore accessibility focus to the specified date.
     *
     * @param day The date which should receive focus
     * @return {@code false} if the date is not valid for this month view, or
     *         {@code true} if the date received focus
     */
    public boolean restoreAccessibilityFocus(CalendarDay day) {
        if ((day.year != mYear) || (day.month != mMonth) || (day.day > mNumCells)) {
            return false;
        }
        mNodeProvider.setFocusedItem(day);
        return true;
    }

    /**
     * Provides a virtual view hierarchy for interfacing with an accessibility
     * service.
     */
    private class MonthViewNodeProvider extends TouchExplorationHelper<CalendarDay> {
        private final SparseArray<CalendarDay> mCachedItems = new SparseArray<CalendarDay>();
        private final Rect mTempRect = new Rect();
        Calendar recycle;

        public MonthViewNodeProvider(Context context, View parent) {
            super(context, parent);
        }

        @Override
        public void invalidateItem(CalendarDay item) {
            super.invalidateItem(item);
            mCachedItems.delete(getIdForItem(item));
        }

        @Override
        public void invalidateParent() {
            super.invalidateParent();
            mCachedItems.clear();
        }

        @Override
        protected boolean performActionForItem(CalendarDay item, int action, Bundle arguments) {
            switch (action) {
                case AccessibilityNodeInfo.ACTION_CLICK:
                    onDayClick(item);
                    return true;
            }
            return false;
        }

        @Override
        protected void populateEventForItem(CalendarDay item, AccessibilityEvent event) {
            event.setContentDescription(getItemDescription(item));
        }

        @Override
        protected void populateNodeForItem(CalendarDay item, AccessibilityNodeInfoCompat node) {
            getItemBounds(item, mTempRect);
            node.setContentDescription(getItemDescription(item));
            node.setBoundsInParent(mTempRect);
            node.addAction(AccessibilityNodeInfo.ACTION_CLICK);
            if (item.day == mSelectedDay) {
                node.setSelected(true);
            }
        }

        @Override
        protected void getVisibleItems(List<CalendarDay> items) {
            // TODO: Optimize, only return items visible within parent bounds.
            for (int day = 1; day <= mNumCells; day++) {
                items.add(getItemForId(day));
            }
        }

        @Override
        protected CalendarDay getItemAt(float x, float y) {
            return getDayFromLocation(x, y);
        }

        @Override
        protected int getIdForItem(CalendarDay item) {
            return item.day;
        }

        @Override
        protected CalendarDay getItemForId(int id) {
            if ((id < 1) || (id > mNumCells)) {
                return null;
            }
            final CalendarDay item;
            if (mCachedItems.indexOfKey(id) >= 0) {
                item = mCachedItems.get(id);
            } else {
                item = new CalendarDay(mYear, mMonth, id);
                mCachedItems.put(id, item);
            }
            return item;
        }

        /**
         * Calculates the bounding rectangle of a given time object.
         *
         * @param item The time object to calculate bounds for
         * @param rect The rectangle in which to store the bounds
         */
        private void getItemBounds(CalendarDay item, Rect rect) {
            final int offsetX = mPadding;
            final int offsetY = MONTH_HEADER_SIZE;
            final int cellHeight = mRowHeight;
            final int cellWidth = ((mWidth - (2 * mPadding)) / mNumDays);
            final int index = ((item.day - 1) + findDayOffset());
            final int row = (index / mNumDays);
            final int column = (index % mNumDays);
            final int x = (offsetX + (column * cellWidth));
            final int y = (offsetY + (row * cellHeight));
            rect.set(x, y, (x + cellWidth), (y + cellHeight));
        }

        /**
         * Generates a description for a given time object. Since this
         * description will be spoken, the components are ordered by descending
         * specificity as DAY MONTH YEAR.
         *
         * @param item The time object to generate a description for
         * @return A description of the time object
         */
        private CharSequence getItemDescription(CalendarDay item) {
            if (recycle == null) {
                recycle = Calendar.getInstance();
            }
            recycle.set(item.year, item.month, item.day);
            CharSequence date = DateFormat.format("dd MMMM yyyy", recycle.getTimeInMillis());
            if (item.day == mSelectedDay) {
                return getContext().getString(R.string.item_is_selected, date);
            }
            return date;
        }
    }

    /**
     * Handles callbacks when the user clicks on a time object.
     */
    public interface OnDayClickListener {
        public void onDayClick(SimpleMonthView view, CalendarDay day);
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.DateTime;
import com.google.schemaorg.core.datatype.Integer;
import com.google.schemaorg.core.datatype.Number;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/**
 * Interface of <a
 * href="http://schema.org/CreativeWorkSeries">http://schema.org/CreativeWorkSeries</a>.
 */
public interface CreativeWorkSeries extends CreativeWork {
  /**
   * Builder interface of <a
   * href="http://schema.org/CreativeWorkSeries">http://schema.org/CreativeWorkSeries</a>.
   */
public interface Builder extends CreativeWork.Builder {
@Override
Builder addJsonLdContext(@Nullable JsonLdContext context);
@Override
Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);
@Override
Builder setJsonLdId(@Nullable String value);
@Override
Builder setJsonLdReverse(String property, Thing obj);
@Override
Builder setJsonLdReverse(String property, Thing.Builder builder);
/** Add a value to property about. */
Builder addAbout(Thing value);
/** Add a value to property about. */
Builder addAbout(Thing.Builder value);
/** Add a value to property about. */
Builder addAbout(String value);
/** Add a value to property accessibilityAPI. */
Builder addAccessibilityAPI(Text value);
/** Add a value to property accessibilityAPI. */
Builder addAccessibilityAPI(String value);
/** Add a value to property accessibilityControl. */
Builder addAccessibilityControl(Text value);
/** Add a value to property accessibilityControl. */
Builder addAccessibilityControl(String value);
/** Add a value to property accessibilityFeature. */
Builder addAccessibilityFeature(Text value);
/** Add a value to property accessibilityFeature. */
Builder addAccessibilityFeature(String value);
/** Add a value to property accessibilityHazard. */
Builder addAccessibilityHazard(Text value);
/** Add a value to property accessibilityHazard. */
Builder addAccessibilityHazard(String value);
/** Add a value to property accountablePerson. */
Builder addAccountablePerson(Person value);
/** Add a value to property accountablePerson. */
Builder addAccountablePerson(Person.Builder value);
/** Add a value to property accountablePerson. */
Builder addAccountablePerson(String value);
/** Add a value to property additionalType. */
Builder addAdditionalType(URL value);
/** Add a value to property additionalType. */
Builder addAdditionalType(String value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating.Builder value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(String value);
/** Add a value to property alternateName. */
Builder addAlternateName(Text value);
/** Add a value to property alternateName. */
Builder addAlternateName(String value);
/** Add a value to property alternativeHeadline. */
Builder addAlternativeHeadline(Text value);
/** Add a value to property alternativeHeadline. */
Builder addAlternativeHeadline(String value);
/** Add a value to property associatedMedia. */
Builder addAssociatedMedia(MediaObject value);
/** Add a value to property associatedMedia. */
Builder addAssociatedMedia(MediaObject.Builder value);
/** Add a value to property associatedMedia. */
Builder addAssociatedMedia(String value);
/** Add a value to property audience. */
Builder addAudience(Audience value);
/** Add a value to property audience. */
Builder addAudience(Audience.Builder value);
/** Add a value to property audience. */
Builder addAudience(String value);
/** Add a value to property audio. */
Builder addAudio(AudioObject value);
/** Add a value to property audio. */
Builder addAudio(AudioObject.Builder value);
/** Add a value to property audio. */
Builder addAudio(String value);
/** Add a value to property author. */
Builder addAuthor(Organization value);
/** Add a value to property author. */
Builder addAuthor(Organization.Builder value);
/** Add a value to property author. */
Builder addAuthor(Person value);
/** Add a value to property author. */
Builder addAuthor(Person.Builder value);
/** Add a value to property author. */
Builder addAuthor(String value);
/** Add a value to property award. */
Builder addAward(Text value);
/** Add a value to property award. */
Builder addAward(String value);
/** Add a value to property awards. */
Builder addAwards(Text value);
/** Add a value to property awards. */
Builder addAwards(String value);
/** Add a value to property character. */
Builder addCharacter(Person value);
/** Add a value to property character. */
Builder addCharacter(Person.Builder value);
/** Add a value to property character. */
Builder addCharacter(String value);
/** Add a value to property citation. */
Builder addCitation(CreativeWork value);
/** Add a value to property citation. */
Builder addCitation(CreativeWork.Builder value);
/** Add a value to property citation. */
Builder addCitation(Text value);
/** Add a value to property citation. */
Builder addCitation(String value);
/** Add a value to property comment. */
Builder addComment(Comment value);
/** Add a value to property comment. */
Builder addComment(Comment.Builder value);
/** Add a value to property comment. */
Builder addComment(String value);
/** Add a value to property commentCount. */
Builder addCommentCount(Integer value);
/** Add a value to property commentCount. */
Builder addCommentCount(String value);
/** Add a value to property contentLocation. */
Builder addContentLocation(Place value);
/** Add a value to property contentLocation. */
Builder addContentLocation(Place.Builder value);
/** Add a value to property contentLocation. */
Builder addContentLocation(String value);
/** Add a value to property contentRating. */
Builder addContentRating(Text value);
/** Add a value to property contentRating. */
Builder addContentRating(String value);
/** Add a value to property contributor. */
Builder addContributor(Organization value);
/** Add a value to property contributor. */
Builder addContributor(Organization.Builder value);
/** Add a value to property contributor. */
Builder addContributor(Person value);
/** Add a value to property contributor. */
Builder addContributor(Person.Builder value);
/** Add a value to property contributor. */
Builder addContributor(String value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Organization value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Organization.Builder value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Person value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Person.Builder value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(String value);
/** Add a value to property copyrightYear. */
Builder addCopyrightYear(Number value);
/** Add a value to property copyrightYear. */
Builder addCopyrightYear(String value);
/** Add a value to property creator. */
Builder addCreator(Organization value);
/** Add a value to property creator. */
Builder addCreator(Organization.Builder value);
/** Add a value to property creator. */
Builder addCreator(Person value);
/** Add a value to property creator. */
Builder addCreator(Person.Builder value);
/** Add a value to property creator. */
Builder addCreator(String value);
/** Add a value to property dateCreated. */
Builder addDateCreated(Date value);
/** Add a value to property dateCreated. */
Builder addDateCreated(DateTime value);
/** Add a value to property dateCreated. */
Builder addDateCreated(String value);
/** Add a value to property dateModified. */
Builder addDateModified(Date value);
/** Add a value to property dateModified. */
Builder addDateModified(DateTime value);
/** Add a value to property dateModified. */
Builder addDateModified(String value);
/** Add a value to property datePublished. */
Builder addDatePublished(Date value);
/** Add a value to property datePublished. */
Builder addDatePublished(String value);
/** Add a value to property description. */
Builder addDescription(Text value);
/** Add a value to property description. */
Builder addDescription(String value);
/** Add a value to property discussionUrl. */
Builder addDiscussionUrl(URL value);
/** Add a value to property discussionUrl. */
Builder addDiscussionUrl(String value);
/** Add a value to property editor. */
Builder addEditor(Person value);
/** Add a value to property editor. */
Builder addEditor(Person.Builder value);
/** Add a value to property editor. */
Builder addEditor(String value);
/** Add a value to property educationalAlignment. */
Builder addEducationalAlignment(AlignmentObject value);
/** Add a value to property educationalAlignment. */
Builder addEducationalAlignment(AlignmentObject.Builder value);
/** Add a value to property educationalAlignment. */
Builder addEducationalAlignment(String value);
/** Add a value to property educationalUse. */
Builder addEducationalUse(Text value);
/** Add a value to property educationalUse. */
Builder addEducationalUse(String value);
/** Add a value to property encoding. */
Builder addEncoding(MediaObject value);
/** Add a value to property encoding. */
Builder addEncoding(MediaObject.Builder value);
/** Add a value to property encoding. */
Builder addEncoding(String value);
/** Add a value to property encodings. */
Builder addEncodings(MediaObject value);
/** Add a value to property encodings. */
Builder addEncodings(MediaObject.Builder value);
/** Add a value to property encodings. */
Builder addEncodings(String value);
/** Add a value to property endDate. */
Builder addEndDate(Date value);
/** Add a value to property endDate. */
Builder addEndDate(String value);
/** Add a value to property exampleOfWork. */
Builder addExampleOfWork(CreativeWork value);
/** Add a value to property exampleOfWork. */
Builder addExampleOfWork(CreativeWork.Builder value);
/** Add a value to property exampleOfWork. */
Builder addExampleOfWork(String value);
/** Add a value to property fileFormat. */
Builder addFileFormat(Text value);
/** Add a value to property fileFormat. */
Builder addFileFormat(String value);
/** Add a value to property genre. */
Builder addGenre(Text value);
/** Add a value to property genre. */
Builder addGenre(URL value);
/** Add a value to property genre. */
Builder addGenre(String value);
/** Add a value to property hasPart. */
Builder addHasPart(CreativeWork value);
/** Add a value to property hasPart. */
Builder addHasPart(CreativeWork.Builder value);
/** Add a value to property hasPart. */
Builder addHasPart(String value);
/** Add a value to property headline. */
Builder addHeadline(Text value);
/** Add a value to property headline. */
Builder addHeadline(String value);
/** Add a value to property image. */
Builder addImage(ImageObject value);
/** Add a value to property image. */
Builder addImage(ImageObject.Builder value);
/** Add a value to property image. */
Builder addImage(URL value);
/** Add a value to property image. */
Builder addImage(String value);
/** Add a value to property inLanguage. */
Builder addInLanguage(Language value);
/** Add a value to property inLanguage. */
Builder addInLanguage(Language.Builder value);
/** Add a value to property inLanguage. */
Builder addInLanguage(Text value);
/** Add a value to property inLanguage. */
Builder addInLanguage(String value);
/** Add a value to property interactionStatistic. */
Builder addInteractionStatistic(InteractionCounter value);
/** Add a value to property interactionStatistic. */
Builder addInteractionStatistic(InteractionCounter.Builder value);
/** Add a value to property interactionStatistic. */
Builder addInteractionStatistic(String value);
/** Add a value to property interactivityType. */
Builder addInteractivityType(Text value);
/** Add a value to property interactivityType. */
Builder addInteractivityType(String value);
/** Add a value to property isBasedOnUrl. */
Builder addIsBasedOnUrl(URL value);
/** Add a value to property isBasedOnUrl. */
Builder addIsBasedOnUrl(String value);
/** Add a value to property isFamilyFriendly. */
Builder addIsFamilyFriendly(Boolean value);
/** Add a value to property isFamilyFriendly. */
Builder addIsFamilyFriendly(String value);
/** Add a value to property isPartOf. */
Builder addIsPartOf(CreativeWork value);
/** Add a value to property isPartOf. */
Builder addIsPartOf(CreativeWork.Builder value);
/** Add a value to property isPartOf. */
Builder addIsPartOf(String value);
/** Add a value to property keywords. */
Builder addKeywords(Text value);
/** Add a value to property keywords. */
Builder addKeywords(String value);
/** Add a value to property learningResourceType. */
Builder addLearningResourceType(Text value);
/** Add a value to property learningResourceType. */
Builder addLearningResourceType(String value);
/** Add a value to property license. */
Builder addLicense(CreativeWork value);
/** Add a value to property license. */
Builder addLicense(CreativeWork.Builder value);
/** Add a value to property license. */
Builder addLicense(URL value);
/** Add a value to property license. */
Builder addLicense(String value);
/** Add a value to property locationCreated. */
Builder addLocationCreated(Place value);
/** Add a value to property locationCreated. */
Builder addLocationCreated(Place.Builder value);
/** Add a value to property locationCreated. */
Builder addLocationCreated(String value);
/** Add a value to property mainEntity. */
Builder addMainEntity(Thing value);
/** Add a value to property mainEntity. */
Builder addMainEntity(Thing.Builder value);
/** Add a value to property mainEntity. */
Builder addMainEntity(String value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork.Builder value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(URL value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(String value);
/** Add a value to property mentions. */
Builder addMentions(Thing value);
/** Add a value to property mentions. */
Builder addMentions(Thing.Builder value);
/** Add a value to property mentions. */
Builder addMentions(String value);
/** Add a value to property name. */
Builder addName(Text value);
/** Add a value to property name. */
Builder addName(String value);
/** Add a value to property offers. */
Builder addOffers(Offer value);
/** Add a value to property offers. */
Builder addOffers(Offer.Builder value);
/** Add a value to property offers. */
Builder addOffers(String value);
/** Add a value to property position. */
Builder addPosition(Integer value);
/** Add a value to property position. */
Builder addPosition(Text value);
/** Add a value to property position. */
Builder addPosition(String value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action.Builder value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(String value);
/** Add a value to property producer. */
Builder addProducer(Organization value);
/** Add a value to property producer. */
Builder addProducer(Organization.Builder value);
/** Add a value to property producer. */
Builder addProducer(Person value);
/** Add a value to property producer. */
Builder addProducer(Person.Builder value);
/** Add a value to property producer. */
Builder addProducer(String value);
/** Add a value to property provider. */
Builder addProvider(Organization value);
/** Add a value to property provider. */
Builder addProvider(Organization.Builder value);
/** Add a value to property provider. */
Builder addProvider(Person value);
/** Add a value to property provider. */
Builder addProvider(Person.Builder value);
/** Add a value to property provider. */
Builder addProvider(String value);
/** Add a value to property publication. */
Builder addPublication(PublicationEvent value);
/** Add a value to property publication. */
Builder addPublication(PublicationEvent.Builder value);
/** Add a value to property publication. */
Builder addPublication(String value);
/** Add a value to property publisher. */
Builder addPublisher(Organization value);
/** Add a value to property publisher. */
Builder addPublisher(Organization.Builder value);
/** Add a value to property publisher. */
Builder addPublisher(Person value);
/** Add a value to property publisher. */
Builder addPublisher(Person.Builder value);
/** Add a value to property publisher. */
Builder addPublisher(String value);
/** Add a value to property publishingPrinciples. */
Builder addPublishingPrinciples(URL value);
/** Add a value to property publishingPrinciples. */
Builder addPublishingPrinciples(String value);
/** Add a value to property recordedAt. */
Builder addRecordedAt(Event value);
/** Add a value to property recordedAt. */
Builder addRecordedAt(Event.Builder value);
/** Add a value to property recordedAt. */
Builder addRecordedAt(String value);
/** Add a value to property releasedEvent. */
Builder addReleasedEvent(PublicationEvent value);
/** Add a value to property releasedEvent. */
Builder addReleasedEvent(PublicationEvent.Builder value);
/** Add a value to property releasedEvent. */
Builder addReleasedEvent(String value);
/** Add a value to property review. */
Builder addReview(Review value);
/** Add a value to property review. */
Builder addReview(Review.Builder value);
/** Add a value to property review. */
Builder addReview(String value);
/** Add a value to property reviews. */
Builder addReviews(Review value);
/** Add a value to property reviews. */
Builder addReviews(Review.Builder value);
/** Add a value to property reviews. */
Builder addReviews(String value);
/** Add a value to property sameAs. */
Builder addSameAs(URL value);
/** Add a value to property sameAs. */
Builder addSameAs(String value);
/** Add a value to property schemaVersion. */
Builder addSchemaVersion(Text value);
/** Add a value to property schemaVersion. */
Builder addSchemaVersion(URL value);
/** Add a value to property schemaVersion. */
Builder addSchemaVersion(String value);
/** Add a value to property sourceOrganization. */
Builder addSourceOrganization(Organization value);
/** Add a value to property sourceOrganization. */
Builder addSourceOrganization(Organization.Builder value);
/** Add a value to property sourceOrganization. */
Builder addSourceOrganization(String value);
/** Add a value to property startDate. */
Builder addStartDate(Date value);
/** Add a value to property startDate. */
Builder addStartDate(String value);
/** Add a value to property text. */
Builder addText(Text value);
/** Add a value to property text. */
Builder addText(String value);
/** Add a value to property thumbnailUrl. */
Builder addThumbnailUrl(URL value);
/** Add a value to property thumbnailUrl. */
Builder addThumbnailUrl(String value);
/** Add a value to property timeRequired. */
Builder addTimeRequired(Duration value);
/** Add a value to property timeRequired. */
Builder addTimeRequired(Duration.Builder value);
/** Add a value to property timeRequired. */
Builder addTimeRequired(String value);
/** Add a value to property translator. */
Builder addTranslator(Organization value);
/** Add a value to property translator. */
Builder addTranslator(Organization.Builder value);
/** Add a value to property translator. */
Builder addTranslator(Person value);
/** Add a value to property translator. */
Builder addTranslator(Person.Builder value);
/** Add a value to property translator. */
Builder addTranslator(String value);
/** Add a value to property typicalAgeRange. */
Builder addTypicalAgeRange(Text value);
/** Add a value to property typicalAgeRange. */
Builder addTypicalAgeRange(String value);
/** Add a value to property url. */
Builder addUrl(URL value);
/** Add a value to property url. */
Builder addUrl(String value);
/** Add a value to property version. */
Builder addVersion(Number value);
/** Add a value to property version. */
Builder addVersion(String value);
/** Add a value to property video. */
Builder addVideo(VideoObject value);
/** Add a value to property video. */
Builder addVideo(VideoObject.Builder value);
/** Add a value to property video. */
Builder addVideo(String value);
/** Add a value to property workExample. */
Builder addWorkExample(CreativeWork value);
/** Add a value to property workExample. */
Builder addWorkExample(CreativeWork.Builder value);
/** Add a value to property workExample. */
Builder addWorkExample(String value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article.Builder value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(String value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification.Builder value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(String value);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The value of the property.
*/
Builder addProperty(String name, SchemaOrgType value);
/**
* Add a value to property.
*
* @param name The property name.
* @param builder The schema.org object builder for the property value.
*/
Builder addProperty(String name, Thing.Builder builder);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The string value of the property.
*/
Builder addProperty(String name, String value);
/** Build a {@link CreativeWorkSeries} object. */
CreativeWorkSeries build();
}
/**
* Returns the value list of property endDate. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getEndDateList();
/**
* Returns the value list of property startDate. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getStartDateList();
}
| |
package com.imagepicker.ui.selectedMedia;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.widget.LinearLayoutManager;
import android.util.SparseArray;
import android.view.View;
import android.widget.ImageView;
import com.imagepicker.R;
import com.imagepicker.adapter.MediaPagerAdapter;
import com.imagepicker.adapter.SelectedMediaAdapter;
import com.imagepicker.model.MediaItemBean;
import com.imagepicker.model.MessageEvent;
import com.imagepicker.utils.Constants;
import com.imagepicker.utils.SpacesItemDecoration;
import org.greenrobot.eventbus.EventBus;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.Callable;
import io.reactivex.Single;
import io.reactivex.SingleObserver;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
import static android.app.Activity.RESULT_OK;
/**
 * Author: Anuj Sharma, 9/21/2017.
 */
/**
 * Presenter for the "selected media" screen: shows the chosen images/videos in a
 * horizontal thumbnail strip plus a pager, and supports deleting, cropping and
 * returning the final selection to the caller.
 *
 * <p>Fixes in this revision:
 * <ul>
 *   <li>{@link #onActivityResult} no longer NPEs when the crop activity returns a
 *       {@code null} intent.</li>
 *   <li>{@link #saveCroppedImage} now performs the bitmap decode and PNG compression
 *       inside {@code Single.fromCallable(...)} so it actually runs on the io scheduler;
 *       previously the heavy work sat in {@code onSuccess}, which is observed on the
 *       main thread, making the {@code subscribeOn(Schedulers.io())} a no-op.</li>
 * </ul>
 */
public class SelectedMediaPresenterImpl implements SelectedMediaPresenter {
    private SelectedMediaActivity selectedMediaActivity;
    private SelectedMediaView selectedMediaView;
    // Flattened copy of the SparseArray passed by the caller; order follows key order.
    private ArrayList<MediaItemBean> selectedMediaList;
    private SelectedMediaAdapter adapter;
    private PagerAdapter pagerAdapter;

    SelectedMediaPresenterImpl(SelectedMediaActivity selectedMediaActivity, SelectedMediaView selectedMediaView, SparseArray<MediaItemBean> selectedMediaMap) {
        this.selectedMediaActivity = selectedMediaActivity;
        this.selectedMediaView = selectedMediaView;
        if (this.selectedMediaList == null) this.selectedMediaList = new ArrayList<>();
        for (int i = 0; i < selectedMediaMap.size(); i++) {
            int key = selectedMediaMap.keyAt(i);
            this.selectedMediaList.add(selectedMediaMap.get(key));
        }
        init();
    }

    /** Wires the toolbar, thumbnail strip, and pager; keeps strip and pager in sync. */
    private void init() {
        selectedMediaActivity.setSupportActionBar(selectedMediaView.getToolbar());
        selectedMediaActivity.getSupportActionBar().setTitle(R.string.title_crop_image);
        selectedMediaActivity.getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        selectedMediaView.getToolbar().setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                selectedMediaActivity.finish();
            }
        });
        LinearLayoutManager lm = new LinearLayoutManager(selectedMediaActivity);
        lm.setOrientation(LinearLayoutManager.HORIZONTAL);
        selectedMediaView.getSelectedMediaRecycler().setLayoutManager(lm);
        int spacingInPixels = selectedMediaActivity.getResources().getDimensionPixelSize(R.dimen.margin_4);
        selectedMediaView.getSelectedMediaRecycler().addItemDecoration(new SpacesItemDecoration(spacingInPixels));
        adapter = new SelectedMediaAdapter(selectedMediaActivity, selectedMediaList, this);
        selectedMediaView.getSelectedMediaRecycler().setAdapter(adapter);
        adapter.selectedItem = 0;
        adapter.notifyDataSetChanged();
        selectedMediaView.getSelectedViewPager().addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            }
            @Override
            public void onPageSelected(int position) {
                if (adapter != null) {
                    // Keep the thumbnail strip's highlighted item in lockstep with the pager.
                    adapter.selectedItem = position;
                    adapter.notifyDataSetChanged();
                    // Crop only makes sense for still images; hide the menu item for video.
                    if (selectedMediaList.get(position).getMimeType().equalsIgnoreCase("video/mp4")) {
                        selectedMediaActivity.cropMenu.setVisible(false);
                    } else {
                        selectedMediaActivity.cropMenu.setVisible(true);
                    }
                }
            }
            @Override
            public void onPageScrollStateChanged(int state) {
            }
        });
        pagerAdapter = new MediaPagerAdapter(selectedMediaActivity,
                selectedMediaList, this);
        selectedMediaView.getSelectedViewPager().setAdapter(pagerAdapter);
    }

    /** Returns the media item currently shown in the pager, or null if unavailable. */
    MediaItemBean getSelectedMediaObj() {
        if (selectedMediaView.getSelectedViewPager() != null && adapter != null) {
            return adapter.getList().get(selectedMediaView.getSelectedViewPager().getCurrentItem());
        }
        return null;
    }

    /**
     * Deletes the media item currently shown in the pager, updates both adapters,
     * and broadcasts a sticky event so the gallery list deselects the item too.
     * Finishes the activity when the last item is removed.
     */
    void deleteMedia() {
        if (adapter != null && selectedMediaView.getSelectedViewPager() != null && pagerAdapter != null) {
            int position = selectedMediaView.getSelectedViewPager().getCurrentItem();
            MessageEvent obj = new MessageEvent();
            obj.setMediaItemBean(adapter.getList().get(position));
            selectedMediaList.remove(position);
            adapter.notifyItemRemoved(position);
            ((MediaPagerAdapter) pagerAdapter).updateList(selectedMediaList);
            if (adapter.getList().size() == 0) {
                // Nothing left to show.
                selectedMediaActivity.finish();
            } else {
                // Move the selection to the previous item, clamping at the first one.
                if (adapter.selectedItem > 0) {
                    adapter.selectedItem = position - 1;
                    selectedMediaView.getSelectedViewPager().setCurrentItem(adapter.selectedItem);
                } else {
                    adapter.selectedItem = 0;
                    selectedMediaView.getSelectedViewPager().setCurrentItem(0);
                }
            }
            adapter.notifyItemChanged(position);
            // Sticky broadcast so the media-list screen can deselect this item as well.
            EventBus.getDefault().postSticky(obj);
        }
    }

    /**
     * Forwarded from the hosting activity. Persists the crop result when the crop
     * activity succeeded; silently returns when the result carries no data.
     */
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (resultCode == RESULT_OK) {
            if (requestCode == selectedMediaActivity.CROP_IMAGE_REQUEST_CODE) {
                // BUGFIX: data itself can be null, not just data.getData().
                if (data == null || data.getData() == null) {
                    return;
                }
                saveCroppedImage(data.getData().toString());
            }
        }
    }

    /**
     * Decodes the cropped image from {@code croppedPath} and writes it as PNG into the
     * app's crop directory on the io scheduler, then updates both adapters and triggers
     * a media scan on the main thread.
     *
     * @param croppedPath content/file URI (as a string) of the cropped image.
     */
    private void saveCroppedImage(final String croppedPath) {
        final int position = selectedMediaView.getSelectedViewPager().getCurrentItem();
        final File croppedFile = saveMediaToDirectory(selectedMediaActivity, selectedMediaList.get(position).getMediaName());
        if (croppedFile != null) {
            // BUGFIX: the decode/compress used to run in onSuccess (main thread);
            // wrapping it in fromCallable moves it onto Schedulers.io() as intended.
            Single.fromCallable(new Callable<File>() {
                @Override
                public File call() throws Exception {
                    FileOutputStream out = null;
                    try {
                        out = new FileOutputStream(croppedFile);
                        Bitmap bitmap = MediaStore.Images.Media.getBitmap(selectedMediaActivity.getContentResolver(), Uri.parse(croppedPath));
                        // PNG is lossless; the quality argument (100) is ignored.
                        bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
                    } finally {
                        if (out != null) {
                            try {
                                out.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                    return croppedFile;
                }
            })
                    .subscribeOn(Schedulers.io())
                    .observeOn(AndroidSchedulers.mainThread())
                    .subscribe(new SingleObserver<File>() {
                        @Override
                        public void onSubscribe(Disposable d) {
                        }
                        @Override
                        public void onSuccess(File file) {
                            // UI updates only — the file has already been written on io.
                            selectedMediaList.get(position).setCroppedPath(file.getAbsolutePath());
                            adapter.notifyItemChanged(position);
                            ((MediaPagerAdapter) pagerAdapter).updateList(selectedMediaList);
                            // Make the new file visible to the system gallery.
                            Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
                            mediaScanIntent.setData(Uri.fromFile(file));
                            selectedMediaActivity.sendBroadcast(mediaScanIntent);
                        }
                        @Override
                        public void onError(Throwable e) {
                            // Best-effort save, mirroring the original catch-and-log behavior.
                            e.printStackTrace();
                        }
                    });
        }
    }

    /**
     * Creates (or recreates) an empty file named {@code name} inside the app's crop
     * folder on external storage.
     *
     * @return the freshly created file, or null when creation failed.
     */
    private File saveMediaToDirectory(Context ctx, String name) {
        File dir = new File(Environment.getExternalStorageDirectory() + File.separator +
                ctx.getString(R.string.app_name) + File.separator + ctx.getString(R.string.folder_name_crop));
        if (!dir.isDirectory()) {
            dir.mkdirs();
        }
        File file = new File(dir, name);
        // Replace any stale file from a previous crop of the same media.
        if (file.isFile()) file.delete();
        try {
            file.createNewFile();
            return file;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Returns the (possibly cropped) selection to the calling activity and finishes. */
    public void onSaveClick() {
        Intent returnIntent = new Intent();
        returnIntent.putParcelableArrayListExtra(Constants.SelectedMediaObj, selectedMediaList);
        selectedMediaActivity.setResult(Activity.RESULT_OK, returnIntent);
        selectedMediaActivity.finish();
    }

    /** Thumbnail tap: jump the pager to the tapped item. */
    @Override
    public void onMediaClick(MediaItemBean obj, int position) {
        if (obj != null) {
            selectedMediaView.getSelectedViewPager().setCurrentItem(position);
            adapter.notifyItemChanged(position);
        }
    }

    @Override
    public void onMediaLongClick(MediaItemBean obj, int position, ImageView view) {
    }

    @Override
    public void onMediaUpSwipe() {
    }

    @Override
    public void onMediaDownSwipe() {
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.spi.security.principal;
import java.security.Principal;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import org.apache.jackrabbit.api.security.principal.GroupPrincipal;
import org.apache.jackrabbit.api.security.principal.PrincipalIterator;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class PrincipalManagerImplTest {
// Provider seeded with a fixed set of test principals (see TestPrincipalProvider).
private final TestPrincipalProvider provider = new TestPrincipalProvider();
// Object under test, backed by the test provider above.
private final PrincipalManagerImpl principalMgr = new PrincipalManagerImpl(provider);
// Known principals exposed by the provider; used as fixtures throughout the tests.
private final Iterable<Principal> testPrincipals = provider.getTestPrincipals();
/** Returns {@code true} if the given principal represents a group of principals. */
private static boolean isGroup(Principal principal) {
    return principal instanceof GroupPrincipal;
}
/**
 * Asserts that both iterators yield the same principals, ignoring order and
 * duplicates (each iterator is drained into an immutable set first).
 */
private static void assertIterator(@NotNull Iterator<? extends Principal> expected, @NotNull Iterator<? extends Principal> result) {
assertEquals(ImmutableSet.copyOf(expected), ImmutableSet.copyOf(result));
}
/** The everyone principal returned by the manager must be a group. */
@Test
public void testGetEveryone() {
    assertTrue(isGroup(principalMgr.getEveryone()));
}
@Test
public void testGetEveryone2() {
Principal principal = new PrincipalManagerImpl(new TestPrincipalProvider(false)).getEveryone();
assertSame(EveryonePrincipal.getInstance(), principal);
}
@Test
public void testGetPrincipalEveryone() {
assertEquals(EveryonePrincipal.getInstance(), principalMgr.getPrincipal(EveryonePrincipal.NAME));
}
@Test
public void testHasPrincipalEveryone() {
assertTrue(principalMgr.hasPrincipal(EveryonePrincipal.NAME));
}
@Test
public void testHasPrincipal() {
for (Principal pcpl : testPrincipals) {
assertTrue(principalMgr.hasPrincipal(pcpl.getName()));
}
}
@Test
public void testHasPrincipalUnknown() {
assertFalse(principalMgr.hasPrincipal(TestPrincipalProvider.UNKNOWN.getName()));
}
@Test
public void testGetPrincipalUnknown() {
assertNull(principalMgr.getPrincipal(TestPrincipalProvider.UNKNOWN.getName()));
}
@Test
public void testGetPrincipal() {
for (Principal principal : testPrincipals){
Principal pp = principalMgr.getPrincipal(principal.getName());
assertNotNull(pp);
assertEquals("PrincipalManager.getPrincipal returned Principal with different Name", principal.getName(), pp.getName());
assertEquals("PrincipalManager.getPrincipal returned different Principal", principal, pp);
}
}
@Test
public void testGetPrincipalsNonGroup() {
Iterator<? extends Principal> expected = provider.findPrincipals(PrincipalManager.SEARCH_TYPE_NOT_GROUP);
PrincipalIterator it = principalMgr.getPrincipals(PrincipalManager.SEARCH_TYPE_NOT_GROUP);
assertIterator(expected, it);
}
@Test
public void testGetPrincipalsNonGroupContainsNoGroups() {
PrincipalIterator it = principalMgr.getPrincipals(PrincipalManager.SEARCH_TYPE_NOT_GROUP);
while (it.hasNext()) {
Principal p = it.nextPrincipal();
assertFalse(isGroup(p));
}
}
@Test
public void testGetPrincipalsGroup() {
Iterator<? extends Principal> expected = provider.findPrincipals(PrincipalManager.SEARCH_TYPE_GROUP);
PrincipalIterator it = principalMgr.getPrincipals(PrincipalManager.SEARCH_TYPE_GROUP);
assertIterator(expected, it);
}
@Test
public void testGetPrincipalsGroupContainsGroups() {
PrincipalIterator it = principalMgr.getPrincipals(PrincipalManager.SEARCH_TYPE_GROUP);
while (it.hasNext()) {
Principal p = it.nextPrincipal();
assertTrue(isGroup(p));
}
}
@Test
public void testGetPrincipalsAll() {
Iterator<? extends Principal> expected = provider.findPrincipals(PrincipalManager.SEARCH_TYPE_ALL);
PrincipalIterator it = principalMgr.getPrincipals(PrincipalManager.SEARCH_TYPE_ALL);
assertIterator(expected, it);
}
@Test
public void testAllMembersKnown() {
for (Principal p : testPrincipals) {
if (isGroup(p)) {
Enumeration<? extends Principal> en = ((GroupPrincipal) p).members();
while (en.hasMoreElements()) {
Principal memb = en.nextElement();
assertTrue(principalMgr.hasPrincipal(memb.getName()));
}
}
}
}
@Test
public void testGroupMembershipNonGroup() {
assertMembership(principalMgr, PrincipalManager.SEARCH_TYPE_NOT_GROUP);
}
@Test
public void testGroupMembershipGroup() {
assertMembership(principalMgr, PrincipalManager.SEARCH_TYPE_GROUP);
}
@Test
public void testGroupMembershipAll() {
assertMembership(principalMgr, PrincipalManager.SEARCH_TYPE_ALL);
}
private static void assertMembership(@NotNull PrincipalManager mgr, int searchType) {
PrincipalIterator it = mgr.getPrincipals(searchType);
while (it.hasNext()) {
Principal p = it.nextPrincipal();
if (p.equals(EveryonePrincipal.getInstance())) {
continue;
}
boolean atleastEveryone = false;
for (PrincipalIterator membership = mgr.getGroupMembership(p); membership.hasNext();) {
Principal gr = membership.nextPrincipal();
assertTrue(isGroup(gr));
if (gr.equals(EveryonePrincipal.getInstance())) {
atleastEveryone = true;
}
}
assertTrue("All principals (except everyone) must be member of the everyone group.", atleastEveryone);
}
}
@Test
public void testGetGroupMembershipEveryoneEmpty() {
assertFalse(principalMgr.getGroupMembership(EveryonePrincipal.getInstance()).hasNext());
}
@Test
public void testGetGroupMembershipEveryoneWithoutEveryone() {
assertFalse(Iterators.contains(principalMgr.getGroupMembership(EveryonePrincipal.getInstance()), EveryonePrincipal.getInstance()));
}
@Test
public void testGetMembersConsistentWithMembership() {
Principal everyone = principalMgr.getEveryone();
PrincipalIterator it = principalMgr.getPrincipals(PrincipalManager.SEARCH_TYPE_GROUP);
while (it.hasNext()) {
Principal p = it.nextPrincipal();
if (p.equals(everyone)) {
continue;
}
assertTrue(isGroup(p));
Enumeration<? extends Principal> members = ((GroupPrincipal) p).members();
while (members.hasMoreElements()) {
Principal memb = members.nextElement();
Principal group = null;
PrincipalIterator mship = principalMgr.getGroupMembership(memb);
while (mship.hasNext() && group == null) {
Principal gr = mship.nextPrincipal();
if (p.equals(gr)) {
group = gr;
}
}
assertNotNull("Group member " + memb.getName() + "does not reveal group upon getGroupMembership", p.getName());
}
}
}
@Test
public void testFindPrincipal() {
for (Principal pcpl : testPrincipals) {
PrincipalIterator it = principalMgr.findPrincipals(pcpl.getName());
assertTrue("findPrincipals does not find principal with filter '" + pcpl.getName() + '\'', Iterators.contains(it, pcpl));
}
}
@Test
public void testFindPrincipalByTypeGroup() {
for (Principal pcpl : testPrincipals) {
if (isGroup(pcpl)) {
PrincipalIterator it = principalMgr.findPrincipals(pcpl.getName(), PrincipalManager.SEARCH_TYPE_GROUP);
assertTrue("findPrincipals does not find principal with filter '" + pcpl.getName() + '\'', Iterators.contains(it, pcpl));
} else {
PrincipalIterator it = principalMgr.findPrincipals(pcpl.getName(), PrincipalManager.SEARCH_TYPE_NOT_GROUP);
assertTrue("findPrincipals does not find principal with filter '" + pcpl.getName() + '\'', Iterators.contains(it, pcpl));
}
}
}
@Test
public void testFindPrincipalByType() {
for (Principal pcpl : testPrincipals) {
if (isGroup(pcpl)) {
PrincipalIterator it = principalMgr.findPrincipals(pcpl.getName(), PrincipalManager.SEARCH_TYPE_GROUP);
assertTrue("findPrincipals does not find principal with filter '" + pcpl.getName() + '\'', Iterators.contains(it, pcpl));
} else {
PrincipalIterator it = principalMgr.findPrincipals(pcpl.getName(), PrincipalManager.SEARCH_TYPE_NOT_GROUP);
assertTrue("findPrincipals does not find principal with filter '" + pcpl.getName() + '\'', Iterators.contains(it, pcpl));
}
}
}
@Test
public void testFindPrincipalByTypeAll() {
for (Principal pcpl : testPrincipals) {
PrincipalIterator it = principalMgr.findPrincipals(pcpl.getName(), PrincipalManager.SEARCH_TYPE_ALL);
assertTrue("findPrincipals does not find principal with filter '" + pcpl.getName() + '\'', Iterators.contains(it, pcpl));
}
}
@Test
public void testFindEveryone() {
// untyped search -> everyone must be part of the result set
PrincipalIterator it = principalMgr.findPrincipals(EveryonePrincipal.NAME);
assertTrue("findPrincipals does not find principal with filter '" + EveryonePrincipal.NAME + '\'', Iterators.contains(it, EveryonePrincipal.getInstance()));
}
@Test
public void testFindEveryoneTypeGroup() {
// search group only -> everyone must be part of the result set
PrincipalIterator it = principalMgr.findPrincipals(EveryonePrincipal.NAME, PrincipalManager.SEARCH_TYPE_GROUP);
assertTrue("findPrincipals does not find principal with filter '" + EveryonePrincipal.NAME + '\'', Iterators.contains(it, EveryonePrincipal.getInstance()));
}
@Test
public void testFindEveryoneTypeNonGroup() {
// search non-group only -> everyone should not be part of the result set
PrincipalIterator it = principalMgr.findPrincipals(EveryonePrincipal.NAME, PrincipalManager.SEARCH_TYPE_NOT_GROUP);
assertFalse("findPrincipals did find principal with filter '" + EveryonePrincipal.NAME + '\'', Iterators.contains(it, EveryonePrincipal.getInstance()));
}
@Test
public void testFindUnknownByTypeAll() {
String unknownHint = TestPrincipalProvider.UNKNOWN.getName().substring(0, 4);
assertFalse(principalMgr.findPrincipals(unknownHint, PrincipalManager.SEARCH_TYPE_ALL).hasNext());
}
@Test
public void testFindUnknownByTypeGroup() {
String unknownHint = TestPrincipalProvider.UNKNOWN.getName().substring(0, 4);
assertFalse(principalMgr.findPrincipals(unknownHint, PrincipalManager.SEARCH_TYPE_GROUP).hasNext());
}
@Test
public void testFindUnknownByTypeNotGroup() {
String unknownHint = TestPrincipalProvider.UNKNOWN.getName().substring(0, 4);
assertFalse(principalMgr.findPrincipals(unknownHint, PrincipalManager.SEARCH_TYPE_NOT_GROUP).hasNext());
}
@Test
public void testFindPrincipalsWithOffsetLimit() {
PrincipalProvider pp = when(mock(PrincipalProvider.class).findPrincipals(any(), anyBoolean(), anyInt(), anyInt(), anyInt())).thenReturn(Collections.emptyIterator()).getMock();
PrincipalQueryManager pm = new PrincipalManagerImpl(pp);
PrincipalIterator it = pm.findPrincipals("filter", true, PrincipalManager.SEARCH_TYPE_ALL, 5, 2);
assertTrue(it instanceof PrincipalIteratorAdapter);
verify(pp, times(1)).findPrincipals("filter", true, PrincipalManager.SEARCH_TYPE_ALL, 5, 2);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.contrib.streaming.state;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.CheckpointListener;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.state.CheckpointStorage;
import org.apache.flink.runtime.state.CheckpointableKeyedStateBackend;
import org.apache.flink.runtime.state.IncrementalRemoteKeyedStateHandle;
import org.apache.flink.runtime.state.KeyedStateHandle;
import org.apache.flink.runtime.state.SharedStateRegistry;
import org.apache.flink.runtime.state.SnapshotResult;
import org.apache.flink.runtime.state.StateBackendTestBase;
import org.apache.flink.runtime.state.StateHandleID;
import org.apache.flink.runtime.state.StreamStateHandle;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage;
import org.apache.flink.runtime.util.BlockerCheckpointStreamFactory;
import org.apache.flink.runtime.util.BlockingCheckpointOutputStream;
import org.apache.flink.util.IOUtils;
import org.apache.flink.util.function.SupplierWithException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.junit.After;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ColumnFamilyOptions;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksIterator;
import org.rocksdb.RocksObject;
import org.rocksdb.Snapshot;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.RunnableFuture;
import static junit.framework.TestCase.assertNotNull;
import static org.apache.flink.contrib.streaming.state.RocksDBKeyedStateBackendBuilder.DB_INSTANCE_DIR_STRING;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.internal.verification.VerificationModeFactory.times;
import static org.powermock.api.mockito.PowerMockito.spy;
/** Tests for the partitioned state part of {@link EmbeddedRocksDBStateBackend}. */
@RunWith(Parameterized.class)
public class EmbeddedRocksDBStateBackendTest
        extends StateBackendTestBase<EmbeddedRocksDBStateBackend> {

    private OneShotLatch blocker;
    private OneShotLatch waiter;
    private BlockerCheckpointStreamFactory testStreamFactory;
    private RocksDBKeyedStateBackend<Integer> keyedStateBackend;
    // spied RocksObjects (iterators, snapshots, column families) whose close() we verify
    private List<RocksObject> allCreatedCloseables;
    private ValueState<Integer> testState1;
    private ValueState<String> testState2;

    @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder();

    @Parameterized.Parameters
    public static List<Object[]> modes() {
        return Arrays.asList(
                new Object[][] {
                    {
                        true,
                        (SupplierWithException<CheckpointStorage, IOException>)
                                JobManagerCheckpointStorage::new
                    },
                    {
                        false,
                        (SupplierWithException<CheckpointStorage, IOException>)
                                () -> {
                                    String checkpointPath =
                                            TEMP_FOLDER.newFolder().toURI().toString();
                                    return new FileSystemCheckpointStorage(checkpointPath);
                                }
                    }
                });
    }

    @Parameterized.Parameter(value = 0)
    public boolean enableIncrementalCheckpointing;

    @Parameterized.Parameter(value = 1)
    public SupplierWithException<CheckpointStorage, IOException> storageSupplier;

    // Store it because we need it for the cleanup test.
    private String dbPath;
    private RocksDB db = null;
    private ColumnFamilyHandle defaultCFHandle = null;
    private final RocksDBResourceContainer optionsContainer = new RocksDBResourceContainer();

    /** Opens a fresh RocksDB instance in a temp folder and keeps its default CF handle. */
    public void prepareRocksDB() throws Exception {
        // renamed from 'dbPath' to avoid shadowing the field used by
        // testDisposeDeletesAllDirectories
        String instanceDbPath =
                new File(TEMP_FOLDER.newFolder(), DB_INSTANCE_DIR_STRING).getAbsolutePath();
        ColumnFamilyOptions columnOptions = optionsContainer.getColumnOptions();
        ArrayList<ColumnFamilyHandle> columnFamilyHandles = new ArrayList<>(1);
        db =
                RocksDBOperationUtils.openDB(
                        instanceDbPath,
                        Collections.emptyList(),
                        columnFamilyHandles,
                        columnOptions,
                        optionsContainer.getDbOptions());
        defaultCFHandle = columnFamilyHandles.remove(0);
    }

    @Override
    protected EmbeddedRocksDBStateBackend getStateBackend() throws IOException {
        dbPath = TEMP_FOLDER.newFolder().getAbsolutePath();
        EmbeddedRocksDBStateBackend backend =
                new EmbeddedRocksDBStateBackend(enableIncrementalCheckpointing);
        Configuration configuration = new Configuration();
        configuration.set(
                RocksDBOptions.TIMER_SERVICE_FACTORY,
                EmbeddedRocksDBStateBackend.PriorityQueueStateType.ROCKSDB);
        backend = backend.configure(configuration, Thread.currentThread().getContextClassLoader());
        backend.setDbStoragePath(dbPath);
        return backend;
    }

    @Override
    protected CheckpointStorage getCheckpointStorage() throws Exception {
        return storageSupplier.get();
    }

    @Override
    protected boolean isSerializerPresenceRequiredOnRestore() {
        return false;
    }

    @Override
    protected boolean supportsAsynchronousSnapshots() {
        return true;
    }

    // small safety net for instance cleanups, so that no native objects are left
    @After
    public void cleanupRocksDB() {
        if (keyedStateBackend != null) {
            IOUtils.closeQuietly(keyedStateBackend);
            keyedStateBackend.dispose();
        }
        IOUtils.closeQuietly(defaultCFHandle);
        IOUtils.closeQuietly(db);
        IOUtils.closeQuietly(optionsContainer);
        if (allCreatedCloseables != null) {
            for (RocksObject rocksCloseable : allCreatedCloseables) {
                verify(rocksCloseable, times(1)).close();
            }
            allCreatedCloseables = null;
        }
    }

    /**
     * Builds a keyed backend around a spied RocksDB instance, instruments iterator/snapshot/CF
     * creation so every created native object is tracked, and seeds two value states.
     */
    public void setupRocksKeyedStateBackend() throws Exception {
        blocker = new OneShotLatch();
        waiter = new OneShotLatch();
        testStreamFactory = new BlockerCheckpointStreamFactory(1024 * 1024);
        testStreamFactory.setBlockerLatch(blocker);
        testStreamFactory.setWaiterLatch(waiter);
        testStreamFactory.setAfterNumberInvocations(10);
        prepareRocksDB();
        keyedStateBackend =
                RocksDBTestUtils.builderForTestDB(
                                TEMP_FOLDER
                                        .newFolder(), // this is not used anyways because the DB is
                                // injected
                                IntSerializer.INSTANCE,
                                spy(db),
                                defaultCFHandle,
                                optionsContainer.getColumnOptions())
                        .setEnableIncrementalCheckpointing(enableIncrementalCheckpointing)
                        .build();
        testState1 =
                keyedStateBackend.getPartitionedState(
                        VoidNamespace.INSTANCE,
                        VoidNamespaceSerializer.INSTANCE,
                        new ValueStateDescriptor<>("TestState-1", Integer.class, 0));
        testState2 =
                keyedStateBackend.getPartitionedState(
                        VoidNamespace.INSTANCE,
                        VoidNamespaceSerializer.INSTANCE,
                        new ValueStateDescriptor<>("TestState-2", String.class, ""));
        allCreatedCloseables = new ArrayList<>();
        // track every RocksIterator created by the backend
        doAnswer(
                        new Answer<Object>() {
                            @Override
                            public Object answer(InvocationOnMock invocationOnMock)
                                    throws Throwable {
                                RocksIterator rocksIterator =
                                        spy((RocksIterator) invocationOnMock.callRealMethod());
                                allCreatedCloseables.add(rocksIterator);
                                return rocksIterator;
                            }
                        })
                .when(keyedStateBackend.db)
                .newIterator(any(ColumnFamilyHandle.class), any(ReadOptions.class));
        // track every Snapshot created by the backend
        doAnswer(
                        new Answer<Object>() {
                            @Override
                            public Object answer(InvocationOnMock invocationOnMock)
                                    throws Throwable {
                                Snapshot snapshot =
                                        spy((Snapshot) invocationOnMock.callRealMethod());
                                allCreatedCloseables.add(snapshot);
                                return snapshot;
                            }
                        })
                .when(keyedStateBackend.db)
                .getSnapshot();
        // track every ColumnFamilyHandle created by the backend
        doAnswer(
                        new Answer<Object>() {
                            @Override
                            public Object answer(InvocationOnMock invocationOnMock)
                                    throws Throwable {
                                ColumnFamilyHandle snapshot =
                                        spy((ColumnFamilyHandle) invocationOnMock.callRealMethod());
                                allCreatedCloseables.add(snapshot);
                                return snapshot;
                            }
                        })
                .when(keyedStateBackend.db)
                .createColumnFamily(any(ColumnFamilyDescriptor.class));
        for (int i = 0; i < 100; ++i) {
            keyedStateBackend.setCurrentKey(i);
            testState1.update(4200 + i);
            testState2.update("S-" + (4200 + i));
        }
    }

    @Test
    public void testCorrectMergeOperatorSet() throws Exception {
        prepareRocksDB();
        final ColumnFamilyOptions columnFamilyOptions = spy(new ColumnFamilyOptions());
        RocksDBKeyedStateBackend<Integer> test = null;
        try {
            test =
                    RocksDBTestUtils.builderForTestDB(
                                    TEMP_FOLDER.newFolder(),
                                    IntSerializer.INSTANCE,
                                    db,
                                    defaultCFHandle,
                                    columnFamilyOptions)
                            .setEnableIncrementalCheckpointing(enableIncrementalCheckpointing)
                            .build();
            ValueStateDescriptor<String> stubState1 =
                    new ValueStateDescriptor<>("StubState-1", StringSerializer.INSTANCE);
            test.createInternalState(StringSerializer.INSTANCE, stubState1);
            ValueStateDescriptor<String> stubState2 =
                    new ValueStateDescriptor<>("StubState-2", StringSerializer.INSTANCE);
            test.createInternalState(StringSerializer.INSTANCE, stubState2);
            // The default CF is pre-created so sum up to 2 times (once for each stub state)
            verify(columnFamilyOptions, Mockito.times(2))
                    .setMergeOperatorName(RocksDBKeyedStateBackend.MERGE_OPERATOR_NAME);
        } finally {
            if (test != null) {
                IOUtils.closeQuietly(test);
                test.dispose();
            }
            columnFamilyOptions.close();
        }
    }

    @Test
    public void testReleasingSnapshotAfterBackendClosed() throws Exception {
        setupRocksKeyedStateBackend();
        try {
            RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot =
                    keyedStateBackend.snapshot(
                            0L,
                            0L,
                            testStreamFactory,
                            CheckpointOptions.forCheckpointWithDefaultLocation());
            RocksDB spyDB = keyedStateBackend.db;
            if (!enableIncrementalCheckpointing) {
                verify(spyDB, times(1)).getSnapshot();
                verify(spyDB, times(0)).releaseSnapshot(any(Snapshot.class));
            }
            // Ensure every RocksObjects not closed yet
            for (RocksObject rocksCloseable : allCreatedCloseables) {
                verify(rocksCloseable, times(0)).close();
            }
            snapshot.cancel(true);
            this.keyedStateBackend.dispose();
            verify(spyDB, times(1)).close();
            assertTrue(keyedStateBackend.isDisposed());
            // Ensure every RocksObjects was closed exactly once
            for (RocksObject rocksCloseable : allCreatedCloseables) {
                verify(rocksCloseable, times(1)).close();
            }
        } finally {
            keyedStateBackend.dispose();
            keyedStateBackend = null;
        }
    }

    @Test
    public void testDismissingSnapshot() throws Exception {
        setupRocksKeyedStateBackend();
        try {
            RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot =
                    keyedStateBackend.snapshot(
                            0L,
                            0L,
                            testStreamFactory,
                            CheckpointOptions.forCheckpointWithDefaultLocation());
            snapshot.cancel(true);
            verifyRocksObjectsReleased();
        } finally {
            this.keyedStateBackend.dispose();
            this.keyedStateBackend = null;
        }
    }

    @Test
    public void testDismissingSnapshotNotRunnable() throws Exception {
        setupRocksKeyedStateBackend();
        try {
            RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot =
                    keyedStateBackend.snapshot(
                            0L,
                            0L,
                            testStreamFactory,
                            CheckpointOptions.forCheckpointWithDefaultLocation());
            snapshot.cancel(true);
            Thread asyncSnapshotThread = new Thread(snapshot);
            asyncSnapshotThread.start();
            try {
                snapshot.get();
                fail();
            } catch (Exception ignored) {
            }
            asyncSnapshotThread.join();
            verifyRocksObjectsReleased();
        } finally {
            this.keyedStateBackend.dispose();
            this.keyedStateBackend = null;
        }
    }

    @Test
    public void testCompletingSnapshot() throws Exception {
        setupRocksKeyedStateBackend();
        try {
            RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot =
                    keyedStateBackend.snapshot(
                            0L,
                            0L,
                            testStreamFactory,
                            CheckpointOptions.forCheckpointWithDefaultLocation());
            Thread asyncSnapshotThread = new Thread(snapshot);
            asyncSnapshotThread.start();
            waiter.await(); // wait for snapshot to run
            waiter.reset();
            runStateUpdates();
            blocker.trigger(); // allow checkpointing to start writing
            waiter.await(); // wait for snapshot stream writing to run
            SnapshotResult<KeyedStateHandle> snapshotResult = snapshot.get();
            KeyedStateHandle keyedStateHandle = snapshotResult.getJobManagerOwnedSnapshot();
            assertNotNull(keyedStateHandle);
            assertTrue(keyedStateHandle.getStateSize() > 0);
            assertEquals(2, keyedStateHandle.getKeyGroupRange().getNumberOfKeyGroups());
            for (BlockingCheckpointOutputStream stream : testStreamFactory.getAllCreatedStreams()) {
                assertTrue(stream.isClosed());
            }
            asyncSnapshotThread.join();
            verifyRocksObjectsReleased();
        } finally {
            this.keyedStateBackend.dispose();
            this.keyedStateBackend = null;
        }
    }

    @Test
    public void testCancelRunningSnapshot() throws Exception {
        setupRocksKeyedStateBackend();
        try {
            RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot =
                    keyedStateBackend.snapshot(
                            0L,
                            0L,
                            testStreamFactory,
                            CheckpointOptions.forCheckpointWithDefaultLocation());
            Thread asyncSnapshotThread = new Thread(snapshot);
            asyncSnapshotThread.start();
            waiter.await(); // wait for snapshot to run
            waiter.reset();
            runStateUpdates();
            snapshot.cancel(true);
            blocker.trigger(); // allow checkpointing to start writing
            for (BlockingCheckpointOutputStream stream : testStreamFactory.getAllCreatedStreams()) {
                assertTrue(stream.isClosed());
            }
            waiter.await(); // wait for snapshot stream writing to run
            try {
                snapshot.get();
                fail();
            } catch (Exception ignored) {
            }
            asyncSnapshotThread.join();
            verifyRocksObjectsReleased();
        } finally {
            this.keyedStateBackend.dispose();
            this.keyedStateBackend = null;
        }
    }

    @Test
    public void testDisposeDeletesAllDirectories() throws Exception {
        CheckpointableKeyedStateBackend<Integer> backend =
                createKeyedBackend(IntSerializer.INSTANCE);
        Collection<File> allFilesInDbDir =
                FileUtils.listFilesAndDirs(
                        new File(dbPath), new AcceptAllFilter(), new AcceptAllFilter());
        try {
            ValueStateDescriptor<String> kvId =
                    new ValueStateDescriptor<>("id", String.class, null);
            kvId.initializeSerializerUnlessSet(new ExecutionConfig());
            ValueState<String> state =
                    backend.getPartitionedState(
                            VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);
            backend.setCurrentKey(1);
            state.update("Hello");
            // more than just the root directory
            assertTrue(allFilesInDbDir.size() > 1);
        } finally {
            IOUtils.closeQuietly(backend);
            backend.dispose();
        }
        allFilesInDbDir =
                FileUtils.listFilesAndDirs(
                        new File(dbPath), new AcceptAllFilter(), new AcceptAllFilter());
        // just the root directory left
        assertEquals(1, allFilesInDbDir.size());
    }

    @Test
    public void testSharedIncrementalStateDeRegistration() throws Exception {
        if (enableIncrementalCheckpointing) {
            CheckpointableKeyedStateBackend<Integer> backend =
                    createKeyedBackend(IntSerializer.INSTANCE);
            try {
                ValueStateDescriptor<String> kvId =
                        new ValueStateDescriptor<>("id", String.class, null);
                kvId.initializeSerializerUnlessSet(new ExecutionConfig());
                ValueState<String> state =
                        backend.getPartitionedState(
                                VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);
                Queue<IncrementalRemoteKeyedStateHandle> previousStateHandles = new LinkedList<>();
                SharedStateRegistry sharedStateRegistry = spy(new SharedStateRegistry());
                for (int checkpointId = 0; checkpointId < 3; ++checkpointId) {
                    reset(sharedStateRegistry);
                    backend.setCurrentKey(checkpointId);
                    state.update("Hello-" + checkpointId);
                    RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot =
                            backend.snapshot(
                                    checkpointId,
                                    checkpointId,
                                    createStreamFactory(),
                                    CheckpointOptions.forCheckpointWithDefaultLocation());
                    snapshot.run();
                    SnapshotResult<KeyedStateHandle> snapshotResult = snapshot.get();
                    IncrementalRemoteKeyedStateHandle stateHandle =
                            (IncrementalRemoteKeyedStateHandle)
                                    snapshotResult.getJobManagerOwnedSnapshot();
                    Map<StateHandleID, StreamStateHandle> sharedState =
                            new HashMap<>(stateHandle.getSharedState());
                    stateHandle.registerSharedStates(sharedStateRegistry);
                    for (Map.Entry<StateHandleID, StreamStateHandle> e : sharedState.entrySet()) {
                        verify(sharedStateRegistry)
                                .registerReference(
                                        stateHandle.createSharedStateRegistryKeyFromFileName(
                                                e.getKey()),
                                        e.getValue());
                    }
                    previousStateHandles.add(stateHandle);
                    ((CheckpointListener) backend).notifyCheckpointComplete(checkpointId);
                    // -----------------------------------------------------------------
                    if (previousStateHandles.size() > 1) {
                        checkRemove(previousStateHandles.remove(), sharedStateRegistry);
                    }
                }
                while (!previousStateHandles.isEmpty()) {
                    reset(sharedStateRegistry);
                    checkRemove(previousStateHandles.remove(), sharedStateRegistry);
                }
            } finally {
                IOUtils.closeQuietly(backend);
                backend.dispose();
            }
        }
    }

    /** Verifies shared-state references are unregistered only when the handle is discarded. */
    private void checkRemove(IncrementalRemoteKeyedStateHandle remove, SharedStateRegistry registry)
            throws Exception {
        for (StateHandleID id : remove.getSharedState().keySet()) {
            verify(registry, times(0))
                    .unregisterReference(remove.createSharedStateRegistryKeyFromFileName(id));
        }
        remove.discardState();
        for (StateHandleID id : remove.getSharedState().keySet()) {
            verify(registry)
                    .unregisterReference(remove.createSharedStateRegistryKeyFromFileName(id));
        }
    }

    /** Mutates both test states while a snapshot is (potentially) in progress. */
    private void runStateUpdates() throws Exception {
        for (int i = 50; i < 150; ++i) {
            if (i % 10 == 0) {
                Thread.sleep(1);
            }
            keyedStateBackend.setCurrentKey(i);
            testState1.update(4200 + i);
            testState2.update("S-" + (4200 + i));
        }
    }

    private void verifyRocksObjectsReleased() {
        // Ensure every RocksObject was closed exactly once
        for (RocksObject rocksCloseable : allCreatedCloseables) {
            verify(rocksCloseable, times(1)).close();
        }
        // FIX: supply a real assertion message instead of a null one
        assertNotNull("keyedStateBackend.db must still be set before dispose", keyedStateBackend.db);
        RocksDB spyDB = keyedStateBackend.db;
        if (!enableIncrementalCheckpointing) {
            verify(spyDB, times(1)).getSnapshot();
            verify(spyDB, times(1)).releaseSnapshot(any(Snapshot.class));
        }
        keyedStateBackend.dispose();
        verify(spyDB, times(1)).close();
        assertTrue(keyedStateBackend.isDisposed());
    }

    /** File filter accepting everything, used to enumerate all files and directories. */
    private static class AcceptAllFilter implements IOFileFilter {
        @Override
        public boolean accept(File file) {
            return true;
        }

        @Override
        public boolean accept(File file, String s) {
            return true;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.text.DecimalFormatSymbols;
import java.text.Format;
import java.text.NumberFormat;
import java.text.DecimalFormat;
import java.util.Locale;
/**
* <p>Abstract class for Number Validation.</p>
*
* <p>This is a <i>base</i> class for building Number
* Validators using format parsing.</p>
*
* @version $Revision$
* @since Validator 1.3.0
*/
public abstract class AbstractNumberValidator extends AbstractFormatValidator {
// Serialization identity of this validator class.
private static final long serialVersionUID = -3088817875906765463L;
/** Standard <code>NumberFormat</code> type */
public static final int STANDARD_FORMAT = 0;
/** Currency <code>NumberFormat</code> type */
public static final int CURRENCY_FORMAT = 1;
/** Percent <code>NumberFormat</code> type */
public static final int PERCENT_FORMAT = 2;
// Whether fractional (decimal) values are permitted; integers only when false.
private final boolean allowFractions;
// One of STANDARD_FORMAT, CURRENCY_FORMAT or PERCENT_FORMAT.
private final int formatType;
/**
 * Creates a validator with the given parsing strictness, format type
 * and fraction handling.
 *
 * @param strict <code>true</code> if strict
 *        <code>Format</code> parsing should be used.
 * @param formatType The <code>NumberFormat</code> type to
 *        create for validation, default is STANDARD_FORMAT.
 * @param allowFractions <code>true</code> if fractions are
 *        allowed or <code>false</code> if integers only.
 */
public AbstractNumberValidator(boolean strict, int formatType, boolean allowFractions) {
    super(strict);
    this.formatType = formatType;
    this.allowFractions = allowFractions;
}
/**
 * <p>Reports whether the value being validated may carry a
 * fractional part.</p>
 *
 * @return <code>true</code> if decimals are allowed
 *         or <code>false</code> if the number is an integer.
 */
public boolean isAllowFractions() {
    return this.allowFractions;
}
/**
 * <p>Reports which kind of <code>NumberFormat</code> this validator
 * instance creates (standard, currency or percent).</p>
 *
 * @return the format type created.
 */
public int getFormatType() {
    return this.formatType;
}
/**
 * <p>Validate using the specified <code>Locale</code>.</p>
 *
 * @param value The value validation is being performed on.
 * @param pattern The pattern used to validate the value against, or the
 *        default for the <code>Locale</code> if <code>null</code>.
 * @param locale The locale to use for the date format, system default if null.
 * @return <code>true</code> if the value is valid.
 */
@Override
public boolean isValid(String value, String pattern, Locale locale) {
    // A non-null parse result means the value was accepted by the format.
    return parse(value, pattern, locale) != null;
}
/**
 * Check if the value is within a specified range.
 *
 * @param value The value validation is being performed on.
 * @param min The minimum value of the range.
 * @param max The maximum value of the range.
 * @return <code>true</code> if the value is within the
 *         specified range.
 */
public boolean isInRange(Number value, Number min, Number max) {
    // in range means: not below the minimum and not above the maximum
    return minValue(value, min) && maxValue(value, max);
}
/**
 * Check if the value is greater than or equal to a minimum.
 *
 * @param value The value validation is being performed on.
 * @param min The minimum value.
 * @return <code>true</code> if the value is greater than
 *         or equal to the minimum.
 */
public boolean minValue(Number value, Number min) {
    // compare as doubles when fractions are allowed, otherwise as longs
    return isAllowFractions()
            ? value.doubleValue() >= min.doubleValue()
            : value.longValue() >= min.longValue();
}
/**
* Check if the value is less than or equal to a maximum.
*
* @param value The value validation is being performed on.
* @param max The maximum value.
* @return <code>true</code> if the value is less than
* or equal to the maximum.
*/
public boolean maxValue(Number value, Number max) {
if (isAllowFractions()) {
return (value.doubleValue() <= max.doubleValue());
}
return (value.longValue() <= max.longValue());
}
/**
* <p>Parse the value using the specified pattern.</p>
*
* @param value The value validation is being performed on.
* @param pattern The pattern used to validate the value against, or the
* default for the <code>Locale</code> if <code>null</code>.
* @param locale The locale to use for the date format, system default if null.
* @return The parsed value if valid or <code>null</code> if invalid.
*/
protected Object parse(String value, String pattern, Locale locale) {
value = (value == null ? null : value.trim());
if (value == null || value.isEmpty()) {
return null;
}
Format formatter = getFormat(pattern, locale);
return parse(value, formatter);
}
    /**
     * <p>Process the parsed value, performing any further validation
     * and type conversion required. Implemented by concrete subclasses
     * to convert the raw {@link Number} produced by the parser into the
     * subclass's target type.</p>
     *
     * @param value The parsed object created.
     * @param formatter The Format used to parse the value with.
     * @return The parsed value converted to the appropriate type
     *         if valid or <code>null</code> if invalid.
     */
    @Override
    protected abstract Object processParsedValue(Object value, Format formatter);
/**
* <p>Returns a <code>NumberFormat</code> for the specified <i>pattern</i>
* and/or <code>Locale</code>.</p>
*
* @param pattern The pattern used to validate the value against or
* <code>null</code> to use the default for the <code>Locale</code>.
* @param locale The locale to use for the currency format, system default if null.
* @return The <code>NumberFormat</code> to created.
*/
@Override
protected Format getFormat(String pattern, Locale locale) {
NumberFormat formatter = null;
boolean usePattern = pattern != null && !pattern.isEmpty();
if (!usePattern) {
formatter = (NumberFormat)getFormat(locale);
} else if (locale == null) {
formatter = new DecimalFormat(pattern);
} else {
DecimalFormatSymbols symbols = new DecimalFormatSymbols(locale);
formatter = new DecimalFormat(pattern, symbols);
}
if (!isAllowFractions()) {
formatter.setParseIntegerOnly(true);
}
return formatter;
}
    /**
     * <p>Returns the number of fraction digits ("scale") the
     * <code>NumberFormat</code> enforces, adjusted for any multiplier
     * (e.g. percent formats), or <code>-1</code> if no fixed scale applies.</p>
     *
     * @param format The <code>NumberFormat</code> to determine the
     *        scale of.
     * @return The scale enforced by the format, or -1 if none.
     */
    protected int determineScale(NumberFormat format) {
        // Non-strict validation imposes no scale at all.
        if (!isStrict()) {
            return -1;
        }
        // Integer-only parsing means zero fraction digits.
        if (!isAllowFractions() || format.isParseIntegerOnly()) {
            return 0;
        }
        int minimumFraction = format.getMinimumFractionDigits();
        int maximumFraction = format.getMaximumFractionDigits();
        // A variable number of fraction digits means no single scale.
        if (minimumFraction != maximumFraction) {
            return -1;
        }
        int scale = minimumFraction;
        // A multiplier of 100/1000 (percent/per-mille) shifts the effective scale.
        if (format instanceof DecimalFormat) {
            int multiplier = ((DecimalFormat)format).getMultiplier();
            if (multiplier == 100) { // CHECKSTYLE IGNORE MagicNumber
                scale += 2; // CHECKSTYLE IGNORE MagicNumber
            } else if (multiplier == 1000) { // CHECKSTYLE IGNORE MagicNumber
                scale += 3; // CHECKSTYLE IGNORE MagicNumber
            }
        } else if (formatType == PERCENT_FORMAT) {
            scale += 2; // CHECKSTYLE IGNORE MagicNumber
        }
        return scale;
    }
/**
* <p>Returns a <code>NumberFormat</code> for the specified Locale.</p>
*
* @param locale The locale a <code>NumberFormat</code> is required for,
* system default if null.
* @return The <code>NumberFormat</code> to created.
*/
protected Format getFormat(Locale locale) {
NumberFormat formatter = null;
switch (formatType) {
case CURRENCY_FORMAT:
if (locale == null) {
formatter = NumberFormat.getCurrencyInstance();
} else {
formatter = NumberFormat.getCurrencyInstance(locale);
}
break;
case PERCENT_FORMAT:
if (locale == null) {
formatter = NumberFormat.getPercentInstance();
} else {
formatter = NumberFormat.getPercentInstance(locale);
}
break;
default:
if (locale == null) {
formatter = NumberFormat.getInstance();
} else {
formatter = NumberFormat.getInstance(locale);
}
if (!isAllowFractions()) {
formatter.setParseIntegerOnly(true);
}
break;
}
return formatter;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.requests;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.Uuid;
import org.apache.kafka.common.internals.Topic;
import org.apache.kafka.common.message.MetadataResponseData;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.protocol.ObjectSerializationCache;
import org.apache.kafka.common.record.RecordBatch;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
/**
 * Static helpers for building Kafka request/response objects in tests,
 * most notably {@link MetadataResponse} instances with configurable
 * brokers, topics, partition counts, errors, leader epochs and topic ids.
 */
public class RequestTestUtils {
    /** Returns true if any record batch in the produce request carries a producer id. */
    public static boolean hasIdempotentRecords(ProduceRequest request) {
        return RequestUtils.flag(request, RecordBatch::hasProducerId);
    }
    /** Serializes a request header into a freshly allocated, flipped buffer. */
    public static ByteBuffer serializeRequestHeader(RequestHeader header) {
        ObjectSerializationCache serializationCache = new ObjectSerializationCache();
        ByteBuffer buffer = ByteBuffer.allocate(header.size(serializationCache));
        header.write(buffer, serializationCache);
        buffer.flip();
        return buffer;
    }
    /** Serializes a response preceded by a response header for the given correlation id. */
    public static ByteBuffer serializeResponseWithHeader(AbstractResponse response, short version, int correlationId) {
        return response.serializeWithHeader(new ResponseHeader(correlationId,
            response.apiKey().responseHeaderVersion(version)), version);
    }
    /** Builds a metadata response at the latest METADATA version. */
    public static MetadataResponse metadataResponse(Collection<Node> brokers,
                                                    String clusterId, int controllerId,
                                                    List<MetadataResponse.TopicMetadata> topicMetadataList) {
        return metadataResponse(brokers, clusterId, controllerId, topicMetadataList, ApiKeys.METADATA.latestVersion());
    }
    /** Builds a metadata response with default throttle time and omitted authorized operations. */
    public static MetadataResponse metadataResponse(Collection<Node> brokers,
                                                    String clusterId, int controllerId,
                                                    List<MetadataResponse.TopicMetadata> topicMetadataList,
                                                    short responseVersion) {
        return metadataResponse(MetadataResponse.DEFAULT_THROTTLE_TIME, brokers, clusterId, controllerId,
            topicMetadataList, MetadataResponse.AUTHORIZED_OPERATIONS_OMITTED, responseVersion);
    }
    /**
     * Builds a metadata response, converting each high-level TopicMetadata
     * (and its partitions) into the wire-protocol MetadataResponseData form.
     */
    public static MetadataResponse metadataResponse(int throttleTimeMs, Collection<Node> brokers,
                                                    String clusterId, int controllerId,
                                                    List<MetadataResponse.TopicMetadata> topicMetadatas,
                                                    int clusterAuthorizedOperations,
                                                    short responseVersion) {
        List<MetadataResponseData.MetadataResponseTopic> topics = new ArrayList<>();
        topicMetadatas.forEach(topicMetadata -> {
            MetadataResponseData.MetadataResponseTopic metadataResponseTopic = new MetadataResponseData.MetadataResponseTopic();
            metadataResponseTopic
                .setErrorCode(topicMetadata.error().code())
                .setName(topicMetadata.topic())
                .setTopicId(topicMetadata.topicId())
                .setIsInternal(topicMetadata.isInternal())
                .setTopicAuthorizedOperations(topicMetadata.authorizedOperations());
            for (MetadataResponse.PartitionMetadata partitionMetadata : topicMetadata.partitionMetadata()) {
                // Missing leader/epoch fall back to the protocol's sentinel values.
                metadataResponseTopic.partitions().add(new MetadataResponseData.MetadataResponsePartition()
                    .setErrorCode(partitionMetadata.error.code())
                    .setPartitionIndex(partitionMetadata.partition())
                    .setLeaderId(partitionMetadata.leaderId.orElse(MetadataResponse.NO_LEADER_ID))
                    .setLeaderEpoch(partitionMetadata.leaderEpoch.orElse(RecordBatch.NO_PARTITION_LEADER_EPOCH))
                    .setReplicaNodes(partitionMetadata.replicaIds)
                    .setIsrNodes(partitionMetadata.inSyncReplicaIds)
                    .setOfflineReplicas(partitionMetadata.offlineReplicaIds));
            }
            topics.add(metadataResponseTopic);
        });
        return MetadataResponse.prepareResponse(responseVersion, throttleTimeMs, brokers, clusterId, controllerId,
            topics, clusterAuthorizedOperations);
    }
    /** Metadata update for the default cluster id, no epochs or topic ids. */
    public static MetadataResponse metadataUpdateWith(final int numNodes,
                                                      final Map<String, Integer> topicPartitionCounts) {
        return metadataUpdateWith("kafka-cluster", numNodes, topicPartitionCounts);
    }
    /** Metadata update for the default cluster id with per-partition leader epochs. */
    public static MetadataResponse metadataUpdateWith(final int numNodes,
                                                      final Map<String, Integer> topicPartitionCounts,
                                                      final Function<TopicPartition, Integer> epochSupplier) {
        return metadataUpdateWith("kafka-cluster", numNodes, Collections.emptyMap(),
            topicPartitionCounts, epochSupplier, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(), Collections.emptyMap());
    }
    /** Metadata update with no topic errors, epochs or topic ids. */
    public static MetadataResponse metadataUpdateWith(final String clusterId,
                                                      final int numNodes,
                                                      final Map<String, Integer> topicPartitionCounts) {
        return metadataUpdateWith(clusterId, numNodes, Collections.emptyMap(),
            topicPartitionCounts, tp -> null, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(), Collections.emptyMap());
    }
    /** Metadata update with explicit per-topic errors. */
    public static MetadataResponse metadataUpdateWith(final String clusterId,
                                                      final int numNodes,
                                                      final Map<String, Errors> topicErrors,
                                                      final Map<String, Integer> topicPartitionCounts) {
        return metadataUpdateWith(clusterId, numNodes, topicErrors,
            topicPartitionCounts, tp -> null, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(), Collections.emptyMap());
    }
    /** Metadata update with explicit per-topic errors at a specific response version. */
    public static MetadataResponse metadataUpdateWith(final String clusterId,
                                                      final int numNodes,
                                                      final Map<String, Errors> topicErrors,
                                                      final Map<String, Integer> topicPartitionCounts,
                                                      final short responseVersion) {
        return metadataUpdateWith(clusterId, numNodes, topicErrors,
            topicPartitionCounts, tp -> null, MetadataResponse.PartitionMetadata::new, responseVersion, Collections.emptyMap());
    }
    /** Metadata update with per-topic errors and per-partition leader epochs. */
    public static MetadataResponse metadataUpdateWith(final String clusterId,
                                                      final int numNodes,
                                                      final Map<String, Errors> topicErrors,
                                                      final Map<String, Integer> topicPartitionCounts,
                                                      final Function<TopicPartition, Integer> epochSupplier) {
        return metadataUpdateWith(clusterId, numNodes, topicErrors,
            topicPartitionCounts, epochSupplier, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(), Collections.emptyMap());
    }
    /** Metadata update for the default cluster id with topic ids. */
    public static MetadataResponse metadataUpdateWithIds(final int numNodes,
                                                         final Map<String, Integer> topicPartitionCounts,
                                                         final Map<String, Uuid> topicIds) {
        return metadataUpdateWithIds("kafka-cluster", numNodes, topicPartitionCounts, topicIds);
    }
    /** Metadata update for the default cluster id with leader epochs and topic ids. */
    public static MetadataResponse metadataUpdateWithIds(final int numNodes,
                                                         final Map<String, Integer> topicPartitionCounts,
                                                         final Function<TopicPartition, Integer> epochSupplier,
                                                         final Map<String, Uuid> topicIds) {
        return metadataUpdateWith("kafka-cluster", numNodes, Collections.emptyMap(),
            topicPartitionCounts, epochSupplier, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(),
            topicIds);
    }
    /** Metadata update with topic ids, no errors or epochs. */
    public static MetadataResponse metadataUpdateWithIds(final String clusterId,
                                                         final int numNodes,
                                                         final Map<String, Integer> topicPartitionCounts,
                                                         final Map<String, Uuid> topicIds) {
        return metadataUpdateWith(clusterId, numNodes, Collections.emptyMap(),
            topicPartitionCounts, tp -> null, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(),
            topicIds);
    }
    /** Metadata update with per-topic errors and topic ids. */
    public static MetadataResponse metadataUpdateWithIds(final String clusterId,
                                                         final int numNodes,
                                                         final Map<String, Errors> topicErrors,
                                                         final Map<String, Integer> topicPartitionCounts,
                                                         final Map<String, Uuid> topicIds) {
        return metadataUpdateWith(clusterId, numNodes, topicErrors,
            topicPartitionCounts, tp -> null, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(), topicIds);
    }
    /** Metadata update with per-topic errors and topic ids at a specific response version. */
    public static MetadataResponse metadataUpdateWithIds(final String clusterId,
                                                         final int numNodes,
                                                         final Map<String, Errors> topicErrors,
                                                         final Map<String, Integer> topicPartitionCounts,
                                                         final short responseVersion,
                                                         final Map<String, Uuid> topicIds) {
        return metadataUpdateWith(clusterId, numNodes, topicErrors,
            topicPartitionCounts, tp -> null, MetadataResponse.PartitionMetadata::new, responseVersion, topicIds);
    }
    /** Metadata update with per-topic errors, leader epochs and topic ids. */
    public static MetadataResponse metadataUpdateWithIds(final String clusterId,
                                                         final int numNodes,
                                                         final Map<String, Errors> topicErrors,
                                                         final Map<String, Integer> topicPartitionCounts,
                                                         final Function<TopicPartition, Integer> epochSupplier,
                                                         final Map<String, Uuid> topicIds) {
        return metadataUpdateWith(clusterId, numNodes, topicErrors,
            topicPartitionCounts, epochSupplier, MetadataResponse.PartitionMetadata::new, ApiKeys.METADATA.latestVersion(), topicIds);
    }
    /**
     * Master builder every other overload delegates to. Creates {@code numNodes}
     * localhost brokers (ports 1969+i), one single-replica partition metadata
     * entry per requested partition (leader assigned round-robin over the
     * brokers), plus an error-only topic entry for each entry in
     * {@code topicErrors}. Controller id is always node 0.
     */
    public static MetadataResponse metadataUpdateWith(final String clusterId,
                                                      final int numNodes,
                                                      final Map<String, Errors> topicErrors,
                                                      final Map<String, Integer> topicPartitionCounts,
                                                      final Function<TopicPartition, Integer> epochSupplier,
                                                      final PartitionMetadataSupplier partitionSupplier,
                                                      final short responseVersion,
                                                      final Map<String, Uuid> topicIds) {
        final List<Node> nodes = new ArrayList<>(numNodes);
        for (int i = 0; i < numNodes; i++)
            nodes.add(new Node(i, "localhost", 1969 + i));
        List<MetadataResponse.TopicMetadata> topicMetadata = new ArrayList<>();
        for (Map.Entry<String, Integer> topicPartitionCountEntry : topicPartitionCounts.entrySet()) {
            String topic = topicPartitionCountEntry.getKey();
            int numPartitions = topicPartitionCountEntry.getValue();
            List<MetadataResponse.PartitionMetadata> partitionMetadata = new ArrayList<>(numPartitions);
            for (int i = 0; i < numPartitions; i++) {
                TopicPartition tp = new TopicPartition(topic, i);
                // Leader chosen round-robin; the leader is also the only replica/ISR member.
                Node leader = nodes.get(i % nodes.size());
                List<Integer> replicaIds = Collections.singletonList(leader.id());
                partitionMetadata.add(partitionSupplier.supply(
                    Errors.NONE, tp, Optional.of(leader.id()), Optional.ofNullable(epochSupplier.apply(tp)),
                    replicaIds, replicaIds, replicaIds));
            }
            topicMetadata.add(new MetadataResponse.TopicMetadata(Errors.NONE, topic, topicIds.getOrDefault(topic, Uuid.ZERO_UUID),
                Topic.isInternal(topic), partitionMetadata, MetadataResponse.AUTHORIZED_OPERATIONS_OMITTED));
        }
        for (Map.Entry<String, Errors> topicErrorEntry : topicErrors.entrySet()) {
            String topic = topicErrorEntry.getKey();
            topicMetadata.add(new MetadataResponse.TopicMetadata(topicErrorEntry.getValue(), topic,
                Topic.isInternal(topic), Collections.emptyList()));
        }
        return metadataResponse(nodes, clusterId, 0, topicMetadata, responseVersion);
    }
    /** Factory for partition metadata, letting tests substitute custom subclasses. */
    @FunctionalInterface
    public interface PartitionMetadataSupplier {
        MetadataResponse.PartitionMetadata supply(Errors error,
                                                  TopicPartition partition,
                                                  Optional<Integer> leaderId,
                                                  Optional<Integer> leaderEpoch,
                                                  List<Integer> replicas,
                                                  List<Integer> isr,
                                                  List<Integer> offlineReplicas);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment;
import static com.google.common.collect.Sets.newHashSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.segment.memory.MemoryStore;
import org.junit.Test;
/**
 * Tests for SegmentIdTable covering hash-collision handling, concurrent
 * lookup, map rebuilds, GC-triggered cleanup and bulk clearing.
 * <p>NOTE(review): several assertions depend on the table's internal
 * rebuild heuristics and on System.gc() actually reclaiming ids, so they
 * are sensitive to implementation and JVM behaviour.
 */
public class SegmentIdTableTest {
    /** Creates a SegmentIdFactory that binds new ids to the given store. */
    private static SegmentIdFactory newSegmentIdMaker(final SegmentStore store) {
        return new SegmentIdFactory() {
            @Nonnull
            @Override
            public SegmentId newSegmentId(long msb, long lsb) {
                return new SegmentId(store, msb, lsb);
            }
        };
    }
    /** Convenience factory backed by a fresh in-memory store. */
    private static SegmentIdFactory newSegmentIdMaker() throws IOException {
        return newSegmentIdMaker(new MemoryStore());
    }
    /**
     * OAK-2752: a lookup for an absent id must not spin forever when the
     * table is full of colliding entries; it should complete promptly on
     * another thread.
     */
    @Test
    public void endlessSearchLoop() throws IOException {
        final SegmentIdFactory maker = newSegmentIdMaker();
        final SegmentIdTable tbl = new SegmentIdTable();
        // refs keeps the ids strongly reachable so they are not collected mid-test
        List<SegmentId> refs = new ArrayList<SegmentId>();
        for (int i = 0; i < 1024; i++) {
            refs.add(tbl.newSegmentId(i, i % 64, maker));
        }
        Callable<SegmentId> c = new Callable<SegmentId>() {
            @Override
            public SegmentId call() throws Exception {
                // (2,1) doesn't exist
                return tbl.newSegmentId(2, 1, maker);
            }
        };
        Future<SegmentId> f = Executors.newSingleThreadExecutor().submit(c);
        SegmentId s = null;
        try {
            // 5s timeout: if the lookup loops endlessly, get() times out and fails
            s = f.get(5, TimeUnit.SECONDS);
        } catch (Exception e) {
            fail(e.getMessage());
        }
        assertNotNull(s);
        assertEquals(2, s.getMostSignificantBits());
        assertEquals(1, s.getLeastSignificantBits());
    }
    /**
     * Inserting 16k random ids (fixed seed) and then looking them all up
     * again must neither grow the table nor trigger extra rebuilds.
     */
    @Test
    public void randomized() throws IOException {
        SegmentIdFactory maker = newSegmentIdMaker();
        final SegmentIdTable tbl = new SegmentIdTable();
        List<SegmentId> refs = new ArrayList<SegmentId>();
        Random r = new Random(1);
        for (int i = 0; i < 16 * 1024; i++) {
            refs.add(tbl.newSegmentId(r.nextLong(), r.nextLong(), maker));
        }
        assertEquals(16 * 1024, tbl.getEntryCount());
        assertEquals(16 * 2048, tbl.getMapSize());
        assertEquals(5, tbl.getMapRebuildCount());
        // same seed => same ids: every lookup is a hit, nothing changes
        r = new Random(1);
        for (int i = 0; i < 16 * 1024; i++) {
            refs.add(tbl.newSegmentId(r.nextLong(), r.nextLong(), maker));
            assertEquals(16 * 1024, tbl.getEntryCount());
            assertEquals(16 * 2048, tbl.getMapSize());
            assertEquals(5, tbl.getMapRebuildCount());
        }
    }
    /**
     * clearSegmentIdTables must tag exactly the reclaimed ids with the GC
     * info string and leave the others untouched.
     */
    @Test
    public void clearTable() throws IOException {
        SegmentIdFactory maker = newSegmentIdMaker();
        final SegmentIdTable tbl = new SegmentIdTable();
        List<SegmentId> refs = new ArrayList<SegmentId>();
        for (int i = 0; i < 8; i++) {
            refs.add(tbl.newSegmentId(i, i % 2, maker));
        }
        // reclaim the ids with msb < 4
        Set<UUID> reclaimed = newHashSet();
        for (SegmentId id : refs) {
            if (id.getMostSignificantBits() < 4) {
                reclaimed.add(id.asUUID());
            }
        }
        assertEquals(0, tbl.getMapRebuildCount());
        tbl.clearSegmentIdTables(reclaimed, "TestGcInfo");
        for (SegmentId id : refs) {
            if (id.getMostSignificantBits() < 4) {
                assertEquals("TestGcInfo", id.getGcInfo());
            } else {
                assertNull(id.getGcInfo());
            }
        }
    }
    /**
     * Hash collisions alone (all entries strongly referenced) must not
     * force a map rebuild beyond the initial growth.
     */
    @Test
    public void justHashCollisions() throws IOException {
        SegmentIdFactory maker = newSegmentIdMaker();
        final SegmentIdTable tbl = new SegmentIdTable();
        List<SegmentId> refs = new ArrayList<SegmentId>();
        int originalCount = 1024;
        for (int i = 0; i < originalCount; i++) {
            // modulo 128 to ensure we have conflicts
            refs.add(tbl.newSegmentId(i, i % 128, maker));
        }
        assertEquals(originalCount, tbl.getEntryCount());
        assertEquals(1, tbl.getMapRebuildCount());
        List<SegmentId> refs2 = new ArrayList<SegmentId>();
        tbl.collectReferencedIds(refs2);
        assertEquals(refs.size(), refs2.size());
        assertEquals(originalCount, tbl.getEntryCount());
        // we don't expect that there was a refresh,
        // because there were just hash collisions
        assertEquals(1, tbl.getMapRebuildCount());
    }
    /**
     * Dropping strong references and provoking GC must eventually let the
     * table detect freed entries and rebuild, shrinking the entry count.
     */
    @Test
    public void gc() throws IOException {
        SegmentIdFactory maker = newSegmentIdMaker();
        final SegmentIdTable tbl = new SegmentIdTable();
        List<SegmentId> refs = new ArrayList<SegmentId>();
        int originalCount = 1024;
        for (int i = 0; i < originalCount; i++) {
            // modulo 128 to ensure we have conflicts
            refs.add(tbl.newSegmentId(i, i % 128, maker));
        }
        assertEquals(originalCount, tbl.getEntryCount());
        assertEquals(1, tbl.getMapRebuildCount());
        for (int i = 0; i < refs.size() / 2; i++) {
            // we need to remove the first entries,
            // because if we remove the last entries, then
            // getSegmentId would not detect that entries were freed up
            refs.remove(0);
        }
        for (int gcCalls = 0;; gcCalls++) {
            // needed here, so some entries can be garbage collected
            System.gc();
            for (SegmentId id : refs) {
                long msb = id.getMostSignificantBits();
                long lsb = id.getLeastSignificantBits();
                SegmentId id2 = tbl.newSegmentId(msb, lsb, maker);
                assertTrue(id2 == id);
            }
            // because we found each entry, we expect the refresh count is the same
            assertEquals(1, tbl.getMapRebuildCount());
            // even thought this does not increase the entry count a lot,
            // it is supposed to detect that entries were removed,
            // and force a refresh, which would get rid of the unreferenced ids
            for (int i = 0; i < 10; i++) {
                tbl.newSegmentId(i, i, maker);
            }
            if (tbl.getEntryCount() < originalCount) {
                break;
            } else if (gcCalls > 10) {
                fail("No entries were garbage collected after 10 times System.gc()");
            }
        }
        assertEquals(2, tbl.getMapRebuildCount());
    }
}
| |
package es.thesinsprods.zagastales.juegozagas.jugar.master.npc4;
import java.awt.EventQueue;
import javax.swing.JFrame;
import javax.swing.JLabel;
import java.awt.Font;
import java.awt.Color;
import javax.swing.SwingConstants;
import javax.swing.JTextField;
import javax.swing.JButton;
import javax.swing.border.BevelBorder;
import es.thesinsprods.resources.font.MorpheusFont;
import es.thesinsprods.zagastales.juegozagas.jugar.master.JugarOnline;
import javax.swing.ImageIcon;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.ActionEvent;
import java.awt.Toolkit;
/**
 * Read-only Swing window showing the magic skills (fire, water, earth,
 * wind, druidic, white, black, arcane) of the NPC held in
 * {@code JugarOnline.npc4}. A back button disposes the window.
 */
public class MagiaJugadores {
    private JFrame frmHistoriasDeZagas;
    private JTextField textField;
    private JTextField textField_1;
    private JTextField textField_2;
    public JFrame getFrmHistoriasDeZagas() {
        return frmHistoriasDeZagas;
    }
    public void setFrmHistoriasDeZagas(JFrame frmHistoriasDeZagas) {
        this.frmHistoriasDeZagas = frmHistoriasDeZagas;
    }
    private JTextField textField_3;
    private JTextField textField_4;
    private JTextField textField_5;
    private JTextField textField_6;
    private JTextField textField_7;
    // custom font loader shared by all labels
    MorpheusFont mf = new MorpheusFont();
    /**
     * Launch the application.
     */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    MagiaJugadores window = new MagiaJugadores();
                    window.frmHistoriasDeZagas.setVisible(true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }
    /**
     * Create the application.
     */
    public MagiaJugadores() {
        initialize();
    }
    /**
     * Initialize the contents of the frame: one label/text-field pair per
     * magic skill, a back button and a background image.
     */
    private void initialize() {
        frmHistoriasDeZagas = new JFrame();
        frmHistoriasDeZagas.setIconImage(Toolkit.getDefaultToolkit().getImage(MagiaJugadores.class.getResource("/images/Historias de Zagas, logo.png")));
        frmHistoriasDeZagas.setTitle("Historias de Zagas");
        frmHistoriasDeZagas.setResizable(false);
        frmHistoriasDeZagas.setBounds(100, 100, 337, 495);
        frmHistoriasDeZagas.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        frmHistoriasDeZagas.getContentPane().setLayout(null);
        JLabel lblConocimientos = new JLabel("Magia");
        lblConocimientos.setHorizontalAlignment(SwingConstants.CENTER);
        lblConocimientos.setForeground(Color.WHITE);
        lblConocimientos.setFont(mf.MyFont(0, 36));
        lblConocimientos.setBounds(10, 0, 312, 60);
        frmHistoriasDeZagas.getContentPane().add(lblConocimientos);
        JLabel lblArteDeLa = new JLabel("Fuego:");
        lblArteDeLa.setForeground(Color.WHITE);
        lblArteDeLa.setFont(mf.MyFont(0,18));
        lblArteDeLa.setBounds(10, 66, 179, 30);
        frmHistoriasDeZagas.getContentPane().add(lblArteDeLa);
        textField = new JTextField();
        textField.setText(""+JugarOnline.npc4.getMagicSkills().getFire());
        textField.setHorizontalAlignment(SwingConstants.CENTER);
        textField.setEditable(false);
        textField.setColumns(10);
        textField.setBounds(248, 66, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField);
        JLabel lblDiplomacia = new JLabel("Agua:");
        lblDiplomacia.setForeground(Color.WHITE);
        lblDiplomacia.setFont(mf.MyFont(0,18));
        lblDiplomacia.setBounds(10, 107, 179, 30);
        frmHistoriasDeZagas.getContentPane().add(lblDiplomacia);
        textField_1 = new JTextField();
        textField_1.setText(""+JugarOnline.npc4.getMagicSkills().getWater());
        textField_1.setHorizontalAlignment(SwingConstants.CENTER);
        textField_1.setEditable(false);
        textField_1.setColumns(10);
        textField_1.setBounds(248, 107, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField_1);
        JLabel lblEtiqueta = new JLabel("Tierra:");
        lblEtiqueta.setForeground(Color.WHITE);
        lblEtiqueta.setFont(mf.MyFont(0,18));
        lblEtiqueta.setBounds(10, 148, 164, 30);
        frmHistoriasDeZagas.getContentPane().add(lblEtiqueta);
        textField_2 = new JTextField();
        textField_2.setText(""+JugarOnline.npc4.getMagicSkills().getEarth());
        textField_2.setHorizontalAlignment(SwingConstants.CENTER);
        textField_2.setEditable(false);
        textField_2.setColumns(10);
        textField_2.setBounds(248, 148, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField_2);
        JLabel lblMedicina = new JLabel("Viento:");
        lblMedicina.setForeground(Color.WHITE);
        lblMedicina.setFont(mf.MyFont(0,18));
        lblMedicina.setBounds(10, 189, 179, 30);
        frmHistoriasDeZagas.getContentPane().add(lblMedicina);
        textField_3 = new JTextField();
        textField_3.setText(""+JugarOnline.npc4.getMagicSkills().getWind());
        textField_3.setHorizontalAlignment(SwingConstants.CENTER);
        textField_3.setEditable(false);
        textField_3.setColumns(10);
        textField_3.setBounds(248, 189, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField_3);
        JLabel lblOcultismo = new JLabel("Dru\u00EDdica:");
        lblOcultismo.setForeground(Color.WHITE);
        lblOcultismo.setFont(mf.MyFont(0,18));
        lblOcultismo.setBounds(10, 230, 100, 30);
        frmHistoriasDeZagas.getContentPane().add(lblOcultismo);
        textField_4 = new JTextField();
        textField_4.setText(""+JugarOnline.npc4.getMagicSkills().getDruidic());
        textField_4.setHorizontalAlignment(SwingConstants.CENTER);
        textField_4.setEditable(false);
        textField_4.setColumns(10);
        textField_4.setBounds(248, 230, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField_4);
        JLabel lblNegociacin = new JLabel("Blanca:");
        lblNegociacin.setForeground(Color.WHITE);
        lblNegociacin.setFont(mf.MyFont(0,18));
        lblNegociacin.setBounds(10, 271, 164, 30);
        frmHistoriasDeZagas.getContentPane().add(lblNegociacin);
        textField_5 = new JTextField();
        textField_5.setText(""+JugarOnline.npc4.getMagicSkills().getWhite());
        textField_5.setHorizontalAlignment(SwingConstants.CENTER);
        textField_5.setEditable(false);
        textField_5.setColumns(10);
        textField_5.setBounds(248, 271, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField_5);
        JLabel lblNegociacin_1 = new JLabel("Negra:");
        lblNegociacin_1.setForeground(Color.WHITE);
        lblNegociacin_1.setFont(mf.MyFont(0,18));
        lblNegociacin_1.setBounds(10, 312, 179, 30);
        frmHistoriasDeZagas.getContentPane().add(lblNegociacin_1);
        textField_6 = new JTextField();
        textField_6.setText(""+JugarOnline.npc4.getMagicSkills().getBlack());
        textField_6.setHorizontalAlignment(SwingConstants.CENTER);
        textField_6.setEditable(false);
        textField_6.setColumns(10);
        textField_6.setBounds(248, 312, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField_6);
        JLabel lblConocimientosSecretos = new JLabel("Arcana:");
        lblConocimientosSecretos.setForeground(Color.WHITE);
        lblConocimientosSecretos.setFont(mf.MyFont(0,18));
        lblConocimientosSecretos.setBounds(10, 353, 206, 30);
        frmHistoriasDeZagas.getContentPane().add(lblConocimientosSecretos);
        textField_7 = new JTextField();
        textField_7.setText(""+JugarOnline.npc4.getMagicSkills().getArcane());
        textField_7.setHorizontalAlignment(SwingConstants.CENTER);
        textField_7.setEditable(false);
        textField_7.setColumns(10);
        textField_7.setBounds(248, 353, 50, 30);
        frmHistoriasDeZagas.getContentPane().add(textField_7);
        final JButton button = new JButton("");
        // Swap the icon while pressed to simulate a pushed button.
        // Fixed: the resources were loaded via the copy-pasted
        // AtributosJugadores.class literal; use this class for consistency
        // (the absolute resource path makes the lookup behave identically).
        button.addMouseListener(new MouseAdapter() {
            @Override
            public void mousePressed(MouseEvent e) {
                button.setIcon(new ImageIcon(MagiaJugadores.class.getResource("/images/boton atras2.png")));
            }
            @Override
            public void mouseReleased(MouseEvent e) {
                button.setIcon(new ImageIcon(MagiaJugadores.class.getResource("/images/boton atras.png")));
            }
        });
        button.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // the back button simply closes this window
                frmHistoriasDeZagas.dispose();
            }
        });
        button.setIcon(new ImageIcon(MagiaJugadores.class.getResource("/images/boton atras.png")));
        button.setOpaque(false);
        button.setForeground(Color.WHITE);
        button.setFocusPainted(false);
        button.setContentAreaFilled(false);
        button.setBorderPainted(false);
        button.setBorder(new BevelBorder(BevelBorder.RAISED, null, null,
                null, null));
        button.setBackground(new Color(139, 69, 19));
        button.setBounds(10, 420, 105, 35);
        frmHistoriasDeZagas.getContentPane().add(button);
        // background image added last so it sits behind all other components
        JLabel label = new JLabel("");
        label.setIcon(new ImageIcon(MagiaJugadores.class.getResource("/images/background-jugar.jpg")));
        label.setBounds(0, 0, 331, 466);
        frmHistoriasDeZagas.getContentPane().add(label);
    }
}
| |
/*
* Copyright 2017 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.akarnokd.reactive4javaflow.impl.operators;
import hu.akarnokd.reactive4javaflow.*;
import hu.akarnokd.reactive4javaflow.fused.*;
import hu.akarnokd.reactive4javaflow.impl.*;
import hu.akarnokd.reactive4javaflow.impl.util.*;
import java.lang.invoke.*;
import java.util.concurrent.Flow;
import java.util.concurrent.atomic.AtomicInteger;
public final class FolyamOnBackpressureBufferAll<T> extends Folyam<T> {
    final Folyam<T> source;      // upstream to buffer
    final int capacityHint;      // buffer capacity (exact when bounded, island size otherwise)
    final boolean bounded;       // true: fixed-size buffer that can overflow; false: unbounded
    public FolyamOnBackpressureBufferAll(Folyam<T> source, int capacityHint, boolean bounded) {
        this.source = source;
        this.capacityHint = capacityHint;
        this.bounded = bounded;
    }
@Override
protected void subscribeActual(FolyamSubscriber<? super T> s) {
if (s instanceof ConditionalSubscriber) {
source.subscribe(new OnBackpressureBufferConditionalSubscriber<>((ConditionalSubscriber<? super T>)s, capacityHint, bounded));
} else {
source.subscribe(new OnBackpressureBufferSubscriber<>(s, capacityHint, bounded));
}
}
    /**
     * Shared buffering machinery: upstream items are enqueued without
     * backpressure and drained to the downstream according to its requests,
     * with optional ASYNC output fusion.
     * <p>The {@code AtomicInteger} base class serves as the drain
     * work-in-progress counter: only the caller that increments it from
     * 0 to 1 runs the drain loop, serializing emission.
     */
    static abstract class AbstractOnBackpressureBuffer<T> extends AtomicInteger implements FolyamSubscriber<T>, FusedSubscription<T> {
        final PlainQueue<T> queue;   // buffers items until the downstream requests them
        Flow.Subscription upstream;
        boolean outputFused;         // set once requestFusion grants ASYNC mode
        volatile boolean cancelled;
        boolean done;                // accessed through DONE for release/acquire ordering
        static final VarHandle DONE = VH.find(MethodHandles.lookup(), AbstractOnBackpressureBuffer.class, "done", boolean.class);
        Throwable error;             // must be written before DONE is released (see onError)
        long requested;              // accessed through REQUESTED
        static final VarHandle REQUESTED = VH.find(MethodHandles.lookup(), AbstractOnBackpressureBuffer.class, "requested", long.class);
        long emitted;                // count of items emitted; touched only by the drain loop
        AbstractOnBackpressureBuffer(int capacityHint, boolean bounded) {
            // bounded -> fixed-capacity queue whose offer() can fail;
            // unbounded -> linked array queue that grows as needed
            queue = bounded ? new SpscArrayQueue<>(capacityHint) : new SpscLinkedArrayQueue<>(capacityHint);
        }
        @Override
        public final void onSubscribe(Flow.Subscription subscription) {
            upstream = subscription;
            onStart();
            // Consume the upstream without limit; backpressure is absorbed by the queue.
            subscription.request(Long.MAX_VALUE);
        }
        // Hook for subclasses to deliver onSubscribe to the actual downstream.
        abstract void onStart();
        @Override
        public final void onNext(T item) {
            if (!queue.offer(item)) {
                // Bounded buffer overflow: abort the stream with an error.
                upstream.cancel();
                onError(new IllegalStateException("The consumer is not ready to receive items"));
                return;
            }
            drain();
        }
        @Override
        public final void onError(Throwable throwable) {
            error = throwable;
            // Release-store of 'done' publishes 'error' to the drain thread.
            DONE.setRelease(this, true);
            drain();
        }
        @Override
        public final void onComplete() {
            DONE.setRelease(this, true);
            drain();
        }
        final void drain() {
            // Only the 0 -> 1 transition enters the loop; concurrent callers
            // just bump the counter and the active loop picks up their work.
            if (getAndIncrement() == 0) {
                if (outputFused) {
                    drainFused();
                } else {
                    drainLoop();
                }
            }
        }
        abstract void drainLoop();
        abstract void drainFused();
        @Override
        public final void request(long n) {
            SubscriptionHelper.addRequested(this, REQUESTED, n);
            drain();
        }
        @Override
        public final void cancel() {
            cancelled = true;
            upstream.cancel();
            // If no drain is active, clear the queue here; otherwise the
            // running drain loop observes 'cancelled' and clears it itself.
            if (getAndIncrement() == 0) {
                queue.clear();
            }
        }
        @Override
        public final int requestFusion(int mode) {
            // Only ASYNC fusion is supported (items arrive asynchronously).
            if ((mode & ASYNC) != 0) {
                outputFused = true;
                return ASYNC;
            }
            return NONE;
        }
        @Override
        public final T poll() throws Throwable {
            return queue.poll();
        }
        @Override
        public final boolean isEmpty() {
            return queue.isEmpty();
        }
        @Override
        public final void clear() {
            queue.clear();
        }
    }
static final class OnBackpressureBufferSubscriber<T> extends AbstractOnBackpressureBuffer<T> {
final FolyamSubscriber<? super T> actual;
OnBackpressureBufferSubscriber(FolyamSubscriber<? super T> actual, int capacityHint, boolean bounded) {
super(capacityHint, bounded);
this.actual = actual;
}
@Override
void onStart() {
actual.onSubscribe(this);
}
@Override
void drainLoop() {
int missed = 1;
FolyamSubscriber<? super T> a = actual;
PlainQueue<T> q = queue;
long e = emitted;
for (;;) {
long r = (long)REQUESTED.getAcquire(this);
while (e != r) {
if (cancelled) {
q.clear();
return;
}
boolean d = (boolean)DONE.getAcquire(this);
T v = q.poll();
boolean empty = v == null;
if (d && empty) {
Throwable ex = error;
if (ex != null) {
a.onError(ex);
} else {
a.onComplete();
}
return;
}
if (empty) {
break;
}
a.onNext(v);
e++;
}
if (e == r) {
if (cancelled) {
q.clear();
return;
}
if ((boolean)DONE.getAcquire(this) && q.isEmpty()) {
Throwable ex = error;
if (ex != null) {
a.onError(ex);
} else {
a.onComplete();
}
return;
}
}
emitted = e;
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
@Override
void drainFused() {
int missed = 1;
FolyamSubscriber<? super T> a = actual;
PlainQueue<T> q = queue;
for (;;) {
if (cancelled) {
q.clear();
return;
}
boolean d = (boolean)DONE.getAcquire(this);
if (!q.isEmpty()) {
a.onNext(null);
}
if (d) {
Throwable ex = error;
if (ex != null) {
a.onError(ex);
} else {
a.onComplete();
}
return;
}
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
}
static final class OnBackpressureBufferConditionalSubscriber<T> extends AbstractOnBackpressureBuffer<T> {
final ConditionalSubscriber<? super T> actual;
OnBackpressureBufferConditionalSubscriber(ConditionalSubscriber<? super T> actual, int capacityHint, boolean bounded) {
super(capacityHint, bounded);
this.actual = actual;
}
@Override
void onStart() {
actual.onSubscribe(this);
}
@Override
void drainLoop() {
int missed = 1;
ConditionalSubscriber<? super T> a = actual;
PlainQueue<T> q = queue;
long e = emitted;
for (;;) {
long r = (long)REQUESTED.getAcquire(this);
while (e != r) {
if (cancelled) {
q.clear();
return;
}
boolean d = (boolean)DONE.getAcquire(this);
T v = q.poll();
boolean empty = v == null;
if (d && empty) {
Throwable ex = error;
if (ex != null) {
a.onError(ex);
} else {
a.onComplete();
}
return;
}
if (empty) {
break;
}
if (a.tryOnNext(v)) {
e++;
}
}
if (e == r) {
if (cancelled) {
q.clear();
return;
}
if ((boolean)DONE.getAcquire(this) && q.isEmpty()) {
Throwable ex = error;
if (ex != null) {
a.onError(ex);
} else {
a.onComplete();
}
return;
}
}
emitted = e;
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
@Override
void drainFused() {
int missed = 1;
ConditionalSubscriber<? super T> a = actual;
PlainQueue<T> q = queue;
for (;;) {
if (cancelled) {
q.clear();
return;
}
boolean d = (boolean)DONE.getAcquire(this);
if (!q.isEmpty()) {
a.tryOnNext(null);
}
if (d) {
Throwable ex = error;
if (ex != null) {
a.onError(ex);
} else {
a.onComplete();
}
return;
}
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.metamodel.jdbc.integrationtests;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.swing.table.TableModel;
import org.apache.metamodel.BatchUpdateScript;
import org.apache.metamodel.DataContext;
import org.apache.metamodel.UpdateCallback;
import org.apache.metamodel.UpdateScript;
import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.data.DataSetTableModel;
import org.apache.metamodel.insert.RowInsertionBuilder;
import org.apache.metamodel.jdbc.JdbcDataContext;
import org.apache.metamodel.jdbc.JdbcTestTemplates;
import org.apache.metamodel.jdbc.QuerySplitter;
import org.apache.metamodel.query.FilterItem;
import org.apache.metamodel.query.FunctionType;
import org.apache.metamodel.query.OperatorType;
import org.apache.metamodel.query.OrderByItem;
import org.apache.metamodel.query.Query;
import org.apache.metamodel.query.SelectItem;
import org.apache.metamodel.schema.Column;
import org.apache.metamodel.schema.ColumnType;
import org.apache.metamodel.schema.Relationship;
import org.apache.metamodel.schema.Schema;
import org.apache.metamodel.schema.Table;
import org.junit.Ignore;
/**
 * Integration test that exercises PostgreSQL interaction. The tests require the
 * "dellstore2" sample database, which can be obtained from pgFoundry.
 *
 * @see http://mirrors.dotsrc.org/postgresql/projects/pgFoundry/dbsamples/
 */
public class PostgresqlTest extends AbstractJdbIntegrationTest {
// System property key for enabling long-running PostgreSQL tests.
// NOTE(review): not referenced in this portion of the class — verify it is used elsewhere.
private static final String PROPERTY_LONGRUNNINGTESTS = "jdbc.postgresql.longrunningtests";
// Tolerance for double comparisons in assertions (see testDouble()).
private static final double DELTA = 1E-15;
/**
 * Prefix used to look up the connection configuration (url/credentials)
 * for these integration tests.
 */
@Override
protected String getPropertyPrefix() {
    return "postgresql";
}
/**
 * Round-trips timestamp values (microsecond precision) through the shared
 * JDBC test template. Skipped when no PostgreSQL connection is configured.
 */
public void testTimestampValueInsertSelect() throws Exception {
    if (isConfigured()) {
        JdbcTestTemplates.timestampValueInsertSelect(getConnection(), TimeUnit.MICROSECONDS);
    }
}
/**
 * Runs the shared create/insert/update/drop template against a scratch table.
 * Skipped when no PostgreSQL connection is configured.
 */
public void testCreateInsertAndUpdate() throws Exception {
    if (isConfigured()) {
        JdbcTestTemplates.simpleCreateInsertUpdateAndDrop(getDataContext(), "metamodel_test_simple");
    }
}
/**
 * Runs the shared composite-primary-key creation template.
 * Skipped when no PostgreSQL connection is configured.
 */
public void testCompositePrimaryKeyCreation() throws Exception {
    if (isConfigured()) {
        JdbcTestTemplates.compositeKeyCreation(getDataContext(), "metamodel_test_composite_keys");
    }
}
/**
 * Runs the shared template checking how NULL database values are read back.
 * Skipped when no PostgreSQL connection is configured.
 */
public void testInterpretationOfNull() throws Exception {
    if (isConfigured()) {
        JdbcTestTemplates.interpretationOfNulls(getConnection());
    }
}
/**
 * (Re)creates the three-row helper table "test_table" used by the
 * limit/offset tests and returns a refreshed data context pointing at it.
 */
private JdbcDataContext createLimitAndOffsetTestData() {
    final JdbcDataContext dataContext = new JdbcDataContext(getConnection());
    // Drop any leftover table from a previous run so the fixture is deterministic.
    if (dataContext.getTableByQualifiedLabel("test_table") != null) {
        dataContext.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.dropTable("test_table").execute();
            }
        });
    }
    dataContext.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback callback) {
            final Table createdTable = callback.createTable(dataContext.getDefaultSchema(), "test_table")
                    .withColumn("foo").ofType(ColumnType.INTEGER)
                    .withColumn("bar").ofType(ColumnType.VARCHAR)
                    .execute();
            callback.insertInto(createdTable).value("foo", 1).value("bar", "hello").execute();
            callback.insertInto(createdTable).value("foo", 2).value("bar", "there").execute();
            callback.insertInto(createdTable).value("foo", 3).value("bar", "world").execute();
        }
    });
    dataContext.refreshSchemas();
    return dataContext;
}
/**
 * Verifies that LIMIT is honored: selecting with limit(2) from the three-row
 * helper table must return exactly the first two rows.
 */
public void testLimit() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = createLimitAndOffsetTestData();
    Schema schema = dc.getDefaultSchema();
    Table productsTable = schema.getTableByName("test_table");
    DataSet ds = dc.query().from(productsTable).select("foo").limit(2).execute();
    try {
        assertTrue(ds.next());
        assertEquals("Row[values=[1]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[2]]", ds.getRow().toString());
        assertFalse(ds.next());
    } finally {
        // Close in finally so the result set is released even if an assertion fails.
        ds.close();
    }
}
/**
 * Verifies that OFFSET is honored: skipping one row of the three-row helper
 * table must return the remaining two rows.
 */
public void testOffset() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = createLimitAndOffsetTestData();
    Schema schema = dc.getDefaultSchema();
    Table productsTable = schema.getTableByName("test_table");
    DataSet ds = dc.query().from(productsTable).select("foo").offset(1).execute();
    try {
        assertTrue(ds.next());
        assertEquals("Row[values=[2]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[3]]", ds.getRow().toString());
        assertFalse(ds.next());
    } finally {
        // Close in finally so the result set is released even if an assertion fails.
        ds.close();
    }
}
/**
 * Verifies LIMIT and OFFSET combined: limit(1).offset(1) on the three-row
 * helper table must return only the middle row.
 */
public void testLimitAndOffset() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = createLimitAndOffsetTestData();
    Schema schema = dc.getDefaultSchema();
    Table productsTable = schema.getTableByName("test_table");
    DataSet ds = dc.query().from(productsTable).select("foo").limit(1).offset(1).execute();
    try {
        assertTrue(ds.next());
        assertEquals("Row[values=[2]]", ds.getRow().toString());
        assertFalse(ds.next());
    } finally {
        // Close in finally so the result set is released even if an assertion fails.
        ds.close();
    }
}
/**
 * Verifies that generated INSERT statements quote schema/table identifiers,
 * that primary key metadata is exposed correctly, and that inserted rows can
 * be queried back with a WHERE clause on a boolean column.
 */
public void testQuotedInsertSyntax() throws Exception {
    if (!isConfigured()) {
        return;
    }
    final Connection connection = getConnection();
    // Best-effort cleanup of a leftover table from a previous run.
    try {
        connection.createStatement().execute("DROP TABLE my_table");
    } catch (Exception e) {
        // do nothing
    }
    JdbcDataContext dc = new JdbcDataContext(connection);
    final Schema schema = dc.getDefaultSchema();
    // create table
    dc.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback cb) {
            Table table = cb.createTable(schema, "my_table").withColumn("id").asPrimaryKey()
                    .ofType(ColumnType.INTEGER).ofNativeType("SERIAL").nullable(false).withColumn("name")
                    .ofType(ColumnType.VARCHAR).ofSize(10).withColumn("foo").ofType(ColumnType.BOOLEAN)
                    .nullable(true).withColumn("bar").ofType(ColumnType.BOOLEAN).nullable(true).execute();
            assertEquals("my_table", table.getName());
        }
    });
    assertTrue(dc.getColumnByQualifiedLabel("my_table.id").isPrimaryKey());
    assertFalse(dc.getColumnByQualifiedLabel("my_table.name").isPrimaryKey());
    // insert records
    dc.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback callback) {
            RowInsertionBuilder builder = callback.insertInto("my_table").value("name", "row 1").value("foo", true);
            // Peek at the generated SQL through the non-public createSqlStatement()
            // method (via reflection) to assert the quoting of identifiers.
            try {
                Method method = builder.getClass().getDeclaredMethod("createSqlStatement");
                method.setAccessible(true);
                Object result = method.invoke(builder);
                assertEquals("INSERT INTO \"public\".\"my_table\" (name,foo) VALUES (?,?)", result.toString());
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            builder.execute();
            callback.insertInto("my_table").value("name", "row 2").value("foo", false).execute();
        }
    });
    // query
    DataSet ds = dc.query().from("my_table").select("name").where("foo").eq(true).execute();
    assertTrue(ds.next());
    assertEquals("Row[values=[row 1]]", ds.getRow().toString());
    assertFalse(ds.next());
    ds.close();
    // drop
    dc.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback callback) {
            callback.dropTable("my_table").execute();
        }
    });
}
/**
 * Inserts rows that populate different subsets of the nullable boolean columns
 * and verifies that the SERIAL id column auto-increments consistently across
 * the differently-shaped insert statements.
 */
public void testInsertOfDifferentTypes() throws Exception {
    if (!isConfigured()) {
        return;
    }
    final Connection connection = getConnection();
    // Best-effort cleanup of a leftover table from a previous run.
    try {
        connection.createStatement().execute("DROP TABLE my_table");
    } catch (Exception e) {
        // do nothing
    }
    JdbcDataContext dc = new JdbcDataContext(connection);
    final Schema schema = dc.getDefaultSchema();
    dc.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback cb) {
            Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                    .ofNativeType("SERIAL").nullable(false).withColumn("name").ofType(ColumnType.VARCHAR)
                    .ofSize(10).withColumn("foo").ofType(ColumnType.BOOLEAN).nullable(true).withColumn("bar")
                    .ofType(ColumnType.BOOLEAN).nullable(true).execute();
            assertEquals("my_table", table.getName());
        }
    });
    try {
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.insertInto("my_table").value("name", "row 1").value("foo", true).execute();
                callback.insertInto("my_table").value("name", "row 2").value("bar", true).execute();
                callback.insertInto("my_table").value("name", "row 3").value("foo", true).execute();
                callback.insertInto("my_table").value("name", "row 4").value("foo", true).execute();
                callback.insertInto("my_table").value("name", "row 5").value("bar", true).execute();
                callback.insertInto("my_table").value("name", "row 6").value("foo", true).value("bar", true)
                        .execute();
                callback.insertInto("my_table").value("name", "row 7").value("foo", true).value("bar", true)
                        .execute();
                callback.insertInto("my_table").value("name", "row 8").value("foo", false).value("bar", false)
                        .execute();
            }
        });
        DataSet ds = dc.query().from("my_table").select("id").and("name").execute();
        assertTrue(ds.next());
        assertEquals("Row[values=[1, row 1]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[2, row 2]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[3, row 3]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[4, row 4]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[5, row 5]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[6, row 6]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[7, row 7]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[8, row 8]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();
    } finally {
        // Always drop the helper table so reruns start from a clean slate.
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.dropTable("my_table").execute();
            }
        });
    }
}
/**
 * Tests some inconsistencies dealing with booleans.
 *
 * @see http://eobjects.org/trac/ticket/829
 */
public void testBoolean() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = new JdbcDataContext(getConnection());
    final Schema schema = dc.getDefaultSchema();
    dc.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback cb) {
            Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                    .ofNativeType("SERIAL").nullable(false).withColumn("some_bool").ofType(ColumnType.BOOLEAN)
                    .nullable(false).execute();
            assertEquals("my_table", table.getName());
            cb.insertInto(table).value("id", 1).value("some_bool", true).execute();
            cb.insertInto(table).value("id", 2).value("some_bool", false).execute();
        }
    });
    try {
        DataSet ds = dc.query().from("my_table").select("some_bool").execute();
        try {
            assertTrue(ds.next());
            assertEquals("Row[values=[true]]", ds.getRow().toString());
            assertTrue(ds.next());
            assertEquals("Row[values=[false]]", ds.getRow().toString());
            assertFalse(ds.next());
        } finally {
            // BUGFIX: the DataSet was never closed; release the result set/statement.
            ds.close();
        }
    } finally {
        // BUGFIX: drop the table in a finally block (as the sibling tests do) so a
        // failed assertion does not leave "my_table" behind for the next run.
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                cb.dropTable("my_table").execute();
            }
        });
    }
}
/**
 * Tests type rewriting for double type, including the IEEE special values
 * (infinities and NaN), which PostgreSQL supports natively.
 *
 * @see https://issues.apache.org/jira/browse/METAMODEL-151
 */
public void testDouble() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = new JdbcDataContext(getConnection());
    final Schema schema = dc.getDefaultSchema();
    dc.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback cb) {
            Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                    .ofNativeType("SERIAL").nullable(false).withColumn("some_double").ofType(ColumnType.DOUBLE)
                    .nullable(false).execute();
            assertEquals("my_table", table.getName());
            cb.insertInto(table).value("id", 1).value("some_double", Double.MIN_VALUE).execute();
            cb.insertInto(table).value("id", 2).value("some_double", Double.MAX_VALUE).execute();
            cb.insertInto(table).value("id", 3).value("some_double", Double.NEGATIVE_INFINITY).execute();
            cb.insertInto(table).value("id", 4).value("some_double", Double.POSITIVE_INFINITY).execute();
            cb.insertInto(table).value("id", 5).value("some_double", Double.NaN).execute();
        }
    });
    try {
        DataSet ds = dc.query().from("my_table").select("some_double").execute();
        try {
            assertTrue(ds.next());
            Double minVal = (Double) ds.getRow().getValue(ds.getSelectItems()[0]);
            assertTrue(ds.next());
            Double maxVal = (Double) ds.getRow().getValue(ds.getSelectItems()[0]);
            assertTrue(ds.next());
            Double negInf = (Double) ds.getRow().getValue(ds.getSelectItems()[0]);
            assertTrue(ds.next());
            Double posInf = (Double) ds.getRow().getValue(ds.getSelectItems()[0]);
            assertTrue(ds.next());
            Double nAn = (Double) ds.getRow().getValue(ds.getSelectItems()[0]);
            assertFalse(ds.next());
            assertEquals(Double.MIN_VALUE, minVal, DELTA);
            assertEquals(Double.MAX_VALUE, maxVal, DELTA);
            assertTrue(Double.isInfinite(negInf));
            assertTrue(Double.isInfinite(posInf));
            assertTrue(Double.isNaN(nAn));
        } finally {
            // BUGFIX: the DataSet was never closed; release the result set/statement.
            ds.close();
        }
    } finally {
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                cb.dropTable("my_table").execute();
            }
        });
    }
}
/**
 * Verifies BLOB (bytea) support: the reported column metadata, inserting raw
 * byte arrays, and reading them back unchanged.
 */
public void testBlob() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = new JdbcDataContext(getConnection());
    final Schema schema = dc.getDefaultSchema();
    dc.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback cb) {
            Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                    .ofNativeType("SERIAL").nullable(false).withColumn("some_bytes").ofType(ColumnType.BLOB)
                    .execute();
            assertEquals("my_table", table.getName());
        }
    });
    try {
        dc.refreshSchemas();
        final Column column = dc.getColumnByQualifiedLabel("my_table.some_bytes");
        assertEquals("Column[name=some_bytes,columnNumber=1,type=BINARY,nullable=true,"
                + "nativeType=bytea,columnSize=2147483647]", column.toString());
        final Table table = column.getTable();
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.insertInto(table).value(column, new byte[] { 1, 2, 3 }).execute();
                callback.insertInto(table).value(column, "hello world".getBytes()).execute();
            }
        });
        byte[] bytes;
        DataSet ds = dc.query().from(table).select(table.getColumns()).execute();
        try {
            assertTrue(ds.next());
            assertEquals(1, ds.getRow().getValue(0));
            bytes = (byte[]) ds.getRow().getValue(1);
            assertEquals(3, bytes.length);
            assertEquals(1, bytes[0]);
            assertEquals(2, bytes[1]);
            assertEquals(3, bytes[2]);
            assertTrue(ds.next());
            assertEquals(2, ds.getRow().getValue(0));
            bytes = (byte[]) ds.getRow().getValue(1);
            assertEquals("hello world", new String(bytes));
            assertFalse(ds.next());
        } finally {
            // BUGFIX: the DataSet was never closed; release the result set/statement.
            ds.close();
        }
    } finally {
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                cb.dropTable("my_table").execute();
            }
        });
    }
}
/**
 * Runs the shared date-type create/insert/update template in the default
 * schema. Skipped when no PostgreSQL connection is configured.
 */
public void testCreateInsertAndUpdateDateTypes() throws Exception {
    if (isConfigured()) {
        final JdbcDataContext dataContext = getDataContext();
        JdbcTestTemplates.createInsertAndUpdateDateTypes(dataContext, dataContext.getDefaultSchema(),
                "metamodel_postgresql_test");
    }
}
/**
 * Creates a table (including a column name containing a space), inserts rows,
 * verifies the reported column metadata, then exercises UPDATE and DELETE and
 * checks the table is gone after the final drop.
 */
public void testCreateTableAndWriteRecords() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = new JdbcDataContext(getConnection());
    final Schema schema = dc.getDefaultSchema();
    try {
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                        .ofNativeType("SERIAL").nullable(false).withColumn("person name").ofSize(255)
                        .withColumn("age").ofType(ColumnType.INTEGER).execute();
                assertEquals("[id, person name, age]", Arrays.toString(table.getColumnNames()));
                assertEquals(
                        "Column[name=id,columnNumber=0,type=INTEGER,nullable=false,nativeType=serial,columnSize=10]",
                        table.getColumnByName("id").toString());
                assertEquals(
                        "Column[name=person name,columnNumber=1,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=255]",
                        table.getColumnByName("person name").toString());
                assertEquals(
                        "Column[name=age,columnNumber=2,type=INTEGER,nullable=true,nativeType=int4,columnSize=10]",
                        table.getColumnByName("age").toString());
                cb.insertInto(table).value("person name", "John Doe").value("age", 42).execute();
                cb.insertInto(table).value("age", 43).value("person name", "Jane Doe").execute();
            }
        });
        final Table table = schema.getTableByName("my_table");
        Query query = dc.query().from(table).select(table.getColumns()).toQuery();
        DataSet ds = dc.executeQuery(query);
        assertTrue(ds.next());
        assertEquals("Row[values=[1, John Doe, 42]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[2, Jane Doe, 43]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();
        // Update one row and delete the other, then re-run the same query.
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.update(table).value("age", 102).where("id").eq(1).execute();
                callback.deleteFrom(table).where("id").eq(2).execute();
            }
        });
        ds = dc.executeQuery(query);
        assertTrue(ds.next());
        assertEquals("Row[values=[1, John Doe, 102]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();
    } finally {
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.dropTable("my_table").execute();
            }
        });
        // The dropped table must no longer be resolvable.
        assertNull(dc.getTableByQualifiedLabel("my_table"));
    }
}
/**
 * Verifies that inserting floating-point values into an INTEGER column rounds
 * them (42.4673 -> 42, 43.5673 -> 44) rather than failing.
 */
public void testCreateTableInsertValueFloatForIntColumn() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = new JdbcDataContext(getConnection());
    final Schema schema = dc.getDefaultSchema();
    try {
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                        .ofNativeType("SERIAL").nullable(false).withColumn("person name").ofSize(255)
                        .withColumn("age").ofType(ColumnType.INTEGER).execute();
                assertEquals("[id, person name, age]", Arrays.toString(table.getColumnNames()));
                assertEquals(
                        "Column[name=id,columnNumber=0,type=INTEGER,nullable=false,nativeType=serial,columnSize=10]",
                        table.getColumnByName("id").toString());
                assertEquals(
                        "Column[name=person name,columnNumber=1,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=255]",
                        table.getColumnByName("person name").toString());
                assertEquals(
                        "Column[name=age,columnNumber=2,type=INTEGER,nullable=true,nativeType=int4,columnSize=10]",
                        table.getColumnByName("age").toString());
                cb.insertInto(table).value("person name", "John Doe").value("age", 42.4673).execute();
                cb.insertInto(table).value("age", 43.5673).value("person name", "Jane Doe").execute();
            }
        });
        Table table = schema.getTableByName("my_table");
        Query query = dc.query().from(table).select(table.getColumns()).toQuery();
        DataSet ds = dc.executeQuery(query);
        assertTrue(ds.next());
        // Float value input will be rounded down into integer number.
        assertEquals("Row[values=[1, John Doe, 42]]", ds.getRow().toString());
        assertTrue(ds.next());
        // The age will be incremented as float value input will be rounded
        // up.
        assertEquals("Row[values=[2, Jane Doe, 44]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();
    } finally {
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                cb.dropTable("my_table").execute();
            }
        });
    }
}
/**
 * Verifies the error message produced when a batch INSERT supplies a String
 * value for an INTEGER column. The batch is expected to abort; cleanup drops
 * the table only if its creation actually succeeded.
 */
public void testInsertFailureForStringValueForIntegerColumn() throws Exception {
    if (!isConfigured()) {
        return;
    }
    JdbcDataContext dc = new JdbcDataContext(getConnection());
    final Schema schema = dc.getDefaultSchema();
    try {
        dc.executeUpdate(new BatchUpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                        .ofNativeType("SERIAL").nullable(false).withColumn("person name").ofSize(255)
                        .withColumn("age").ofType(ColumnType.INTEGER).execute();
                assertEquals("[id, person name, age]", Arrays.toString(table.getColumnNames()));
                assertEquals(
                        "Column[name=id,columnNumber=0,type=INTEGER,nullable=false,nativeType=serial,columnSize=10]",
                        table.getColumnByName("id").toString());
                assertEquals(
                        "Column[name=person name,columnNumber=1,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=255]",
                        table.getColumnByName("person name").toString());
                assertEquals(
                        "Column[name=age,columnNumber=2,type=INTEGER,nullable=true,nativeType=int4,columnSize=10]",
                        table.getColumnByName("age").toString());
                // String "42" against an int column: expected to abort the batch.
                cb.insertInto(table).value("person name", "John Doe").value("age", "42").execute();
            }
        });
    } catch (Exception e) {
        // Normalize newlines so the assertion is stable across drivers.
        String message = e.getMessage().replaceAll("\n", " ");
        assertEquals(
                "Could not execute batch: INSERT INTO \"public\".\"my_table\" (\"person name\",age) VALUES ('John Doe','42'): Batch entry 0 INSERT INTO \"public\".\"my_table\" (\"person name\",age) VALUES ('John Doe','42') was aborted. Call getNextException to see the cause.",
                message);
    } finally {
        dc.refreshSchemas();
        if (dc.getTableByQualifiedLabel("my_table") != null) {
            dc.executeUpdate(new UpdateScript() {
                @Override
                public void run(UpdateCallback cb) {
                    cb.dropTable("my_table").execute();
                }
            });
        }
    }
}
/**
 * Asserts that JDBC metadata reports the PostgreSQL product name expected by
 * JdbcDataContext. Skipped when no connection is configured.
 */
public void testDatabaseProductName() throws Exception {
    if (!isConfigured()) {
        return;
    }
    final DatabaseMetaData metaData = getConnection().getMetaData();
    assertEquals(JdbcDataContext.DATABASE_PRODUCT_POSTGRESQL, metaData.getDatabaseProductName());
}
/**
 * Asserts that the default schema of a PostgreSQL connection is "public".
 * Skipped when no connection is configured.
 */
public void testGetDefaultSchema() throws Exception {
    if (!isConfigured()) {
        return;
    }
    final Schema defaultSchema = new JdbcDataContext(getConnection()).getDefaultSchema();
    assertEquals("public", defaultSchema.getName());
}
/**
 * Explores the schema metadata of the "dellstore2" sample database: schema
 * names, tables of the "public" schema, column metadata of selected tables
 * and the relationships discovered from foreign keys.
 */
public void testGetSchema() throws Exception {
    if (!isConfigured()) {
        return;
    }
    DataContext dc = new JdbcDataContext(getConnection());
    Schema[] schemas = dc.getSchemas();
    // At minimum: information_schema, pg_catalog and public.
    assertTrue(schemas.length >= 3);
    assertNotNull(dc.getSchemaByName("information_schema"));
    assertNotNull(dc.getSchemaByName("pg_catalog"));
    assertNotNull(dc.getSchemaByName("public"));
    Schema schema = dc.getSchemaByName("public");
    assertEquals("[Table[name=categories,type=TABLE,remarks=null], "
            + "Table[name=cust_hist,type=TABLE,remarks=null], " + "Table[name=customers,type=TABLE,remarks=null], "
            + "Table[name=inventory,type=TABLE,remarks=null], "
            + "Table[name=orderlines,type=TABLE,remarks=null], " + "Table[name=orders,type=TABLE,remarks=null], "
            + "Table[name=products,type=TABLE,remarks=null], " + "Table[name=reorder,type=TABLE,remarks=null]]",
            Arrays.toString(schema.getTables()));
    Table productsTable = schema.getTableByName("products");
    assertEquals(
            "[Column[name=prod_id,columnNumber=0,type=INTEGER,nullable=false,nativeType=serial,columnSize=10], "
                    + "Column[name=category,columnNumber=1,type=INTEGER,nullable=false,nativeType=int4,columnSize=10], "
                    + "Column[name=title,columnNumber=2,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=actor,columnNumber=3,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=price,columnNumber=4,type=NUMERIC,nullable=false,nativeType=numeric,columnSize=12], "
                    + "Column[name=special,columnNumber=5,type=SMALLINT,nullable=true,nativeType=int2,columnSize=5], "
                    + "Column[name=common_prod_id,columnNumber=6,type=INTEGER,nullable=false,nativeType=int4,columnSize=10]]",
            Arrays.toString(productsTable.getColumns()));
    Table customersTable = schema.getTableByName("customers");
    assertEquals(
            "[Column[name=customerid,columnNumber=0,type=INTEGER,nullable=false,nativeType=serial,columnSize=10], "
                    + "Column[name=firstname,columnNumber=1,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=lastname,columnNumber=2,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=address1,columnNumber=3,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=address2,columnNumber=4,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=50], "
                    + "Column[name=city,columnNumber=5,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=state,columnNumber=6,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=50], "
                    + "Column[name=zip,columnNumber=7,type=INTEGER,nullable=true,nativeType=int4,columnSize=10], "
                    + "Column[name=country,columnNumber=8,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=region,columnNumber=9,type=SMALLINT,nullable=false,nativeType=int2,columnSize=5], "
                    + "Column[name=email,columnNumber=10,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=50], "
                    + "Column[name=phone,columnNumber=11,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=50], "
                    + "Column[name=creditcardtype,columnNumber=12,type=INTEGER,nullable=false,nativeType=int4,columnSize=10], "
                    + "Column[name=creditcard,columnNumber=13,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=creditcardexpiration,columnNumber=14,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=username,columnNumber=15,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=password,columnNumber=16,type=VARCHAR,nullable=false,nativeType=varchar,columnSize=50], "
                    + "Column[name=age,columnNumber=17,type=SMALLINT,nullable=true,nativeType=int2,columnSize=5], "
                    + "Column[name=income,columnNumber=18,type=INTEGER,nullable=true,nativeType=int4,columnSize=10], "
                    + "Column[name=gender,columnNumber=19,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=1]]",
            Arrays.toString(customersTable.getColumns()));
    // customers is the primary side of two relationships: cust_hist and orders.
    Relationship[] relations = customersTable.getRelationships();
    assertEquals(2, relations.length);
    assertEquals(
            "[Relationship[primaryTable=customers,primaryColumns=[customerid],foreignTable=cust_hist,foreignColumns=[customerid]], "
                    + "Relationship[primaryTable=customers,primaryColumns=[customerid],foreignTable=orders,foreignColumns=[customerid]]]",
            Arrays.toString(relations));
    assertEquals("Table[name=customers,type=TABLE,remarks=null]", relations[0].getPrimaryTable().toString());
    assertEquals("Table[name=cust_hist,type=TABLE,remarks=null]", relations[0].getForeignTable().toString());
    assertEquals("Table[name=customers,type=TABLE,remarks=null]", relations[1].getPrimaryTable().toString());
    assertEquals("Table[name=orders,type=TABLE,remarks=null]", relations[1].getForeignTable().toString());
    Table ordersTable = schema.getTableByName("orderlines");
    assertEquals(
            "[Column[name=orderlineid,columnNumber=0,type=INTEGER,nullable=false,nativeType=int4,columnSize=10], "
                    + "Column[name=orderid,columnNumber=1,type=INTEGER,nullable=false,nativeType=int4,columnSize=10], "
                    + "Column[name=prod_id,columnNumber=2,type=INTEGER,nullable=false,nativeType=int4,columnSize=10], "
                    + "Column[name=quantity,columnNumber=3,type=SMALLINT,nullable=false,nativeType=int2,columnSize=5], "
                    + "Column[name=orderdate,columnNumber=4,type=DATE,nullable=false,nativeType=date,columnSize=13]]",
            Arrays.toString(ordersTable.getColumns()));
}
    /**
     * Executes two queries against the "public" schema of the sample database
     * and verifies both the rendered SQL and the returned data: first a plain
     * SELECT over products, then the same query object extended into a joined,
     * grouped, filtered and ordered aggregate over products and orderlines.
     */
    public void testExecuteQueryInPublicSchema() throws Exception {
        // Skip silently when no test database connection is configured.
        if (!isConfigured()) {
            return;
        }
        DataContext dc = new JdbcDataContext(getConnection());
        Query q = new Query();
        Schema schema = dc.getSchemaByName("public");
        Table productsTable = schema.getTableByName("products");
        q.from(productsTable);
        Column titleColumn = productsTable.getColumnByName("title");
        Column productPriceColumn = productsTable.getColumnByName("price");
        q.select(titleColumn, productPriceColumn);
        // Alias the first select item; the alias shows up in the SQL asserted below.
        q.getSelectClause().getItem(0).setAlias("product-title");
        DataSet data = dc.executeQuery(q);
        TableModel tableModel = new DataSetTableModel(data);
        assertEquals(2, tableModel.getColumnCount());
        assertEquals(10000, tableModel.getRowCount());
        // Spot-check a few rows. NOTE(review): this first query has no ORDER BY,
        // so these row-position assertions assume the database returns a stable
        // order — confirm this holds for the target database.
        assertEquals("ACADEMY ACADEMY", tableModel.getValueAt(0, 0).toString());
        assertEquals("25.99", tableModel.getValueAt(0, 1).toString());
        assertEquals("ACADEMY HORN", tableModel.getValueAt(432, 0).toString());
        assertEquals("16.99", tableModel.getValueAt(6346, 1).toString());
        assertEquals("ALADDIN ZORRO", tableModel.getValueAt(9999, 0).toString());
        assertEquals("10.99", tableModel.getValueAt(9999, 1).toString());
        // Drop references before the same Query object is mutated and re-run.
        data = null;
        tableModel = null;
        Column prodIdColumn = productsTable.getColumnByName("prod_id");
        Table orderlinesTable = schema.getTableByName("orderlines");
        Column commonProdIdColumn = orderlinesTable.getColumnByName("prod_id");
        Column quantityColumn = orderlinesTable.getColumnByName("quantity");
        // Extend the query: join to orderlines, group by title, replace the price
        // select item with a SUM(quantity) aggregate, and add HAVING + ORDER BY.
        q.from(orderlinesTable);
        q.where(new FilterItem(new SelectItem(prodIdColumn), OperatorType.EQUALS_TO, new SelectItem(commonProdIdColumn)));
        q.groupBy(titleColumn);
        q.getSelectClause().removeItem(q.getSelectClause().getSelectItem(productPriceColumn));
        SelectItem quantitySum = new SelectItem(FunctionType.SUM, quantityColumn).setAlias("orderAmount");
        q.select(quantitySum);
        q.having(new FilterItem(quantitySum, OperatorType.GREATER_THAN, 25));
        q.orderBy(new OrderByItem(q.getSelectClause().getItem(0)));
        // Verify the exact SQL rendered for the extended query.
        assertEquals("SELECT \"products\".\"title\" AS product-title, SUM(\"orderlines\".\"quantity\") AS orderAmount "
                + "FROM public.\"products\", public.\"orderlines\" "
                + "WHERE \"products\".\"prod_id\" = \"orderlines\".\"prod_id\" " + "GROUP BY \"products\".\"title\" "
                + "HAVING SUM(\"orderlines\".\"quantity\") > 25 " + "ORDER BY \"products\".\"title\" ASC", q.toString());
        data = dc.executeQuery(q);
        tableModel = new DataSetTableModel(data);
        assertEquals(2, tableModel.getColumnCount());
        assertEquals(136, tableModel.getRowCount());
        // Aggregated result is ORDER BY title ASC, so positions are deterministic.
        assertEquals("ACADEMY ALABAMA", tableModel.getValueAt(0, 0).toString());
        assertEquals("27", tableModel.getValueAt(0, 1).toString());
        assertEquals("AIRPORT MOURNING", tableModel.getValueAt(99, 0).toString());
        assertEquals("29", tableModel.getValueAt(99, 1).toString());
        assertEquals("ALADDIN WORKER", tableModel.getValueAt(135, 0).toString());
        assertEquals("27", tableModel.getValueAt(135, 1).toString());
    }
public void testWhiteSpaceColumns() throws Exception {
if (!isConfigured()) {
return;
}
DatabaseMetaData metaData = getConnection().getMetaData();
assertEquals("\"", metaData.getIdentifierQuoteString());
}
    /**
     * Long-running test: creates a table (including a column with whitespace
     * in its name), inserts one million rows in a single update script, and
     * verifies the row count. The table is dropped in a finally block so the
     * database is left clean even when assertions fail.
     */
    public void testCreateTableAndInsert1MRecords() throws Exception {
        // Skip when no connection is configured.
        if (!isConfigured()) {
            return;
        }
        // Only run when long-running tests are explicitly enabled.
        if (!"true".equalsIgnoreCase(getProperties().getProperty(PROPERTY_LONGRUNNINGTESTS))) {
            return;
        }
        JdbcDataContext dc = new JdbcDataContext(getConnection());
        final Schema schema = dc.getDefaultSchema();
        try {
            dc.executeUpdate(new UpdateScript() {
                @Override
                public void run(UpdateCallback cb) {
                    // Create the table and assert the metadata that comes back,
                    // including the native SERIAL type mapping for the id column.
                    Table table = cb.createTable(schema, "my_table").withColumn("id").ofType(ColumnType.INTEGER)
                            .ofNativeType("SERIAL").nullable(false).withColumn("person name").ofSize(255)
                            .withColumn("age").ofType(ColumnType.INTEGER).execute();
                    assertEquals("[id, person name, age]", Arrays.toString(table.getColumnNames()));
                    assertEquals(
                            "Column[name=id,columnNumber=0,type=INTEGER,nullable=false,nativeType=serial,columnSize=10]",
                            table.getColumnByName("id").toString());
                    assertEquals(
                            "Column[name=person name,columnNumber=1,type=VARCHAR,nullable=true,nativeType=varchar,columnSize=255]",
                            table.getColumnByName("person name").toString());
                    assertEquals(
                            "Column[name=age,columnNumber=2,type=INTEGER,nullable=true,nativeType=int4,columnSize=10]",
                            table.getColumnByName("age").toString());
                    // Insert 1M rows inside the same update script (single transaction).
                    for (int i = 0; i < 1000000; i++) {
                        cb.insertInto(table).value("person name", "John Doe").value("age", i + 10).execute();
                    }
                }
            });
            // Verify the inserted row count with a COUNT(*) query.
            Table table = schema.getTableByName("my_table");
            Query query = dc.query().from(table).selectCount().toQuery();
            DataSet ds = dc.executeQuery(query);
            assertTrue(ds.next());
            assertEquals("Row[values=[1000000]]", ds.getRow().toString());
            assertFalse(ds.next());
            ds.close();
        } finally {
            // Always drop the table, even if an assertion above failed.
            dc.executeUpdate(new UpdateScript() {
                @Override
                public void run(UpdateCallback cb) {
                    cb.dropTable("my_table").execute();
                }
            });
        }
    }
public void testCharOfSizeOne() throws Exception {
if (!isConfigured()) {
return;
}
JdbcTestTemplates.meaningOfOneSizeChar(getConnection());
}
    /**
     * Splits a huge query into 146 pieces and executes them to test that the
     * collective result are equal to the original one in size.
     *
     * <p>NOTE(review): annotated {@code @Ignore}; also gated on the
     * long-running-tests property, so it only runs when explicitly enabled.
     */
    @Ignore
    public void testSplitHugeQueryExecute146() throws Exception {
        // Skip when no connection is configured.
        if (!isConfigured()) {
            return;
        }
        // Only run when long-running tests are explicitly enabled.
        if (!"true".equalsIgnoreCase(getProperties().getProperty(PROPERTY_LONGRUNNINGTESTS))) {
            return;
        }
        DataContext dc = new JdbcDataContext(getConnection());
        Query q = new Query();
        Schema schema = dc.getSchemaByName("public");
        Table productsTable = schema.getTableByName("products");
        Table customerTable = schema.getTableByName("customers");
        // Cartesian join of products and customers (aliased p and c) — this is
        // what makes the result huge (14M+ rows after the WHERE filters).
        q.from(productsTable, "p").from(customerTable, "c");
        Column titleColumn = productsTable.getColumnByName("title");
        Column priceColumn = productsTable.getColumnByName("price");
        Column cityColumn = customerTable.getColumnByName("city");
        Column ageColumn = customerTable.getColumnByName("age");
        q.select(titleColumn, priceColumn, cityColumn);
        q.where(new FilterItem(new SelectItem(priceColumn), OperatorType.GREATER_THAN, 27));
        q.where(new FilterItem(new SelectItem(ageColumn), OperatorType.GREATER_THAN, 55));
        assertEquals(
                "SELECT p.\"title\", p.\"price\", c.\"city\" FROM public.\"products\" p, public.\"customers\" c WHERE p.\"price\" > 27 AND c.\"age\" > 55",
                q.toString());
        // Split so that each sub-query yields at most 100k rows.
        QuerySplitter qs = new QuerySplitter(dc, q);
        qs.setMaxRows(100000);
        assertEquals(14072278, qs.getRowCount());
        List<Query> splitQueries = qs.splitQuery();
        assertEquals(146, splitQueries.size());
        // First and last split queries carry the splitter-generated range predicates.
        assertEquals(
                "SELECT p.\"title\", p.\"price\", c.\"city\" FROM public.\"products\" p, public.\"customers\" c WHERE p.\"price\" > 27 AND c.\"age\" > 55 AND (c.\"customerid\" < 143 OR c.\"customerid\" IS NULL) AND (p.\"category\" < 8 OR p.\"category\" IS NULL)",
                splitQueries.get(0).toString());
        assertEquals(
                "SELECT p.\"title\", p.\"price\", c.\"city\" FROM public.\"products\" p, public.\"customers\" c WHERE p.\"price\" > 27 AND c.\"age\" > 55 AND (c.\"customerid\" > 19739 OR c.\"customerid\" = 19739)",
                splitQueries.get(145).toString());
        // Expected per-split row counts for all 146 queries.
        assertEquals(
                "[45954, 55752, 52122, 55480, 49770, 53410, 60434, 51590, 97284, 94336, 86966, 76648, 98758, 84018, 98758, 95810, 92862, 91388, 39798, 79596, "
                        + "91388, 48642, 60434, 106128, 94336, 94336, 86966, 79596, 85492, 94336, 104654, 97284, 84018, 101706, 109076, 89914, 110550, 107602, 98758, "
                        + "112024, 100232, 101706, 95810, 92862, 107602, 100232, 86966, 98758, 106128, 91388, 107602, 104654, 107602, 81070, 114972, 79596, 100232, 97284, "
                        + "103180, 98758, 113498, 103180, 89914, 104654, 97284, 109076, 114972, 103180, 86966, 106128, 101706, 95810, 103180, 88440, 112024, 91388, 106128, "
                        + "82544, 122342, 98758, 104654, 103180, 104654, 89914, 106128, 88440, 103180, 100232, 98758, 100232, 89914, 101706, 100232, 107602, 88440, 89914, "
                        + "91388, 103180, 100232, 104654, 120868, 106128, 100232, 107602, 97284, 103180, 106128, 91388, 100232, 106128, 100232, 109076, 94336, 106128, 94336, "
                        + "106128, 104654, 116446, 98758, 113498, 107602, 104654, 107602, 88440, 100232, 92862, 89914, 110550, 109076, 100232, 92862, 100232, 104654, 103180, "
                        + "89914, 103180, 103180, 107602, 85492, 112024, 85492, 101706, 92862, 86966, 104654, 201938]",
                Arrays.toString(getCounts(dc, splitQueries)));
        assertSameCount(dc, qs, splitQueries);
        // Finally iterate the combined data set and verify the total row count.
        DataSet data = qs.executeQueries(splitQueries);
        int count = 0;
        while (data.next()) {
            count++;
        }
        data.close();
        assertEquals(14072278, count);
        System.out.println("Successfully iterated 14072278 rows! :)");
    }
/**
* Utility method for asserting that a query and it's splitted queries have
* the same total count
*/
private void assertSameCount(DataContext dc, QuerySplitter qs, List<Query> queries) {
long count1 = qs.getRowCount();
long count2 = 0;
for (Query q : queries) {
count2 += getCount(dc, q);
}
assertEquals(count1, count2);
}
public long[] getCounts(DataContext dc, List<Query> queries) {
long[] result = new long[queries.size()];
for (int i = 0; i < result.length; i++) {
result[i] = getCount(dc, queries.get(i));
}
return result;
}
/**
* Gets the count of a query
*/
private long getCount(DataContext dc, Query query) {
return new QuerySplitter(dc, query).getRowCount();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.consumer.internals.PartitionAssignor;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TopologyWrapper;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.ValueJoiner;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.assignment.AssignmentInfo;
import org.apache.kafka.streams.processor.internals.assignment.SubscriptionInfo;
import org.apache.kafka.streams.state.HostInfo;
import org.apache.kafka.test.MockClientSupplier;
import org.apache.kafka.test.MockInternalTopicManager;
import org.apache.kafka.test.MockKeyValueStoreBuilder;
import org.apache.kafka.test.MockProcessorSupplier;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import static java.time.Duration.ofMillis;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.fail;
@SuppressWarnings("unchecked")
public class StreamsPartitionAssignorTest {
    // Partitions of the three source topics referenced throughout the tests.
    private final TopicPartition t1p0 = new TopicPartition("topic1", 0);
    private final TopicPartition t1p1 = new TopicPartition("topic1", 1);
    private final TopicPartition t1p2 = new TopicPartition("topic1", 2);
    private final TopicPartition t1p3 = new TopicPartition("topic1", 3);
    private final TopicPartition t2p0 = new TopicPartition("topic2", 0);
    private final TopicPartition t2p1 = new TopicPartition("topic2", 1);
    private final TopicPartition t2p2 = new TopicPartition("topic2", 2);
    private final TopicPartition t2p3 = new TopicPartition("topic2", 3);
    private final TopicPartition t3p0 = new TopicPartition("topic3", 0);
    private final TopicPartition t3p1 = new TopicPartition("topic3", 1);
    private final TopicPartition t3p2 = new TopicPartition("topic3", 2);
    private final TopicPartition t3p3 = new TopicPartition("topic3", 3);
    // Note: only topic1/topic2 here — topic3 is used by the "new tasks" test.
    private final Set<String> allTopics = Utils.mkSet("topic1", "topic2");
    // Cluster metadata: topic1/topic2 with 3 partitions each, topic3 with 4.
    private final List<PartitionInfo> infos = asList(
            new PartitionInfo("topic1", 0, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic1", 1, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic1", 2, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic2", 0, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic2", 1, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic2", 2, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic3", 0, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic3", 1, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic3", 2, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic3", 3, Node.noNode(), new Node[0], new Node[0])
    );
    private final Cluster metadata = new Cluster(
        "cluster",
        Collections.singletonList(Node.noNode()),
        infos,
        Collections.emptySet(),
        Collections.emptySet());
    // Task ids for the single sub-topology (group 0) over four partitions.
    private final TaskId task0 = new TaskId(0, 0);
    private final TaskId task1 = new TaskId(0, 1);
    private final TaskId task2 = new TaskId(0, 2);
    private final TaskId task3 = new TaskId(0, 3);
    // Objects under test and their collaborators; the TaskManager is a nice
    // mock whose expectations are primed per-test via mockTaskManager(...).
    private final StreamsPartitionAssignor partitionAssignor = new StreamsPartitionAssignor();
    private final MockClientSupplier mockClientSupplier = new MockClientSupplier();
    private final InternalTopologyBuilder builder = new InternalTopologyBuilder();
    private final StreamsConfig streamsConfig = new StreamsConfig(configProps());
    private final String userEndPoint = "localhost:8080";
    private final String applicationId = "stream-partition-assignor-test";
    private final TaskManager taskManager = EasyMock.createNiceMock(TaskManager.class);
private Map<String, Object> configProps() {
final Map<String, Object> configurationMap = new HashMap<>();
configurationMap.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
configurationMap.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, userEndPoint);
configurationMap.put(StreamsConfig.InternalConfig.TASK_MANAGER_FOR_PARTITION_ASSIGNOR, taskManager);
configurationMap.put(StreamsConfig.InternalConfig.ASSIGNMENT_ERROR_CODE, new AtomicInteger());
return configurationMap;
}
private void configurePartitionAssignor(final Map<String, Object> props) {
final Map<String, Object> configurationMap = configProps();
configurationMap.putAll(props);
partitionAssignor.configure(configurationMap);
}
    /**
     * Primes the nice-mocked TaskManager with the topology builder, previous
     * active task ids, cached task ids and process id that a test wants the
     * assignor to observe, then switches the mock into replay mode.
     */
    private void mockTaskManager(final Set<TaskId> prevTasks,
                                 final Set<TaskId> cachedTasks,
                                 final UUID processId,
                                 final InternalTopologyBuilder builder) {
        EasyMock.expect(taskManager.builder()).andReturn(builder).anyTimes();
        EasyMock.expect(taskManager.prevActiveTaskIds()).andReturn(prevTasks).anyTimes();
        EasyMock.expect(taskManager.cachedTasksIds()).andReturn(cachedTasks).anyTimes();
        EasyMock.expect(taskManager.processId()).andReturn(processId).anyTimes();
        // Replay so the expectations above take effect for the test body.
        EasyMock.replay(taskManager);
    }
@Test
public void shouldInterleaveTasksByGroupId() {
final TaskId taskIdA0 = new TaskId(0, 0);
final TaskId taskIdA1 = new TaskId(0, 1);
final TaskId taskIdA2 = new TaskId(0, 2);
final TaskId taskIdA3 = new TaskId(0, 3);
final TaskId taskIdB0 = new TaskId(1, 0);
final TaskId taskIdB1 = new TaskId(1, 1);
final TaskId taskIdB2 = new TaskId(1, 2);
final TaskId taskIdC0 = new TaskId(2, 0);
final TaskId taskIdC1 = new TaskId(2, 1);
final List<TaskId> expectedSubList1 = asList(taskIdA0, taskIdA3, taskIdB2);
final List<TaskId> expectedSubList2 = asList(taskIdA1, taskIdB0, taskIdC0);
final List<TaskId> expectedSubList3 = asList(taskIdA2, taskIdB1, taskIdC1);
final List<List<TaskId>> embeddedList = asList(expectedSubList1, expectedSubList2, expectedSubList3);
final List<TaskId> tasks = asList(taskIdC0, taskIdC1, taskIdB0, taskIdB1, taskIdB2, taskIdA0, taskIdA1, taskIdA2, taskIdA3);
Collections.shuffle(tasks);
final List<List<TaskId>> interleavedTaskIds = partitionAssignor.interleaveTasksByGroupId(tasks, 3);
assertThat(interleavedTaskIds, equalTo(embeddedList));
}
@Test
public void testSubscription() {
builder.addSource(null, "source1", null, null, null, "topic1");
builder.addSource(null, "source2", null, null, null, "topic2");
builder.addProcessor("processor", new MockProcessorSupplier(), "source1", "source2");
final Set<TaskId> prevTasks = Utils.mkSet(
new TaskId(0, 1), new TaskId(1, 1), new TaskId(2, 1));
final Set<TaskId> cachedTasks = Utils.mkSet(
new TaskId(0, 1), new TaskId(1, 1), new TaskId(2, 1),
new TaskId(0, 2), new TaskId(1, 2), new TaskId(2, 2));
final UUID processId = UUID.randomUUID();
mockTaskManager(prevTasks, cachedTasks, processId, builder);
configurePartitionAssignor(Collections.emptyMap());
final PartitionAssignor.Subscription subscription = partitionAssignor.subscription(Utils.mkSet("topic1", "topic2"));
Collections.sort(subscription.topics());
assertEquals(asList("topic1", "topic2"), subscription.topics());
final Set<TaskId> standbyTasks = new HashSet<>(cachedTasks);
standbyTasks.removeAll(prevTasks);
final SubscriptionInfo info = new SubscriptionInfo(processId, prevTasks, standbyTasks, null);
assertEquals(info.encode(), subscription.userData());
}
@Test
public void testAssignBasic() {
builder.addSource(null, "source1", null, null, null, "topic1");
builder.addSource(null, "source2", null, null, null, "topic2");
builder.addProcessor("processor", new MockProcessorSupplier(), "source1", "source2");
final List<String> topics = asList("topic1", "topic2");
final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
final Set<TaskId> prevTasks10 = Utils.mkSet(task0);
final Set<TaskId> prevTasks11 = Utils.mkSet(task1);
final Set<TaskId> prevTasks20 = Utils.mkSet(task2);
final Set<TaskId> standbyTasks10 = Utils.mkSet(task1);
final Set<TaskId> standbyTasks11 = Utils.mkSet(task2);
final Set<TaskId> standbyTasks20 = Utils.mkSet(task0);
final UUID uuid1 = UUID.randomUUID();
final UUID uuid2 = UUID.randomUUID();
mockTaskManager(prevTasks10, standbyTasks10, uuid1, builder);
configurePartitionAssignor(Collections.emptyMap());
partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
subscriptions.put("consumer10",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, prevTasks10, standbyTasks10, userEndPoint).encode()));
subscriptions.put("consumer11",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, prevTasks11, standbyTasks11, userEndPoint).encode()));
subscriptions.put("consumer20",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid2, prevTasks20, standbyTasks20, userEndPoint).encode()));
final Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(metadata, subscriptions);
// check assigned partitions
assertEquals(Utils.mkSet(Utils.mkSet(t1p0, t2p0), Utils.mkSet(t1p1, t2p1)),
Utils.mkSet(new HashSet<>(assignments.get("consumer10").partitions()), new HashSet<>(assignments.get("consumer11").partitions())));
assertEquals(Utils.mkSet(t1p2, t2p2), new HashSet<>(assignments.get("consumer20").partitions()));
// check assignment info
// the first consumer
final AssignmentInfo info10 = checkAssignment(allTopics, assignments.get("consumer10"));
final Set<TaskId> allActiveTasks = new HashSet<>(info10.activeTasks());
// the second consumer
final AssignmentInfo info11 = checkAssignment(allTopics, assignments.get("consumer11"));
allActiveTasks.addAll(info11.activeTasks());
assertEquals(Utils.mkSet(task0, task1), allActiveTasks);
// the third consumer
final AssignmentInfo info20 = checkAssignment(allTopics, assignments.get("consumer20"));
allActiveTasks.addAll(info20.activeTasks());
assertEquals(3, allActiveTasks.size());
assertEquals(allTasks, new HashSet<>(allActiveTasks));
assertEquals(3, allActiveTasks.size());
assertEquals(allTasks, allActiveTasks);
}
    /**
     * Two consumers in the same process, two independent sub-topologies over
     * four-partition topics: verifies that tasks of both groups are spread
     * evenly (four tasks each) across the two consumer threads.
     */
    @Test
    public void shouldAssignEvenlyAcrossConsumersOneClientMultipleThreads() {
        builder.addSource(null, "source1", null, null, null, "topic1");
        builder.addSource(null, "source2", null, null, null, "topic2");
        builder.addProcessor("processor", new MockProcessorSupplier(), "source1");
        builder.addProcessor("processorII", new MockProcessorSupplier(), "source2");
        // Local metadata with 4 partitions per topic (the shared fixture has 3).
        final List<PartitionInfo> localInfos = asList(
            new PartitionInfo("topic1", 0, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic1", 1, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic1", 2, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic1", 3, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic2", 0, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic2", 1, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic2", 2, Node.noNode(), new Node[0], new Node[0]),
            new PartitionInfo("topic2", 3, Node.noNode(), new Node[0], new Node[0])
        );
        final Cluster localMetadata = new Cluster(
            "cluster",
            Collections.singletonList(Node.noNode()),
            localInfos,
            Collections.emptySet(),
            Collections.emptySet());
        final List<String> topics = asList("topic1", "topic2");
        // Group 0 tasks come from the "processor" sub-topology, group 1 from "processorII".
        final TaskId taskIdA0 = new TaskId(0, 0);
        final TaskId taskIdA1 = new TaskId(0, 1);
        final TaskId taskIdA2 = new TaskId(0, 2);
        final TaskId taskIdA3 = new TaskId(0, 3);
        final TaskId taskIdB0 = new TaskId(1, 0);
        final TaskId taskIdB1 = new TaskId(1, 1);
        final TaskId taskIdB2 = new TaskId(1, 2);
        final TaskId taskIdB3 = new TaskId(1, 3);
        final UUID uuid1 = UUID.randomUUID();
        // No previous/cached tasks: assignment is driven purely by balance.
        mockTaskManager(new HashSet<>(), new HashSet<>(), uuid1, builder);
        configurePartitionAssignor(Collections.emptyMap());
        partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
        final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
        subscriptions.put("consumer10",
                new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, new HashSet<>(), new HashSet<>(), userEndPoint).encode()));
        subscriptions.put("consumer11",
                new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, new HashSet<>(), new HashSet<>(), userEndPoint).encode()));
        final Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(localMetadata, subscriptions);
        // check assigned partitions: each consumer gets 4 of the 8 partitions.
        assertEquals(Utils.mkSet(Utils.mkSet(t2p2, t1p0, t1p2, t2p0), Utils.mkSet(t1p1, t2p1, t1p3, t2p3)),
            Utils.mkSet(new HashSet<>(assignments.get("consumer10").partitions()), new HashSet<>(assignments.get("consumer11").partitions())));
        // the first consumer
        final AssignmentInfo info10 = AssignmentInfo.decode(assignments.get("consumer10").userData());
        final List<TaskId> expectedInfo10TaskIds = asList(taskIdA1, taskIdA3, taskIdB1, taskIdB3);
        assertEquals(expectedInfo10TaskIds, info10.activeTasks());
        // the second consumer
        final AssignmentInfo info11 = AssignmentInfo.decode(assignments.get("consumer11").userData());
        final List<TaskId> expectedInfo11TaskIds = asList(taskIdA0, taskIdA2, taskIdB0, taskIdB2);
        assertEquals(expectedInfo11TaskIds, info11.activeTasks());
    }
@Test
public void testAssignWithPartialTopology() {
builder.addSource(null, "source1", null, null, null, "topic1");
builder.addProcessor("processor1", new MockProcessorSupplier(), "source1");
builder.addStateStore(new MockKeyValueStoreBuilder("store1", false), "processor1");
builder.addSource(null, "source2", null, null, null, "topic2");
builder.addProcessor("processor2", new MockProcessorSupplier(), "source2");
builder.addStateStore(new MockKeyValueStoreBuilder("store2", false), "processor2");
final List<String> topics = asList("topic1", "topic2");
final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
final UUID uuid1 = UUID.randomUUID();
mockTaskManager(Collections.emptySet(), Collections.emptySet(), uuid1, builder);
configurePartitionAssignor(Collections.singletonMap(StreamsConfig.PARTITION_GROUPER_CLASS_CONFIG, SingleGroupPartitionGrouperStub.class));
partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
subscriptions.put("consumer10",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, Collections.emptySet(), Collections.emptySet(), userEndPoint).encode()));
// will throw exception if it fails
final Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(metadata, subscriptions);
// check assignment info
final AssignmentInfo info10 = checkAssignment(Utils.mkSet("topic1"), assignments.get("consumer10"));
final Set<TaskId> allActiveTasks = new HashSet<>(info10.activeTasks());
assertEquals(3, allActiveTasks.size());
assertEquals(allTasks, new HashSet<>(allActiveTasks));
}
@Test
public void testAssignEmptyMetadata() {
builder.addSource(null, "source1", null, null, null, "topic1");
builder.addSource(null, "source2", null, null, null, "topic2");
builder.addProcessor("processor", new MockProcessorSupplier(), "source1", "source2");
final List<String> topics = asList("topic1", "topic2");
final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
final Set<TaskId> prevTasks10 = Utils.mkSet(task0);
final Set<TaskId> standbyTasks10 = Utils.mkSet(task1);
final Cluster emptyMetadata = new Cluster("cluster", Collections.singletonList(Node.noNode()),
Collections.emptySet(),
Collections.emptySet(),
Collections.emptySet());
final UUID uuid1 = UUID.randomUUID();
mockTaskManager(prevTasks10, standbyTasks10, uuid1, builder);
configurePartitionAssignor(Collections.emptyMap());
final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
subscriptions.put("consumer10",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, prevTasks10, standbyTasks10, userEndPoint).encode()));
// initially metadata is empty
Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(emptyMetadata, subscriptions);
// check assigned partitions
assertEquals(Collections.emptySet(),
new HashSet<>(assignments.get("consumer10").partitions()));
// check assignment info
AssignmentInfo info10 = checkAssignment(Collections.emptySet(), assignments.get("consumer10"));
final Set<TaskId> allActiveTasks = new HashSet<>(info10.activeTasks());
assertEquals(0, allActiveTasks.size());
assertEquals(Collections.emptySet(), new HashSet<>(allActiveTasks));
// then metadata gets populated
assignments = partitionAssignor.assign(metadata, subscriptions);
// check assigned partitions
assertEquals(Utils.mkSet(Utils.mkSet(t1p0, t2p0, t1p0, t2p0, t1p1, t2p1, t1p2, t2p2)),
Utils.mkSet(new HashSet<>(assignments.get("consumer10").partitions())));
// the first consumer
info10 = checkAssignment(allTopics, assignments.get("consumer10"));
allActiveTasks.addAll(info10.activeTasks());
assertEquals(3, allActiveTasks.size());
assertEquals(allTasks, new HashSet<>(allActiveTasks));
assertEquals(3, allActiveTasks.size());
assertEquals(allTasks, allActiveTasks);
}
@Test
public void testAssignWithNewTasks() {
builder.addSource(null, "source1", null, null, null, "topic1");
builder.addSource(null, "source2", null, null, null, "topic2");
builder.addSource(null, "source3", null, null, null, "topic3");
builder.addProcessor("processor", new MockProcessorSupplier(), "source1", "source2", "source3");
final List<String> topics = asList("topic1", "topic2", "topic3");
final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2, task3);
// assuming that previous tasks do not have topic3
final Set<TaskId> prevTasks10 = Utils.mkSet(task0);
final Set<TaskId> prevTasks11 = Utils.mkSet(task1);
final Set<TaskId> prevTasks20 = Utils.mkSet(task2);
final UUID uuid1 = UUID.randomUUID();
final UUID uuid2 = UUID.randomUUID();
mockTaskManager(prevTasks10, Collections.emptySet(), uuid1, builder);
configurePartitionAssignor(Collections.emptyMap());
partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
subscriptions.put("consumer10",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, prevTasks10, Collections.emptySet(), userEndPoint).encode()));
subscriptions.put("consumer11",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, prevTasks11, Collections.emptySet(), userEndPoint).encode()));
subscriptions.put("consumer20",
new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid2, prevTasks20, Collections.emptySet(), userEndPoint).encode()));
final Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(metadata, subscriptions);
// check assigned partitions: since there is no previous task for topic 3 it will be assigned randomly so we cannot check exact match
// also note that previously assigned partitions / tasks may not stay on the previous host since we may assign the new task first and
// then later ones will be re-assigned to other hosts due to load balancing
AssignmentInfo info = AssignmentInfo.decode(assignments.get("consumer10").userData());
final Set<TaskId> allActiveTasks = new HashSet<>(info.activeTasks());
final Set<TopicPartition> allPartitions = new HashSet<>(assignments.get("consumer10").partitions());
info = AssignmentInfo.decode(assignments.get("consumer11").userData());
allActiveTasks.addAll(info.activeTasks());
allPartitions.addAll(assignments.get("consumer11").partitions());
info = AssignmentInfo.decode(assignments.get("consumer20").userData());
allActiveTasks.addAll(info.activeTasks());
allPartitions.addAll(assignments.get("consumer20").partitions());
assertEquals(allTasks, allActiveTasks);
assertEquals(Utils.mkSet(t1p0, t1p1, t1p2, t2p0, t2p1, t2p2, t3p0, t3p1, t3p2, t3p3), allPartitions);
}
    /**
     * Two stateful sub-topologies across three consumers: verifies that each
     * consumer receives two partitions/tasks, that all six tasks are covered,
     * and that each state store's changelog maps to the tasks of its own
     * topic group.
     */
    @Test
    public void testAssignWithStates() {
        builder.setApplicationId(applicationId);
        builder.addSource(null, "source1", null, null, null, "topic1");
        builder.addSource(null, "source2", null, null, null, "topic2");
        // Group 0: source1 -> processor-1 with store1.
        builder.addProcessor("processor-1", new MockProcessorSupplier(), "source1");
        builder.addStateStore(new MockKeyValueStoreBuilder("store1", false), "processor-1");
        // Group 1: source2 -> processor-2 with store2 and store3.
        builder.addProcessor("processor-2", new MockProcessorSupplier(), "source2");
        builder.addStateStore(new MockKeyValueStoreBuilder("store2", false), "processor-2");
        builder.addStateStore(new MockKeyValueStoreBuilder("store3", false), "processor-2");
        final List<String> topics = asList("topic1", "topic2");
        final TaskId task00 = new TaskId(0, 0);
        final TaskId task01 = new TaskId(0, 1);
        final TaskId task02 = new TaskId(0, 2);
        final TaskId task10 = new TaskId(1, 0);
        final TaskId task11 = new TaskId(1, 1);
        final TaskId task12 = new TaskId(1, 2);
        final List<TaskId> tasks = asList(task00, task01, task02, task10, task11, task12);
        final UUID uuid1 = UUID.randomUUID();
        final UUID uuid2 = UUID.randomUUID();
        mockTaskManager(
            Collections.emptySet(),
            Collections.emptySet(),
            uuid1,
            builder);
        configurePartitionAssignor(Collections.emptyMap());
        partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
        // consumer10/consumer11 share process uuid1; consumer20 runs in uuid2.
        final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
        subscriptions.put("consumer10",
                new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, Collections.emptySet(), Collections.emptySet(), userEndPoint).encode()));
        subscriptions.put("consumer11",
                new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, Collections.emptySet(), Collections.emptySet(), userEndPoint).encode()));
        subscriptions.put("consumer20",
                new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid2, Collections.emptySet(), Collections.emptySet(), userEndPoint).encode()));
        final Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(metadata, subscriptions);
        // check assigned partition size: since there is no previous task and there are two sub-topologies the assignment is random so we cannot check exact match
        assertEquals(2, assignments.get("consumer10").partitions().size());
        assertEquals(2, assignments.get("consumer11").partitions().size());
        assertEquals(2, assignments.get("consumer20").partitions().size());
        final AssignmentInfo info10 = AssignmentInfo.decode(assignments.get("consumer10").userData());
        final AssignmentInfo info11 = AssignmentInfo.decode(assignments.get("consumer11").userData());
        final AssignmentInfo info20 = AssignmentInfo.decode(assignments.get("consumer20").userData());
        assertEquals(2, info10.activeTasks().size());
        assertEquals(2, info11.activeTasks().size());
        assertEquals(2, info20.activeTasks().size());
        // The union of active tasks must cover all six tasks exactly.
        final Set<TaskId> allTasks = new HashSet<>();
        allTasks.addAll(info10.activeTasks());
        allTasks.addAll(info11.activeTasks());
        allTasks.addAll(info20.activeTasks());
        assertEquals(new HashSet<>(tasks), allTasks);
        // check tasks for state topics: each store's changelog belongs to its group.
        final Map<Integer, InternalTopologyBuilder.TopicsInfo> topicGroups = builder.topicGroups();
        assertEquals(Utils.mkSet(task00, task01, task02), tasksForState("store1", tasks, topicGroups));
        assertEquals(Utils.mkSet(task10, task11, task12), tasksForState("store2", tasks, topicGroups));
        assertEquals(Utils.mkSet(task10, task11, task12), tasksForState("store3", tasks, topicGroups));
    }
/**
 * Collects the task ids (out of {@code allTaskIds}) whose topic group writes the
 * changelog topic backing {@code storeName} for this application.
 */
private Set<TaskId> tasksForState(final String storeName,
                                  final List<TaskId> allTaskIds,
                                  final Map<Integer, InternalTopologyBuilder.TopicsInfo> topicGroups) {
    // The changelog topic name derived from the application id and store name.
    final String changelog = ProcessorStateManager.storeChangelogTopic(applicationId, storeName);

    // Phase 1: find every topic group that owns this changelog topic.
    final Set<Integer> owningGroups = new HashSet<>();
    for (final Map.Entry<Integer, InternalTopologyBuilder.TopicsInfo> group : topicGroups.entrySet()) {
        if (group.getValue().stateChangelogTopics.keySet().contains(changelog)) {
            owningGroups.add(group.getKey());
        }
    }

    // Phase 2: gather all task ids belonging to an owning group.
    final Set<TaskId> matching = new HashSet<>();
    for (final TaskId taskId : allTaskIds) {
        if (owningGroups.contains(taskId.topicGroupId)) {
            matching.add(taskId);
        }
    }
    return matching;
}
// Verifies that with NUM_STANDBY_REPLICAS=1 every task ends up both as an active
// task and as a standby task, and that consumers on the same process do not get
// identical standby sets.
@Test
public void testAssignWithStandbyReplicas() {
    final Map<String, Object> props = configProps();
    props.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, "1");
    // NOTE(review): this local appears to shadow a class-level streamsConfig field -- confirm intended.
    final StreamsConfig streamsConfig = new StreamsConfig(props);
    // One sub-topology consuming two input topics.
    builder.addSource(null, "source1", null, null, null, "topic1");
    builder.addSource(null, "source2", null, null, null, "topic2");
    builder.addProcessor("processor", new MockProcessorSupplier(), "source1", "source2");
    final List<String> topics = asList("topic1", "topic2");
    final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
    // Previous active/standby ownership per consumer; biases the sticky assignment.
    final Set<TaskId> prevTasks00 = Utils.mkSet(task0);
    final Set<TaskId> prevTasks01 = Utils.mkSet(task1);
    final Set<TaskId> prevTasks02 = Utils.mkSet(task2);
    final Set<TaskId> standbyTasks01 = Utils.mkSet(task1);
    final Set<TaskId> standbyTasks02 = Utils.mkSet(task2);
    final Set<TaskId> standbyTasks00 = Utils.mkSet(task0);
    // Two processes: consumers 10 and 11 share uuid1, consumer 20 runs under uuid2.
    final UUID uuid1 = UUID.randomUUID();
    final UUID uuid2 = UUID.randomUUID();
    mockTaskManager(prevTasks00, standbyTasks01, uuid1, builder);
    configurePartitionAssignor(Collections.singletonMap(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1));
    partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    subscriptions.put("consumer10",
        new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, prevTasks00, standbyTasks01, userEndPoint).encode()));
    subscriptions.put("consumer11",
        new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, prevTasks01, standbyTasks02, userEndPoint).encode()));
    subscriptions.put("consumer20",
        new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid2, prevTasks02, standbyTasks00, "any:9097").encode()));
    final Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(metadata, subscriptions);
    // the first consumer
    final AssignmentInfo info10 = checkAssignment(allTopics, assignments.get("consumer10"));
    final Set<TaskId> allActiveTasks = new HashSet<>(info10.activeTasks());
    final Set<TaskId> allStandbyTasks = new HashSet<>(info10.standbyTasks().keySet());
    // the second consumer
    final AssignmentInfo info11 = checkAssignment(allTopics, assignments.get("consumer11"));
    allActiveTasks.addAll(info11.activeTasks());
    allStandbyTasks.addAll(info11.standbyTasks().keySet());
    // Consumers on the same process must not receive identical standby sets.
    assertNotEquals("same processId has same set of standby tasks", info11.standbyTasks().keySet(), info10.standbyTasks().keySet());
    // check active tasks assigned to the first client
    assertEquals(Utils.mkSet(task0, task1), new HashSet<>(allActiveTasks));
    assertEquals(Utils.mkSet(task2), new HashSet<>(allStandbyTasks));
    // the third consumer
    final AssignmentInfo info20 = checkAssignment(allTopics, assignments.get("consumer20"));
    allActiveTasks.addAll(info20.activeTasks());
    allStandbyTasks.addAll(info20.standbyTasks().keySet());
    // all task ids are in the active tasks and also in the standby tasks
    assertEquals(3, allActiveTasks.size());
    assertEquals(allTasks, allActiveTasks);
    assertEquals(3, allStandbyTasks.size());
    assertEquals(allTasks, allStandbyTasks);
}
// Verifies that onAssignment() decodes the AssignmentInfo and pushes the host
// state, the active/standby task metadata, and the derived Cluster metadata into
// the TaskManager.
@Test
public void testOnAssignment() {
    configurePartitionAssignor(Collections.emptyMap());
    final List<TaskId> activeTaskList = asList(task0, task3);
    final Map<TaskId, Set<TopicPartition>> activeTasks = new HashMap<>();
    final Map<TaskId, Set<TopicPartition>> standbyTasks = new HashMap<>();
    final Map<HostInfo, Set<TopicPartition>> hostState = Collections.singletonMap(
        new HostInfo("localhost", 9090),
        Utils.mkSet(t3p0, t3p3));
    activeTasks.put(task0, Utils.mkSet(t3p0));
    activeTasks.put(task3, Utils.mkSet(t3p3));
    standbyTasks.put(task1, Utils.mkSet(t3p1));
    standbyTasks.put(task2, Utils.mkSet(t3p2));
    final AssignmentInfo info = new AssignmentInfo(activeTaskList, standbyTasks, hostState);
    final PartitionAssignor.Assignment assignment = new PartitionAssignor.Assignment(asList(t3p0, t3p3), info.encode());
    // Capture the Cluster handed to the task manager so it can be inspected below.
    final Capture<Cluster> capturedCluster = EasyMock.newCapture();
    taskManager.setPartitionsByHostState(hostState);
    EasyMock.expectLastCall();
    taskManager.setAssignmentMetadata(activeTasks, standbyTasks);
    EasyMock.expectLastCall();
    taskManager.setClusterMetadata(EasyMock.capture(capturedCluster));
    EasyMock.expectLastCall();
    EasyMock.replay(taskManager);
    partitionAssignor.onAssignment(assignment);
    EasyMock.verify(taskManager);
    // Cluster metadata is rebuilt from the host state: one topic with two partitions.
    assertEquals(Collections.singleton(t3p0.topic()), capturedCluster.getValue().topics());
    assertEquals(2, capturedCluster.getValue().partitionsForTopic(t3p0.topic()).size());
}
// Verifies that an internal (repartition) topic referenced by the topology is
// prepared with one partition per task before the assignment is computed.
@Test
public void testAssignWithInternalTopics() {
    builder.setApplicationId(applicationId);
    builder.addInternalTopic("topicX");
    // topic1 -> processor1 -> topicX -> processor2
    builder.addSource(null, "source1", null, null, null, "topic1");
    builder.addProcessor("processor1", new MockProcessorSupplier(), "source1");
    builder.addSink("sink1", "topicX", null, null, null, "processor1");
    builder.addSource(null, "source2", null, null, null, "topicX");
    builder.addProcessor("processor2", new MockProcessorSupplier(), "source2");
    // Internal topics are prefixed with the application id.
    final List<String> topics = asList("topic1", applicationId + "-topicX");
    final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
    final UUID uuid1 = UUID.randomUUID();
    mockTaskManager(Collections.emptySet(), Collections.emptySet(), uuid1, builder);
    configurePartitionAssignor(Collections.emptyMap());
    final MockInternalTopicManager internalTopicManager = new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer);
    partitionAssignor.setInternalTopicManager(internalTopicManager);
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put("consumer10",
        new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, emptyTasks, emptyTasks, userEndPoint).encode()));
    partitionAssignor.assign(metadata, subscriptions);
    // check prepared internal topics: exactly one, sized to the task count
    assertEquals(1, internalTopicManager.readyTopics.size());
    assertEquals(allTasks.size(), (long) internalTopicManager.readyTopics.get(applicationId + "-topicX"));
}
// Verifies chained internal topics: topicX feeds a processor whose output goes
// to a second internal topic topicZ; both must be prepared before assignment.
@Test
public void testAssignWithInternalTopicThatsSourceIsAnotherInternalTopic() {
    // Deliberately shadows the class-level applicationId with a short name used
    // in the expected topic names below.
    final String applicationId = "test";
    builder.setApplicationId(applicationId);
    builder.addInternalTopic("topicX");
    builder.addSource(null, "source1", null, null, null, "topic1");
    builder.addProcessor("processor1", new MockProcessorSupplier(), "source1");
    builder.addSink("sink1", "topicX", null, null, null, "processor1");
    builder.addSource(null, "source2", null, null, null, "topicX");
    builder.addInternalTopic("topicZ");
    builder.addProcessor("processor2", new MockProcessorSupplier(), "source2");
    builder.addSink("sink2", "topicZ", null, null, null, "processor2");
    builder.addSource(null, "source3", null, null, null, "topicZ");
    final List<String> topics = asList("topic1", "test-topicX", "test-topicZ");
    final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
    final UUID uuid1 = UUID.randomUUID();
    mockTaskManager(Collections.emptySet(), Collections.emptySet(), uuid1, builder);
    configurePartitionAssignor(Collections.emptyMap());
    final MockInternalTopicManager internalTopicManager = new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer);
    partitionAssignor.setInternalTopicManager(internalTopicManager);
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put("consumer10",
        new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, emptyTasks, emptyTasks, userEndPoint).encode()));
    partitionAssignor.assign(metadata, subscriptions);
    // check prepared internal topics: both internal topics, each sized to the task count
    assertEquals(2, internalTopicManager.readyTopics.size());
    assertEquals(allTasks.size(), (long) internalTopicManager.readyTopics.get("test-topicZ"));
}
// Verifies that after copartition enforcement every created internal topic has
// four partitions (matching the 4-partition KTable input topic3) and that a
// partition is assigned for every partition of every created topic.
@Test
public void shouldGenerateTasksForAllCreatedPartitions() {
    final StreamsBuilder builder = new StreamsBuilder();
    // KStream with 3 partitions
    final KStream<Object, Object> stream1 = builder
        .stream("topic1")
        // force creation of internal repartition topic
        .map((KeyValueMapper<Object, Object, KeyValue<Object, Object>>) KeyValue::new);
    // KTable with 4 partitions
    final KTable<Object, Long> table1 = builder
        .table("topic3")
        // force creation of internal repartition topic
        .groupBy(KeyValue::new)
        .count();
    // joining the stream and the table
    // this triggers the enforceCopartitioning() routine in the StreamsPartitionAssignor,
    // forcing the stream.map to get repartitioned to a topic with four partitions.
    stream1.join(
        table1,
        (ValueJoiner) (value1, value2) -> null);
    final UUID uuid = UUID.randomUUID();
    final String client = "client1";
    final InternalTopologyBuilder internalTopologyBuilder = TopologyWrapper.getInternalTopologyBuilder(builder.build());
    internalTopologyBuilder.setApplicationId(applicationId);
    // Fix: use the same process id for the task manager as for the subscription
    // below (previously a second, unrelated random UUID was passed here).
    mockTaskManager(
        Collections.emptySet(),
        Collections.emptySet(),
        uuid,
        internalTopologyBuilder);
    configurePartitionAssignor(Collections.emptyMap());
    final MockInternalTopicManager mockInternalTopicManager = new MockInternalTopicManager(
        streamsConfig,
        mockClientSupplier.restoreConsumer);
    partitionAssignor.setInternalTopicManager(mockInternalTopicManager);
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put(
        client,
        new PartitionAssignor.Subscription(
            asList("topic1", "topic3"),
            new SubscriptionInfo(uuid, emptyTasks, emptyTasks, userEndPoint).encode()
        )
    );
    final Map<String, PartitionAssignor.Assignment> assignment = partitionAssignor.assign(metadata, subscriptions);
    // All four internal topics must be created with 4 partitions, matching topic3.
    final Map<String, Integer> expectedCreatedInternalTopics = new HashMap<>();
    expectedCreatedInternalTopics.put(applicationId + "-KTABLE-AGGREGATE-STATE-STORE-0000000006-repartition", 4);
    expectedCreatedInternalTopics.put(applicationId + "-KTABLE-AGGREGATE-STATE-STORE-0000000006-changelog", 4);
    expectedCreatedInternalTopics.put(applicationId + "-topic3-STATE-STORE-0000000002-changelog", 4);
    expectedCreatedInternalTopics.put(applicationId + "-KSTREAM-MAP-0000000001-repartition", 4);
    // check if all internal topics were created as expected
    assertThat(mockInternalTopicManager.readyTopics, equalTo(expectedCreatedInternalTopics));
    final List<TopicPartition> expectedAssignment = asList(
        new TopicPartition("topic1", 0),
        new TopicPartition("topic1", 1),
        new TopicPartition("topic1", 2),
        new TopicPartition("topic3", 0),
        new TopicPartition("topic3", 1),
        new TopicPartition("topic3", 2),
        new TopicPartition("topic3", 3),
        new TopicPartition(applicationId + "-KTABLE-AGGREGATE-STATE-STORE-0000000006-repartition", 0),
        new TopicPartition(applicationId + "-KTABLE-AGGREGATE-STATE-STORE-0000000006-repartition", 1),
        new TopicPartition(applicationId + "-KTABLE-AGGREGATE-STATE-STORE-0000000006-repartition", 2),
        new TopicPartition(applicationId + "-KTABLE-AGGREGATE-STATE-STORE-0000000006-repartition", 3),
        new TopicPartition(applicationId + "-KSTREAM-MAP-0000000001-repartition", 0),
        new TopicPartition(applicationId + "-KSTREAM-MAP-0000000001-repartition", 1),
        new TopicPartition(applicationId + "-KSTREAM-MAP-0000000001-repartition", 2),
        new TopicPartition(applicationId + "-KSTREAM-MAP-0000000001-repartition", 3)
    );
    // check if we created a task for all expected topicPartitions.
    assertThat(new HashSet<>(assignment.get(client).partitions()), equalTo(new HashSet<>(expectedAssignment)));
}
// Verifies that the configured application server endpoint is propagated into
// the encoded subscription user data.
@Test
public void shouldAddUserDefinedEndPointToSubscription() {
    // A minimal source -> processor -> sink topology; only the subscription
    // metadata is inspected.
    builder.setApplicationId(applicationId);
    builder.addSource(null, "source", null, null, null, "input");
    builder.addProcessor("processor", new MockProcessorSupplier(), "source");
    builder.addSink("sink", "output", null, null, null, "processor");

    final UUID processId = UUID.randomUUID();
    mockTaskManager(Collections.emptySet(), Collections.emptySet(), processId, builder);
    configurePartitionAssignor(Collections.singletonMap(StreamsConfig.APPLICATION_SERVER_CONFIG, userEndPoint));

    final PartitionAssignor.Subscription subscription = partitionAssignor.subscription(Utils.mkSet("input"));
    assertEquals("localhost:8080", SubscriptionInfo.decode(subscription.userData()).userEndPoint());
}
// Verifies that partitionsByHost maps the configured endpoint (localhost:8080)
// to all partitions owned by that host.
@Test
public void shouldMapUserEndPointToTopicPartitions() {
    builder.setApplicationId(applicationId);
    builder.addSource(null, "source", null, null, null, "topic1");
    builder.addProcessor("processor", new MockProcessorSupplier(), "source");
    builder.addSink("sink", "output", null, null, null, "processor");
    final List<String> topics = Collections.singletonList("topic1");
    final UUID uuid1 = UUID.randomUUID();
    mockTaskManager(Collections.emptySet(), Collections.emptySet(), uuid1, builder);
    configurePartitionAssignor(Collections.singletonMap(StreamsConfig.APPLICATION_SERVER_CONFIG, userEndPoint));
    partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put("consumer1",
        new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, emptyTasks, emptyTasks, userEndPoint).encode()));
    final Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(metadata, subscriptions);
    final PartitionAssignor.Assignment consumerAssignment = assignments.get("consumer1");
    final AssignmentInfo assignmentInfo = AssignmentInfo.decode(consumerAssignment.userData());
    // The single consumer owns all three partitions of topic1, advertised under
    // the configured host:port.
    final Set<TopicPartition> topicPartitions = assignmentInfo.partitionsByHost().get(new HostInfo("localhost", 8080));
    assertEquals(
        Utils.mkSet(
            new TopicPartition("topic1", 0),
            new TopicPartition("topic1", 1),
            new TopicPartition("topic1", 2)),
        topicPartitions);
}
// An APPLICATION_SERVER_CONFIG value without a ":port" suffix must be rejected
// with a ConfigException at configure time.
@Test
public void shouldThrowExceptionIfApplicationServerConfigIsNotHostPortPair() {
    builder.setApplicationId(applicationId);
    mockTaskManager(Collections.emptySet(), Collections.emptySet(), UUID.randomUUID(), builder);
    partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer));
    try {
        // "localhost" alone is not a valid host:port pair.
        configurePartitionAssignor(Collections.singletonMap(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost"));
        // Fix: message was grammatically broken ("expected to an exception").
        fail("expected an exception due to invalid config");
    } catch (final ConfigException e) {
        // expected: invalid host:port pair
    }
}
// An APPLICATION_SERVER_CONFIG whose port component is not numeric must be
// rejected with a ConfigException at configure time.
@Test
public void shouldThrowExceptionIfApplicationServerConfigPortIsNotAnInteger() {
    builder.setApplicationId(applicationId);
    try {
        // "j87yhk" is not a parsable port number.
        configurePartitionAssignor(Collections.singletonMap(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost:j87yhk"));
        // Fix: message was grammatically broken ("expected to an exception").
        fail("expected an exception due to invalid config");
    } catch (final ConfigException e) {
        // expected: non-integer port
    }
}
// Regression test: subscribing to a topic with no metadata must not cause an
// infinite metadata-resolution loop, and no tasks or internal topics may be
// created for sub-topologies whose input topic metadata is missing.
@Test
public void shouldNotLoopInfinitelyOnMissingMetadataAndShouldNotCreateRelatedTasks() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Object, Object> stream1 = builder
        // Task 1 (should get created):
        .stream("topic1")
        // force repartitioning for aggregation
        .selectKey((key, value) -> null)
        .groupByKey()
        // Task 2 (should get created):
        // create repartioning and changelog topic as task 1 exists
        .count(Materialized.as("count"))
        // force repartitioning for join, but second join input topic unknown
        // -> internal repartitioning topic should not get created
        .toStream()
        .map((KeyValueMapper<Object, Long, KeyValue<Object, Object>>) (key, value) -> null);
    builder
        // Task 3 (should not get created because input topic unknown)
        .stream("unknownTopic")
        // force repartitioning for join, but input topic unknown
        // -> thus should not create internal repartitioning topic
        .selectKey((key, value) -> null)
        // Task 4 (should not get created because input topics unknown)
        // should not create any of both input repartition topics or any of both changelog topics
        .join(
            stream1,
            (ValueJoiner) (value1, value2) -> null,
            JoinWindows.of(ofMillis(0))
        );
    final UUID uuid = UUID.randomUUID();
    final String client = "client1";
    final InternalTopologyBuilder internalTopologyBuilder = TopologyWrapper.getInternalTopologyBuilder(builder.build());
    internalTopologyBuilder.setApplicationId(applicationId);
    // NOTE(review): a fresh random UUID is passed here rather than the 'uuid'
    // encoded into the subscription below -- confirm the mismatch is intended.
    mockTaskManager(
        Collections.emptySet(),
        Collections.emptySet(),
        UUID.randomUUID(),
        internalTopologyBuilder);
    configurePartitionAssignor(Collections.emptyMap());
    final MockInternalTopicManager mockInternalTopicManager = new MockInternalTopicManager(
        streamsConfig,
        mockClientSupplier.restoreConsumer);
    partitionAssignor.setInternalTopicManager(mockInternalTopicManager);
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put(
        client,
        new PartitionAssignor.Subscription(
            Collections.singletonList("unknownTopic"),
            new SubscriptionInfo(uuid, emptyTasks, emptyTasks, userEndPoint).encode()
        )
    );
    final Map<String, PartitionAssignor.Assignment> assignment = partitionAssignor.assign(metadata, subscriptions);
    // Nothing should be created or assigned for the unknown topic.
    assertThat(mockInternalTopicManager.readyTopics.isEmpty(), equalTo(true));
    assertThat(assignment.get(client).partitions().isEmpty(), equalTo(true));
}
// Verifies that onAssignment() forwards the decoded host-to-partitions mapping
// to the TaskManager.
@Test
public void shouldUpdateClusterMetadataAndHostInfoOnAssignment() {
    // A single host owning two partitions of "topic".
    final Map<HostInfo, Set<TopicPartition>> partitionsByHost = Collections.singletonMap(
        new HostInfo("localhost", 9090),
        Utils.mkSet(new TopicPartition("topic", 1), new TopicPartition("topic", 2)));

    configurePartitionAssignor(Collections.emptyMap());

    // Expect the mapping to be pushed into the task manager exactly once.
    taskManager.setPartitionsByHostState(partitionsByHost);
    EasyMock.expectLastCall();
    EasyMock.replay(taskManager);

    partitionAssignor.onAssignment(createAssignment(partitionsByHost));

    EasyMock.verify(taskManager);
}
// Verifies that partitionsByHost advertises only active-task partitions: with one
// standby replica configured, neither host may claim all partitions, but together
// they must cover every partition exactly once.
@Test
public void shouldNotAddStandbyTaskPartitionsToPartitionsForHost() {
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream("topic1").groupByKey().count();
    final InternalTopologyBuilder internalTopologyBuilder = TopologyWrapper.getInternalTopologyBuilder(builder.build());
    internalTopologyBuilder.setApplicationId(applicationId);
    final UUID uuid = UUID.randomUUID();
    mockTaskManager(
        Collections.emptySet(),
        Collections.emptySet(),
        uuid,
        internalTopologyBuilder);
    final Map<String, Object> props = new HashMap<>();
    props.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
    props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, userEndPoint);
    configurePartitionAssignor(props);
    partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(
        streamsConfig,
        mockClientSupplier.restoreConsumer));
    // Two clients on different hosts subscribing to the same topic.
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put(
        "consumer1",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            new SubscriptionInfo(uuid, emptyTasks, emptyTasks, userEndPoint).encode()
        )
    );
    subscriptions.put(
        "consumer2",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            new SubscriptionInfo(UUID.randomUUID(), emptyTasks, emptyTasks, "other:9090").encode()
        )
    );
    final Set<TopicPartition> allPartitions = Utils.mkSet(t1p0, t1p1, t1p2);
    final Map<String, PartitionAssignor.Assignment> assign = partitionAssignor.assign(metadata, subscriptions);
    final PartitionAssignor.Assignment consumer1Assignment = assign.get("consumer1");
    final AssignmentInfo assignmentInfo = AssignmentInfo.decode(consumer1Assignment.userData());
    // Fix: local renamed to camelCase for consistency with consumer2Partitions.
    final Set<TopicPartition> consumer1Partitions = assignmentInfo.partitionsByHost().get(new HostInfo("localhost", 8080));
    final Set<TopicPartition> consumer2Partitions = assignmentInfo.partitionsByHost().get(new HostInfo("other", 9090));
    final HashSet<TopicPartition> allAssignedPartitions = new HashSet<>(consumer1Partitions);
    allAssignedPartitions.addAll(consumer2Partitions);
    // Each host advertises a strict subset (its active partitions only) ...
    assertThat(consumer1Partitions, not(allPartitions));
    assertThat(consumer2Partitions, not(allPartitions));
    // ... but together they cover every partition.
    assertThat(allAssignedPartitions, equalTo(allPartitions));
}
// The assignor must fail fast with a descriptive KafkaException when the
// internally-injected TaskManager reference is absent from the config.
// (The "TaskManger" typo is in the original method name and is kept as-is.)
@Test
public void shouldThrowKafkaExceptionIfTaskMangerNotConfigured() {
    final Map<String, Object> config = configProps();
    config.remove(StreamsConfig.InternalConfig.TASK_MANAGER_FOR_PARTITION_ASSIGNOR);
    try {
        partitionAssignor.configure(config);
        fail("Should have thrown KafkaException");
    } catch (final KafkaException expected) {
        assertThat(expected.getMessage(), equalTo("TaskManager is not specified"));
    }
}
// The internally-injected TaskManager config entry must actually be a TaskManager
// instance; any other type is rejected with a descriptive KafkaException.
@Test
public void shouldThrowKafkaExceptionIfTaskMangerConfigIsNotTaskManagerInstance() {
    final Map<String, Object> config = configProps();
    config.put(StreamsConfig.InternalConfig.TASK_MANAGER_FOR_PARTITION_ASSIGNOR, "i am not a task manager");
    try {
        partitionAssignor.configure(config);
        fail("Should have thrown KafkaException");
    } catch (final KafkaException expected) {
        assertThat(expected.getMessage(),
            equalTo("java.lang.String is not an instance of org.apache.kafka.streams.processor.internals.TaskManager"));
    }
}
// The assignor must fail fast with a descriptive KafkaException when the
// internally-injected assignment-error-code entry is absent from the config.
@Test
public void shouldThrowKafkaExceptionAssignmentErrorCodeNotConfigured() {
    // Drop the internally-injected entry to simulate a mis-wired configuration.
    final Map<String, Object> props = configProps();
    props.remove(StreamsConfig.InternalConfig.ASSIGNMENT_ERROR_CODE);

    try {
        partitionAssignor.configure(props);
        fail("Should have thrown KafkaException");
    } catch (final KafkaException expected) {
        assertThat(expected.getMessage(), equalTo("assignmentErrorCode is not specified"));
    }
}
// The assignment-error-code config entry must be an AtomicInteger; any other
// type is rejected with a descriptive KafkaException.
@Test
public void shouldThrowKafkaExceptionIfVersionProbingFlagConfigIsNotAtomicInteger() {
    final Map<String, Object> config = configProps();
    config.put(StreamsConfig.InternalConfig.ASSIGNMENT_ERROR_CODE, "i am not an AtomicInteger");
    try {
        partitionAssignor.configure(config);
        fail("Should have thrown KafkaException");
    } catch (final KafkaException expected) {
        assertThat(expected.getMessage(),
            equalTo("java.lang.String is not an instance of java.util.concurrent.atomic.AtomicInteger"));
    }
}
// Pairwise version-compatibility check: v1 client + v2 client => v1 assignment.
@Test
public void shouldReturnLowestAssignmentVersionForDifferentSubscriptionVersionsV1V2() {
    shouldReturnLowestAssignmentVersionForDifferentSubscriptionVersions(1, 2);
}
// Pairwise version-compatibility check: v1 client + v3 client => v1 assignment.
@Test
public void shouldReturnLowestAssignmentVersionForDifferentSubscriptionVersionsV1V3() {
    shouldReturnLowestAssignmentVersionForDifferentSubscriptionVersions(1, 3);
}
// Pairwise version-compatibility check: v2 client + v3 client => v2 assignment.
@Test
public void shouldReturnLowestAssignmentVersionForDifferentSubscriptionVersionsV2V3() {
    shouldReturnLowestAssignmentVersionForDifferentSubscriptionVersions(2, 3);
}
// Common driver: two consumers subscribe with different subscription-info
// versions; the assignor must encode BOTH assignments with the smaller version.
private void shouldReturnLowestAssignmentVersionForDifferentSubscriptionVersions(final int smallestVersion,
                                                                                 final int otherVersion) {
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put(
        "consumer1",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            new SubscriptionInfo(smallestVersion, UUID.randomUUID(), emptyTasks, emptyTasks, null).encode()
        )
    );
    subscriptions.put(
        "consumer2",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            new SubscriptionInfo(otherVersion, UUID.randomUUID(), emptyTasks, emptyTasks, null).encode()
        )
    );
    mockTaskManager(
        emptyTasks,
        emptyTasks,
        UUID.randomUUID(),
        builder);
    partitionAssignor.configure(configProps());
    final Map<String, PartitionAssignor.Assignment> assignment = partitionAssignor.assign(metadata, subscriptions);
    // Both members must receive an assignment encoded with the smallest version.
    assertThat(assignment.size(), equalTo(2));
    assertThat(AssignmentInfo.decode(assignment.get("consumer1").userData()).version(), equalTo(smallestVersion));
    assertThat(AssignmentInfo.decode(assignment.get("consumer2").userData()).version(), equalTo(smallestVersion));
}
// Setting upgrade.from=0.10.0 must force the oldest (version 1) subscription encoding.
@Test
public void shouldDownGradeSubscriptionToVersion1() {
    final Set<TaskId> emptyTasks = Collections.emptySet();
    mockTaskManager(
        emptyTasks,
        emptyTasks,
        UUID.randomUUID(),
        builder);
    configurePartitionAssignor(Collections.singletonMap(StreamsConfig.UPGRADE_FROM_CONFIG, StreamsConfig.UPGRADE_FROM_0100));
    final PartitionAssignor.Subscription subscription = partitionAssignor.subscription(Utils.mkSet("topic1"));
    assertThat(SubscriptionInfo.decode(subscription.userData()).version(), equalTo(1));
}
// upgrade.from=0.10.1 must downgrade the subscription encoding to version 2.
@Test
public void shouldDownGradeSubscriptionToVersion2For0101() {
    shouldDownGradeSubscriptionToVersion2(StreamsConfig.UPGRADE_FROM_0101);
}
// upgrade.from=0.10.2 must downgrade the subscription encoding to version 2.
@Test
public void shouldDownGradeSubscriptionToVersion2For0102() {
    shouldDownGradeSubscriptionToVersion2(StreamsConfig.UPGRADE_FROM_0102);
}
// upgrade.from=0.11.0 must downgrade the subscription encoding to version 2.
@Test
public void shouldDownGradeSubscriptionToVersion2For0110() {
    shouldDownGradeSubscriptionToVersion2(StreamsConfig.UPGRADE_FROM_0110);
}
// upgrade.from=1.0 must downgrade the subscription encoding to version 2.
@Test
public void shouldDownGradeSubscriptionToVersion2For10() {
    shouldDownGradeSubscriptionToVersion2(StreamsConfig.UPGRADE_FROM_10);
}
// upgrade.from=1.1 must downgrade the subscription encoding to version 2.
@Test
public void shouldDownGradeSubscriptionToVersion2For11() {
    shouldDownGradeSubscriptionToVersion2(StreamsConfig.UPGRADE_FROM_11);
}
// Common driver: with the given upgrade.from value configured, the encoded
// subscription must use version 2.
private void shouldDownGradeSubscriptionToVersion2(final Object upgradeFromValue) {
    final Set<TaskId> emptyTasks = Collections.emptySet();
    mockTaskManager(
        emptyTasks,
        emptyTasks,
        UUID.randomUUID(),
        builder);
    configurePartitionAssignor(Collections.singletonMap(StreamsConfig.UPGRADE_FROM_CONFIG, upgradeFromValue));
    final PartitionAssignor.Subscription subscription = partitionAssignor.subscription(Utils.mkSet("topic1"));
    assertThat(SubscriptionInfo.decode(subscription.userData()).version(), equalTo(2));
}
// Version probing: a member whose subscription version is newer than supported
// ("future") must receive an empty assignment, while current-version members
// keep their normal (previous) active/standby assignment.
@Test
public void shouldReturnUnchangedAssignmentForOldInstancesAndEmptyAssignmentForFutureInstances() {
    builder.addSource(null, "source1", null, null, null, "topic1");
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
    final Set<TaskId> activeTasks = Utils.mkSet(task0, task1);
    final Set<TaskId> standbyTasks = Utils.mkSet(task2);
    // Expected standby mapping for consumer1: task2 backed by t1p2.
    final Map<TaskId, Set<TopicPartition>> standbyTaskMap = new HashMap<TaskId, Set<TopicPartition>>() {
        {
            put(task2, Collections.singleton(t1p2));
        }
    };
    subscriptions.put(
        "consumer1",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            new SubscriptionInfo(UUID.randomUUID(), activeTasks, standbyTasks, null).encode()
        )
    );
    subscriptions.put(
        "future-consumer",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            encodeFutureSubscription()
        )
    );
    mockTaskManager(
        allTasks,
        allTasks,
        UUID.randomUUID(),
        builder);
    partitionAssignor.configure(configProps());
    final Map<String, PartitionAssignor.Assignment> assignment = partitionAssignor.assign(metadata, subscriptions);
    assertThat(assignment.size(), equalTo(2));
    // The current-version member keeps its previous active and standby tasks.
    assertThat(
        AssignmentInfo.decode(assignment.get("consumer1").userData()),
        equalTo(new AssignmentInfo(
            new ArrayList<>(activeTasks),
            standbyTaskMap,
            Collections.emptyMap()
        )));
    assertThat(assignment.get("consumer1").partitions(), equalTo(asList(t1p0, t1p1)));
    // The future-version member receives an empty assignment.
    assertThat(AssignmentInfo.decode(assignment.get("future-consumer").userData()), equalTo(new AssignmentInfo()));
    assertThat(assignment.get("future-consumer").partitions().size(), equalTo(0));
}
// A v1 subscription mixed with a future-versioned subscription must be rejected.
@Test
public void shouldThrowIfV1SubscriptionAndFutureSubscriptionIsMixed() {
    shouldThrowIfPreVersionProbingSubscriptionAndFutureSubscriptionIsMixed(1);
}
// A v2 subscription mixed with a future-versioned subscription must be rejected.
@Test
public void shouldThrowIfV2SubscriptionAndFutureSubscriptionIsMixed() {
    shouldThrowIfPreVersionProbingSubscriptionAndFutureSubscriptionIsMixed(2);
}
/**
 * Encodes subscription user data for a hypothetical "future" client: both the
 * used-version and supported-version fields are one past the latest version
 * this code base understands.
 */
private ByteBuffer encodeFutureSubscription() {
    final int futureVersion = SubscriptionInfo.LATEST_SUPPORTED_VERSION + 1;
    // 4 bytes for the used version + 4 bytes for the supported version.
    return ByteBuffer.allocate(8)
        .putInt(futureVersion)
        .putInt(futureVersion);
}
// Common driver: mixing an old (pre-version-probing) subscription with a
// future-versioned subscription in one rebalance must throw IllegalStateException.
private void shouldThrowIfPreVersionProbingSubscriptionAndFutureSubscriptionIsMixed(final int oldVersion) {
    final Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    final Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put(
        "consumer1",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            new SubscriptionInfo(oldVersion, UUID.randomUUID(), emptyTasks, emptyTasks, null).encode()
        )
    );
    subscriptions.put(
        "future-consumer",
        new PartitionAssignor.Subscription(
            Collections.singletonList("topic1"),
            encodeFutureSubscription()
        )
    );
    mockTaskManager(
        emptyTasks,
        emptyTasks,
        UUID.randomUUID(),
        builder);
    partitionAssignor.configure(configProps());
    try {
        partitionAssignor.assign(metadata, subscriptions);
        fail("Should have thrown IllegalStateException");
    } catch (final IllegalStateException expected) {
        // pass
    }
}
/**
 * Builds an Assignment with no partitions and no tasks, carrying only the given
 * host-to-partitions mapping in its encoded user data.
 */
private PartitionAssignor.Assignment createAssignment(final Map<HostInfo, Set<TopicPartition>> partitionsByHost) {
    return new PartitionAssignor.Assignment(
        Collections.emptyList(),
        new AssignmentInfo(Collections.emptyList(), Collections.emptyMap(), partitionsByHost).encode());
}
// Asserts internal consistency of a single consumer's assignment and returns the
// decoded AssignmentInfo for further checks by the caller.
// This assumes 1) the DefaultPartitionGrouper is used, and 2) there is exactly
// one topic group, so taskId.partition == partition.partition().
private AssignmentInfo checkAssignment(final Set<String> expectedTopics,
                                       final PartitionAssignor.Assignment assignment) {
    final AssignmentInfo info = AssignmentInfo.decode(assignment.userData());
    // check if the number of assigned partitions == the size of active task id list
    assertEquals(assignment.partitions().size(), info.activeTasks().size());
    // check if active tasks are consistent
    final List<TaskId> activeTasks = new ArrayList<>();
    final Set<String> activeTopics = new HashSet<>();
    for (final TopicPartition partition : assignment.partitions()) {
        // since default grouper, taskid.partition == partition.partition()
        activeTasks.add(new TaskId(0, partition.partition()));
        activeTopics.add(partition.topic());
    }
    assertEquals(activeTasks, info.activeTasks());
    // check if active partitions cover all topics
    assertEquals(expectedTopics, activeTopics);
    // check if standby tasks are consistent
    final Set<String> standbyTopics = new HashSet<>();
    for (final Map.Entry<TaskId, Set<TopicPartition>> entry : info.standbyTasks().entrySet()) {
        final TaskId id = entry.getKey();
        final Set<TopicPartition> partitions = entry.getValue();
        for (final TopicPartition partition : partitions) {
            // since default grouper, taskid.partition == partition.partition()
            assertEquals(id.partition, partition.partition());
            standbyTopics.add(partition.topic());
        }
    }
    if (info.standbyTasks().size() > 0) {
        // check if standby partitions cover all topics
        assertEquals(expectedTopics, standbyTopics);
    }
    return info;
}
}
| |
// HSSource
//
//Copyright (c) 2014 HelpStack (http://helpstack.io)
//
//Permission is hereby granted, free of charge, to any person obtaining a copy
//of this software and associated documentation files (the "Software"), to deal
//in the Software without restriction, including without limitation the rights
//to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//copies of the Software, and to permit persons to whom the Software is
//furnished to do so, subject to the following conditions:
//
//The above copyright notice and this permission notice shall be included in
//all copies or substantial portions of the Software.
//
//THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
//THE SOFTWARE.
package com.tenmiles.helpstack.logic;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Build;
import android.text.Html;
import android.text.SpannableString;
import android.util.Log;
import com.android.volley.RequestQueue;
import com.android.volley.Response.ErrorListener;
import com.android.volley.VolleyError;
import com.google.gson.Gson;
import com.tenmiles.helpstack.HSHelpStack;
import com.tenmiles.helpstack.activities.HSActivityManager;
import com.tenmiles.helpstack.fragments.HSFragmentParent;
import com.tenmiles.helpstack.model.HSAttachment;
import com.tenmiles.helpstack.model.HSCachedTicket;
import com.tenmiles.helpstack.model.HSCachedUser;
import com.tenmiles.helpstack.model.HSDraft;
import com.tenmiles.helpstack.model.HSKBItem;
import com.tenmiles.helpstack.model.HSTicket;
import com.tenmiles.helpstack.model.HSTicketUpdate;
import com.tenmiles.helpstack.model.HSUploadAttachment;
import com.tenmiles.helpstack.model.HSUser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.Calendar;
public class HSSource {
private static final String TAG = HSSource.class.getSimpleName();
private static final String HELPSTACK_DIRECTORY = "helpstack";
private static final String HELPSTACK_TICKETS_FILE_NAME = "tickets";
private static final String HELPSTACK_TICKETS_USER_DATA = "user_credential";
private static final String HELPSTACK_DRAFT = "draft";
private static HSSource singletonInstance = null;
/**
*
* @param context
* @return singleton instance of this class.
*/
public static HSSource getInstance(Context context) {
if (singletonInstance == null) {
synchronized (HSSource.class) { // 1
if (singletonInstance == null) // 2
{
Log.d(TAG, "New Instance");
singletonInstance = new HSSource(
context.getApplicationContext()); // 3
}
}
}
// As this singleton can be called even before gear is set, refreshing it
singletonInstance.setGear(HSHelpStack.getInstance(context).getGear());
return singletonInstance;
}
private HSGear gear;
private Context mContext;
private RequestQueue mRequestQueue;
private HSCachedTicket cachedTicket;
private HSCachedUser cachedUser;
private HSDraft draftObject;
private HSSource(Context context) {
this.mContext = context;
setGear(HSHelpStack.getInstance(context).getGear());
mRequestQueue = HSHelpStack.getInstance(context).getRequestQueue();
refreshFieldsFromCache();
}
public void requestKBArticle(String cancelTag, HSKBItem section, OnFetchedArraySuccessListener success, ErrorListener errorListener ) {
if (gear.haveImplementedKBFetching()) {
gear.fetchKBArticle(cancelTag, section,mRequestQueue, new SuccessWrapper(success) {
@Override
public void onSuccess(Object[] successObject) {
assert successObject != null : "It seems requestKBArticle was not implemented in gear" ;
// Do your work here, may be caching, data validation etc.
super.onSuccess(successObject);
}
}, new ErrorWrapper("Fetching KB articles", errorListener));
}
else {
try {
HSArticleReader reader = new HSArticleReader(gear.getLocalArticleResourceId());
success.onSuccess(reader.readArticlesFromResource(mContext));
} catch (XmlPullParserException e) {
e.printStackTrace();
throwError(errorListener, "Unable to parse local article XML");
} catch (IOException e) {
e.printStackTrace();
throwError(errorListener, "Unable to read local article XML");
}
}
}
public void requestAllTickets(OnFetchedArraySuccessListener success, ErrorListener error ) {
if (cachedTicket == null) {
success.onSuccess(new HSTicket[0]);
}
else {
success.onSuccess(cachedTicket.getTickets());
}
}
public void checkForUserDetailsValidity(String cancelTag, String firstName, String lastName, String email,OnFetchedSuccessListener success, ErrorListener errorListener) {
gear.registerNewUser(cancelTag, firstName, lastName, email, mRequestQueue, success, new ErrorWrapper("Registering New User", errorListener));
}
public void createNewTicket(String cancelTag, HSUser user, String subject, String message, HSAttachment[] attachment, OnNewTicketFetchedSuccessListener successListener, ErrorListener errorListener) {
HSUploadAttachment[] upload_attachments = convertAttachmentArrayToUploadAttachment(attachment);
message = message + getDeviceInformation(mContext);
if (gear.canUploadMessageAsHtmlString()) {
message = Html.toHtml(new SpannableString(message));
}
gear.createNewTicket(cancelTag, user, subject, message, upload_attachments, mRequestQueue, new NewTicketSuccessWrapper(successListener) {
@Override
public void onSuccess(HSUser udpatedUserDetail, HSTicket ticket) {
// Save ticket and user details in cache
// Save properties also later.
doSaveNewTicketPropertiesForGearInCache(ticket);
doSaveNewUserPropertiesForGearInCache(udpatedUserDetail);
super.onSuccess(udpatedUserDetail, ticket);
}
}, new ErrorWrapper("Creating New Ticket", errorListener));
}
public void requestAllUpdatesOnTicket(String cancelTag, HSTicket ticket, OnFetchedArraySuccessListener success, ErrorListener errorListener ) {
gear.fetchAllUpdateOnTicket(cancelTag, ticket, cachedUser.getUser(), mRequestQueue, success, new ErrorWrapper("Fetching updates on Ticket", errorListener));
}
public void addReplyOnATicket(String cancelTag, String message, HSAttachment[] attachments, HSTicket ticket, OnFetchedSuccessListener success, ErrorListener errorListener) {
if (gear.canUploadMessageAsHtmlString()) {
message = Html.toHtml(new SpannableString(message));
}
gear.addReplyOnATicket(cancelTag, message, convertAttachmentArrayToUploadAttachment(attachments), ticket, getUser(), mRequestQueue, new OnFetchedSuccessListenerWrapper(success, message, attachments) {
@Override
public void onSuccess(Object successObject) {
if (gear.canIgnoreTicketUpdateInformationAfterAddingReply()) {
HSTicketUpdate update = HSTicketUpdate.createUpdateByUser(null, null, this.message, Calendar.getInstance().getTime(), this.attachments);
super.onSuccess(update);
}
else {
super.onSuccess(successObject);
}
}
}, new ErrorWrapper("Adding reply to a ticket", errorListener));
}
public HSGear getGear() {
return gear;
}
private void setGear(HSGear gear) {
this.gear = gear;
}
public boolean isNewUser() {
return cachedUser.getUser() == null;
}
public void refreshUser() {
doReadUserFromCache();
}
public HSUser getUser() {
return cachedUser.getUser();
}
public String getDraftSubject() {
if(draftObject != null) {
return draftObject.getSubject();
}
return null;
}
public String getDraftMessage() {
if(draftObject != null) {
return draftObject.getMessage();
}
return null;
}
public HSUser getDraftUser() {
if(draftObject != null) {
return draftObject.getDraftUser();
}
return null;
}
public HSAttachment[] getDraftAttachments() {
if(draftObject != null) {
return draftObject.getAttachments();
}
return null;
}
public String getDraftReplyMessage() {
if(draftObject != null) {
return draftObject.getDraftReplyMessage();
}
return null;
}
public HSAttachment[] getDraftReplyAttachments() {
if(draftObject != null) {
return draftObject.getDraftReplyAttachments();
}
return null;
}
public void saveTicketDetailsInDraft(String subject, String message, HSAttachment[] attachmentsArray) {
doSaveTicketDraftForGearInCache(subject, message, attachmentsArray);
}
public void saveUserDetailsInDraft(HSUser user) {
doSaveUserDraftForGearInCache(user);
}
public void saveReplyDetailsInDraft(String message, HSAttachment[] attachmentsArray) {
doSaveReplyDraftForGearInCache(message, attachmentsArray);
}
public boolean haveImplementedTicketFetching() {
return gear.haveImplementedTicketFetching();
}
public String getSupportEmailAddress() {
return gear.getCompanySupportEmailAddress();
}
/***
*
* Depending on the setting set on gear, it launches new ticket activity.
*
* if email : launches email [Done]
* else:
* if user logged in : launches user details [Done]
* else: launches new ticket [Done]
*
* @param fragment
* @param requestCode
*/
public void launchCreateNewTicketScreen(HSFragmentParent fragment, int requestCode) {
if (haveImplementedTicketFetching()) {
if(isNewUser()) {
HSActivityManager.startNewIssueActivity(fragment, null, requestCode);
}
else {
HSActivityManager.startNewIssueActivity(fragment, getUser(), requestCode);
}
}
else {
launchEmailAppWithEmailAddress(fragment.getActivity());
}
}
public void launchEmailAppWithEmailAddress(Activity activity) {
Intent emailIntent = new Intent(android.content.Intent.ACTION_SEND);
emailIntent.setType("plain/text");
emailIntent.putExtra(android.content.Intent.EXTRA_EMAIL, new String[]{ getSupportEmailAddress()});
emailIntent.putExtra(android.content.Intent.EXTRA_SUBJECT, "");
emailIntent.putExtra(android.content.Intent.EXTRA_TEXT, getDeviceInformation(activity));
activity.startActivity(Intent.createChooser(emailIntent, "Email"));
}
private static String getDeviceInformation(Context activity) {
StringBuilder builder = new StringBuilder();
builder.append("\n\n\n");
builder.append("========");
builder.append("\nDevice brand: ");
builder.append(Build.MODEL);
builder.append("\nAndroid version: ");
builder.append(Build.VERSION.SDK_INT);
builder.append("\nApp package: ");
try {
builder.append(activity.getPackageManager().getPackageInfo(activity.getPackageName(),0).packageName);
} catch (NameNotFoundException e) {
builder.append("NA");
}
builder.append("\nApp version: ");
try {
builder.append(activity.getPackageManager().getPackageInfo(activity.getPackageName(),0).versionCode);
} catch (NameNotFoundException e) {
builder.append("NA");
}
return builder.toString();
}
public void cancelOperation(String cancelTag) {
mRequestQueue.cancelAll(cancelTag);
}
/////////////////////////////////////////////////
//////// Utility Functions /////////////////
/////////////////////////////////////////////////
public void refreshFieldsFromCache() {
// read the ticket data from cache and maintain here
doReadTicketsFromCache();
doReadUserFromCache();
doReadDraftFromCache();
}
/**
* Opens a file and read its content. Return null if any error occured or file not found
* @param file
* @return
*/
private String readJsonFromFile(File file) {
if (!file.exists()) {
return null;
}
String json = null;
FileInputStream inputStream;
try {
StringBuilder datax = new StringBuilder();
inputStream = new FileInputStream(file);
InputStreamReader isr = new InputStreamReader(inputStream);
BufferedReader bufferReader = new BufferedReader(isr);
String readString = bufferReader.readLine();
while (readString != null) {
datax.append(readString);
readString = bufferReader.readLine();
}
isr.close();
json = datax.toString();
return json;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private void writeJsonIntoFile (File file, String json) {
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(file);
outputStream.write(json.getBytes());
outputStream.close();
} catch (Exception e) {
e.printStackTrace();
}
}
protected void doSaveNewTicketPropertiesForGearInCache(HSTicket ticket) {
cachedTicket.addTicketAtStart(ticket);
Gson gson = new Gson();
String ticketsgson = gson.toJson(cachedTicket);
File ticketFile = new File(getProjectDirectory(), HELPSTACK_TICKETS_FILE_NAME);
writeJsonIntoFile(ticketFile, ticketsgson);
}
protected void doSaveNewUserPropertiesForGearInCache(HSUser user) {
cachedUser.setUser(user);
Gson gson = new Gson();
String userjson = gson.toJson(cachedUser);
File userFile = new File(getProjectDirectory(), HELPSTACK_TICKETS_USER_DATA);
writeJsonIntoFile(userFile, userjson);
}
protected void doReadTicketsFromCache() {
File ticketFile = new File(getProjectDirectory(), HELPSTACK_TICKETS_FILE_NAME);
String json = readJsonFromFile(ticketFile);
if (json == null) {
cachedTicket = new HSCachedTicket();
}
else {
Gson gson = new Gson();
cachedTicket = gson.fromJson(json, HSCachedTicket.class);
}
}
protected void doReadUserFromCache() {
File userFile = new File(getProjectDirectory(), HELPSTACK_TICKETS_USER_DATA);
String json = readJsonFromFile(userFile);
if (json == null) {
cachedUser = new HSCachedUser();
}
else {
Gson gson = new Gson();
cachedUser = gson.fromJson(json, HSCachedUser.class);
}
}
protected void doReadDraftFromCache() {
File draftFile = new File(getProjectDirectory(), HELPSTACK_DRAFT);
String json = readJsonFromFile(draftFile);
if (json == null) {
draftObject = new HSDraft();
}
else {
Gson gson = new Gson();
draftObject = gson.fromJson(json, HSDraft.class);
}
}
protected void doSaveTicketDraftForGearInCache(String subject, String message, HSAttachment[] attachmentsArray) {
draftObject.setDraftSubject(subject);
draftObject.setDraftMessage(message);
draftObject.setDraftAttachments(attachmentsArray);
writeDraftIntoFile();
}
protected void doSaveUserDraftForGearInCache(HSUser user) {
draftObject.setDraftUser(user);
writeDraftIntoFile();
}
protected void doSaveReplyDraftForGearInCache(String message, HSAttachment[] attachmentsArray) {
draftObject.setDraftReplyMessage(message);
draftObject.setDraftReplyAttachments(attachmentsArray);
writeDraftIntoFile();
}
private void writeDraftIntoFile() {
Gson gson = new Gson();
String draftJson = gson.toJson(draftObject);
File draftFile = new File(getProjectDirectory(), HELPSTACK_DRAFT);
writeJsonIntoFile(draftFile, draftJson);
}
protected File getProjectDirectory() {
File projDir = new File(mContext.getFilesDir(), HELPSTACK_DIRECTORY);
if (!projDir.exists())
projDir.mkdirs();
return projDir;
}
public void clearTicketDraft() {
saveTicketDetailsInDraft("", "", null);
}
public void clearReplyDraft() {
saveReplyDetailsInDraft("", null);
}
private class NewTicketSuccessWrapper implements OnNewTicketFetchedSuccessListener {
private OnNewTicketFetchedSuccessListener lastListener;
public NewTicketSuccessWrapper(OnNewTicketFetchedSuccessListener lastListener) {
this.lastListener = lastListener;
}
@Override
public void onSuccess(HSUser udpatedUserDetail, HSTicket ticket) {
if (lastListener != null)
lastListener.onSuccess(udpatedUserDetail, ticket);
}
}
protected HSUploadAttachment[] convertAttachmentArrayToUploadAttachment(HSAttachment[] attachment) {
HSUploadAttachment[] upload_attachments = new HSUploadAttachment[0];
if (attachment != null && attachment.length > 0) {
int attachmentCount = gear.getNumberOfAttachmentGearCanHandle();
assert attachmentCount >= attachment.length : "Gear cannot handle more than "+attachmentCount+" attachments";
upload_attachments = new HSUploadAttachment[attachment.length];
for (int i = 0; i < upload_attachments.length; i++) {
upload_attachments[i] = new HSUploadAttachment(mContext, attachment[i]);
}
}
return upload_attachments;
}
private class SuccessWrapper implements OnFetchedArraySuccessListener {
private OnFetchedArraySuccessListener lastListener;
public SuccessWrapper(OnFetchedArraySuccessListener lastListener) {
this.lastListener = lastListener;
}
@Override
public void onSuccess(Object[] successObject) {
if (lastListener != null)
lastListener.onSuccess(successObject);
}
}
private class OnFetchedSuccessListenerWrapper implements OnFetchedSuccessListener {
private OnFetchedSuccessListener listener;
protected String message;
protected HSAttachment[] attachments;
private OnFetchedSuccessListenerWrapper(OnFetchedSuccessListener listener, String message, HSAttachment[] attachments) {
this.listener = listener;
this.message = message;
this.attachments = attachments;
}
@Override
public void onSuccess(Object successObject) {
if (this.listener != null) {
this.listener.onSuccess(successObject);
}
}
}
private class ErrorWrapper implements ErrorListener {
private ErrorListener errorListener;
private String methodName;
public ErrorWrapper(String methodName, ErrorListener errorListener) {
this.errorListener = errorListener;
this.methodName = methodName;
}
@Override
public void onErrorResponse(VolleyError error) {
printErrorDescription(methodName, error);
this.errorListener.onErrorResponse(error);
}
}
public static void throwError(ErrorListener errorListener, String error) {
VolleyError volleyError = new VolleyError(error);
printErrorDescription(null, volleyError);
errorListener.onErrorResponse(volleyError);
}
private static void printErrorDescription (String methodName, VolleyError error) {
if (methodName == null) {
Log.e(HSHelpStack.LOG_TAG, "Error occurred in HelpStack");
}
else {
Log.e(HSHelpStack.LOG_TAG, "Error occurred when executing " + methodName);
}
Log.e(HSHelpStack.LOG_TAG, error.toString());
if (error.getMessage() != null) {
Log.e(HSHelpStack.LOG_TAG, error.getMessage());
}
if (error.networkResponse != null && error.networkResponse.data != null) {
try {
Log.e(HSHelpStack.LOG_TAG, new String(error.networkResponse.data, "utf-8"));
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
error.printStackTrace();
}
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.networking.nio;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.logging.ILogger;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.util.AbstractSet;
import java.util.Iterator;
import java.util.NoSuchElementException;
import static com.hazelcast.util.Preconditions.checkNotNull;
import static java.lang.Class.forName;
import static java.lang.System.arraycopy;
/**
* The SelectorOptimizer optimizes the Selector so less litter is being created.
* The Selector uses a HashSet, but this creates an object for every add of a
* selection key. With this SelectorOptimizer a SelectionKeysSet, which contains
* an array, is being used since every key is going to be inserted only once.
*
* This trick comes from Netty.
*/
public final class SelectorOptimizer {
    // Fully-qualified name of the JDK-internal Selector implementation whose
    // selectedKeys fields we replace via reflection.
    static final String SELECTOR_IMPL = "sun.nio.ch.SelectorImpl";

    // Utility class; not instantiable.
    private SelectorOptimizer() {
    }

    /**
     * Creates a new Selector and will optimize it if possible.
     *
     * Optimization can be disabled via the system property
     * {@code hazelcast.io.optimizeselector} (default: true).
     *
     * @param logger the logger used for the optimization process.
     * @return the created Selector.
     * @throws NullPointerException if logger is null.
     */
    static Selector newSelector(ILogger logger) {
        checkNotNull(logger, "logger");

        Selector selector;
        try {
            selector = Selector.open();
        } catch (IOException e) {
            throw new HazelcastException("Failed to open a Selector", e);
        }

        boolean optimize = Boolean.parseBoolean(System.getProperty("hazelcast.io.optimizeselector", "true"));
        if (optimize) {
            optimize(selector, logger);
        }
        return selector;
    }

    /**
     * Tries to optimize the provided Selector by replacing its internal
     * {@code selectedKeys}/{@code publicSelectedKeys} HashSet with an
     * array-backed {@link SelectionKeysSet}.
     *
     * @param selector the selector to optimize
     * @param logger   the logger used to report the optimization outcome
     * @return the installed SelectionKeysSet if the optimization was a success, null otherwise.
     * @throws NullPointerException if selector or logger is null.
     */
    static SelectionKeysSet optimize(Selector selector, ILogger logger) {
        checkNotNull(selector, "selector");
        checkNotNull(logger, "logger");

        try {
            SelectionKeysSet set = new SelectionKeysSet();

            Class<?> selectorImplClass = findOptimizableSelectorClass(selector);
            if (selectorImplClass == null) {
                return null;
            }

            Field selectedKeysField = selectorImplClass.getDeclaredField("selectedKeys");
            selectedKeysField.setAccessible(true);

            Field publicSelectedKeysField = selectorImplClass.getDeclaredField("publicSelectedKeys");
            publicSelectedKeysField.setAccessible(true);

            selectedKeysField.set(selector, set);
            publicSelectedKeysField.set(selector, set);

            logger.finest("Optimized Selector: " + selector.getClass().getName());
            return set;
        } catch (Throwable t) {
            // we don't want to print at warning level because it could very well be that the target JVM doesn't
            // support this optimization. That is why we print on finest
            logger.finest("Failed to optimize Selector: " + selector.getClass().getName(), t);
            return null;
        }
    }

    /**
     * Resolves {@code sun.nio.ch.SelectorImpl} and returns it only when the
     * given selector is actually an instance of it; null otherwise.
     */
    static Class<?> findOptimizableSelectorClass(Selector selector) throws ClassNotFoundException {
        Class<?> selectorImplClass = forName(SELECTOR_IMPL, false, SelectorOptimizer.class.getClassLoader());

        // Ensure the current selector implementation is what we can instrument.
        if (!selectorImplClass.isAssignableFrom(selector.getClass())) {
            return null;
        }
        return selectorImplClass;
    }

    // Array-backed, garbage-free replacement for the Selector's selected-key
    // HashSet. NOTE(review): add/size/iterator are real, but remove/contains
    // intentionally violate the Set contract; this is safe only as long as
    // SelectorImpl never relies on them — confirm against the target JDK.
    static class SelectionKeysSet extends AbstractSet<SelectionKey> {
        // the active SelectionKeys is the one where is being added to.
        SelectionKeys activeKeys = new SelectionKeys();
        // the passive SelectionKeys is one that is being read using the iterator.
        SelectionKeys passiveKeys = new SelectionKeys();

        // the iterator is recycled.
        private final IteratorImpl iterator = new IteratorImpl();

        SelectionKeysSet() {
        }

        @Override
        public boolean add(SelectionKey o) {
            return activeKeys.add(o);
        }

        @Override
        public int size() {
            return activeKeys.size;
        }

        @Override
        public Iterator<SelectionKey> iterator() {
            // Hand the recycled iterator the snapshot produced by flip().
            iterator.init(flip());
            return iterator;
        }

        // Double-buffer swap: the previously active array becomes the one being
        // iterated, while a reset array collects new keys.
        private SelectionKey[] flip() {
            SelectionKeys tmp = activeKeys;
            activeKeys = passiveKeys;
            passiveKeys = tmp;

            activeKeys.size = 0;
            return passiveKeys.keys;
        }

        @Override
        public boolean remove(Object o) {
            return false;
        }

        @Override
        public boolean contains(Object o) {
            return false;
        }
    }

    // Growable SelectionKey array; doubles its capacity when full.
    static final class SelectionKeys {
        static final int INITIAL_CAPACITY = 32;

        SelectionKey[] keys = new SelectionKey[INITIAL_CAPACITY];
        int size;

        private boolean add(SelectionKey key) {
            if (key == null) {
                return false;
            }

            ensureCapacity();
            keys[size] = key;
            size++;
            return true;
        }

        private void ensureCapacity() {
            if (size < keys.length) {
                return;
            }

            SelectionKey[] newKeys = new SelectionKey[keys.length * 2];
            arraycopy(keys, 0, newKeys, 0, size);
            keys = newKeys;
        }
    }

    // Recycled iterator over a keys array; a null slot marks the logical end.
    static final class IteratorImpl implements Iterator<SelectionKey> {

        SelectionKey[] keys;
        int index;

        private void init(SelectionKey[] keys) {
            this.keys = keys;
            this.index = -1;
        }

        @Override
        public boolean hasNext() {
            if (index >= keys.length - 1) {
                return false;
            }

            return keys[index + 1] != null;
        }

        @Override
        public SelectionKey next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            index++;
            return keys[index];
        }

        @Override
        public void remove() {
            if (index == -1 || index >= keys.length || keys[index] == null) {
                throw new IllegalStateException();
            }

            // Nulling the slot is how the Selector clears a handled key.
            keys[index] = null;
        }
    }
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputFileCache;
import com.google.devtools.build.lib.actions.ActionStatusMessage;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.ExecutionStrategy;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.SpawnActionContext;
import com.google.devtools.build.lib.actions.Spawns;
import com.google.devtools.build.lib.actions.UserExecException;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.exec.SpawnInputExpander;
import com.google.devtools.build.lib.remote.ContentDigests.ActionKey;
import com.google.devtools.build.lib.remote.RemoteProtocol.Action;
import com.google.devtools.build.lib.remote.RemoteProtocol.ActionResult;
import com.google.devtools.build.lib.remote.RemoteProtocol.Command;
import com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecuteReply;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecuteRequest;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecutionStatus;
import com.google.devtools.build.lib.remote.RemoteProtocol.Platform;
import com.google.devtools.build.lib.remote.TreeNodeRepository.TreeNode;
import com.google.devtools.build.lib.rules.fileset.FilesetActionContext;
import com.google.devtools.build.lib.runtime.AuthAndTLSOptions;
import com.google.devtools.build.lib.standalone.StandaloneSpawnStrategy;
import com.google.devtools.build.lib.util.io.FileOutErr;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.protobuf.TextFormat;
import com.google.protobuf.TextFormat.ParseException;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeSet;
/**
* Strategy that uses a distributed cache for sharing action input and output files. Optionally this
* strategy also support offloading the work to a remote worker.
*/
@ExecutionStrategy(
name = {"remote"},
contextType = SpawnActionContext.class
)
final class RemoteSpawnStrategy implements SpawnActionContext {
private final Path execRoot;
private final StandaloneSpawnStrategy standaloneStrategy;
private final boolean verboseFailures;
private final RemoteOptions remoteOptions;
// TODO(olaola): This will be set on a per-action basis instead.
private final Platform platform;
private final ChannelOptions channelOptions;
private final SpawnInputExpander spawnInputExpander = new SpawnInputExpander(/*strict=*/ false);
/**
 * Creates the remote spawn strategy.
 *
 * NOTE(review): {@code clientEnv} is accepted but not stored here — presumably
 * consumed elsewhere or reserved for future use; confirm against callers.
 *
 * @throws IllegalArgumentException if --experimental_remote_platform_override
 *     does not parse as a Platform text proto.
 */
RemoteSpawnStrategy(
Map<String, String> clientEnv,
Path execRoot,
RemoteOptions remoteOptions,
AuthAndTLSOptions authTlsOptions,
boolean verboseFailures,
String productName) {
this.execRoot = execRoot;
// Local fallback used whenever the spawn is not remotable or caching is off.
this.standaloneStrategy = new StandaloneSpawnStrategy(execRoot, verboseFailures, productName);
this.verboseFailures = verboseFailures;
this.remoteOptions = remoteOptions;
channelOptions = ChannelOptions.create(authTlsOptions, remoteOptions.grpcMaxChunkSizeBytes);
if (remoteOptions.experimentalRemotePlatformOverride != null) {
Platform.Builder platformBuilder = Platform.newBuilder();
try {
// The override is supplied as a text-format Platform proto.
TextFormat.getParser().merge(remoteOptions.experimentalRemotePlatformOverride,
platformBuilder);
} catch (ParseException e) {
throw new IllegalArgumentException(
"Failed to parse --experimental_remote_platform_override", e);
}
platform = platformBuilder.build();
} else {
platform = null;
}
}
/**
 * Assembles the remote-execution {@code Action} proto from the command digest,
 * the input-tree root digest, and the declared output paths.
 */
private Action buildAction(
    Collection<? extends ActionInput> outputs, ContentDigest command, ContentDigest inputRoot) {
  Action.Builder builder = Action.newBuilder()
      .setCommandDigest(command)
      .setInputRootDigest(inputRoot);
  // The stable iteration order of the outputs is relied upon for remote
  // action caching — do not sort or reorder here.
  for (ActionInput output : outputs) {
    builder.addOutputPath(output.getExecPathString());
  }
  if (platform != null) {
    builder.setPlatform(platform);
  }
  return builder.build();
}
/**
 * Assembles the remote-execution {@code Command} proto from the argv list and
 * the environment, with environment variables emitted in sorted name order.
 */
private Command buildCommand(List<String> arguments, ImmutableMap<String, String> environment) {
  Command.Builder builder = Command.newBuilder().addAllArgv(arguments);
  // Sorted variable names keep the serialized proto deterministic.
  for (String name : new TreeSet<>(environment.keySet())) {
    builder.addEnvironmentBuilder().setVariable(name).setValue(environment.get(name));
  }
  return builder.build();
}
/**
* Fallback: execute the spawn locally. If an ActionKey is provided, try to upload results to
* remote action cache.
*/
private void execLocally(
Spawn spawn,
ActionExecutionContext actionExecutionContext,
RemoteActionCache actionCache,
ActionKey actionKey)
throws ExecException, InterruptedException {
// Always run locally first; the upload below is strictly best-effort.
standaloneStrategy.exec(spawn, actionExecutionContext);
if (remoteOptions.remoteUploadLocalResults && actionCache != null && actionKey != null) {
ArrayList<Path> outputFiles = new ArrayList<>();
for (ActionInput output : spawn.getOutputFiles()) {
Path outputFile = execRoot.getRelative(output.getExecPathString());
// Ignore non-existent files.
// TODO(ulfjack): This is not ideal - in general, all spawn strategies should stat the
// output files and return a list of existing files. We shouldn't re-stat the files here.
if (!outputFile.exists()) {
continue;
}
outputFiles.add(outputFile);
}
try {
ActionResult.Builder result = ActionResult.newBuilder();
// Upload outputs first, then stderr/stdout digests, then publish the
// cache entry — the entry must only appear once its blobs exist.
actionCache.uploadAllResults(execRoot, outputFiles, result);
FileOutErr outErr = actionExecutionContext.getFileOutErr();
if (outErr.getErrorPath().exists()) {
ContentDigest stderr = actionCache.uploadFileContents(outErr.getErrorPath());
result.setStderrDigest(stderr);
}
if (outErr.getOutputPath().exists()) {
ContentDigest stdout = actionCache.uploadFileContents(outErr.getOutputPath());
result.setStdoutDigest(stdout);
}
actionCache.setCachedActionResult(actionKey, result.build());
// Handle all cache errors here.
} catch (IOException e) {
throw new UserExecException("Unexpected IO error.", e);
} catch (UnsupportedOperationException e) {
// Cache backend doesn't support uploads; warn but keep the local result.
actionExecutionContext
.getExecutor()
.getEventHandler()
.handle(
Event.warn(
spawn.getMnemonic() + " unsupported operation for action cache (" + e + ")"));
} catch (StatusRuntimeException e) {
// gRPC failure during upload; warn but keep the local result.
actionExecutionContext
.getExecutor()
.getEventHandler()
.handle(Event.warn(spawn.getMnemonic() + " failed uploading results (" + e + ")"));
}
}
}
/**
 * Replays the remote action's stdout and stderr (fetched from the cache by
 * digest) into the local output streams. Missing blobs are silently skipped.
 */
private static void passRemoteOutErr(
    RemoteActionCache cache, ActionResult result, FileOutErr outErr) {
  try {
    ImmutableList<byte[]> outAndErr =
        cache.downloadBlobs(ImmutableList.of(result.getStdoutDigest(), result.getStderrDigest()));
    outErr.printOut(new String(outAndErr.get(0), UTF_8));
    outErr.printErr(new String(outAndErr.get(1), UTF_8));
  } catch (CacheNotFoundException e) {
    // Deliberately best-effort: absent stdout/stderr blobs are not replayed.
  }
}
/** Human-readable name of this strategy, used in status/event messages. */
@Override
public String toString() {
  final String strategyName = "remote";
  return strategyName;
}
/**
 * Executes the given {@code spawn}.
 *
 * <p>High-level flow: set up a per-action remote cache (and optionally a remote executor),
 * fall back to the standalone strategy when the spawn is not remotable or no cache is
 * configured, try to reuse a cached result, otherwise execute remotely (or locally when no
 * remote executor is configured), uploading results to the cache where enabled.
 *
 * @throws ExecException on unrecoverable execution or IO errors
 * @throws InterruptedException if the thread is interrupted while waiting for remote work
 */
@Override
public void exec(Spawn spawn, ActionExecutionContext actionExecutionContext)
    throws ExecException, InterruptedException {
  ActionKey actionKey = null;
  String mnemonic = spawn.getMnemonic();
  Executor executor = actionExecutionContext.getExecutor();
  EventHandler eventHandler = executor.getEventHandler();
  RemoteActionCache actionCache = null;
  GrpcRemoteExecutor workExecutor = null;
  if (spawn.isRemotable()) {
    // Initialize remote cache and execution handlers. We use separate handlers for every
    // action to enable server-side parallelism (need a different gRPC channel per action).
    if (SimpleBlobStoreFactory.isRemoteCacheOptions(remoteOptions)) {
      actionCache = new SimpleBlobStoreActionCache(SimpleBlobStoreFactory.create(remoteOptions));
    } else if (GrpcActionCache.isRemoteCacheOptions(remoteOptions)) {
      actionCache = new GrpcActionCache(remoteOptions, channelOptions);
    }
    // Otherwise actionCache remains null and remote caching/execution are disabled.
    if (actionCache != null && GrpcRemoteExecutor.isRemoteExecutionOptions(remoteOptions)) {
      workExecutor =
          new GrpcRemoteExecutor(
              RemoteUtils.createChannel(remoteOptions.remoteExecutor, channelOptions),
              channelOptions,
              remoteOptions);
    }
  }
  // No remote support available for this spawn: run it with the standalone strategy.
  if (!spawn.isRemotable() || actionCache == null) {
    standaloneStrategy.exec(spawn, actionExecutionContext);
    return;
  }
  if (executor.reportsSubcommands()) {
    executor.reportSubcommand(spawn);
  }
  executor.getEventBus().post(
      ActionStatusMessage.runningStrategy(spawn.getResourceOwner(), "remote"));
  try {
    // Temporary hack: the TreeNodeRepository should be created and maintained upstream!
    ActionInputFileCache inputFileCache = actionExecutionContext.getActionInputFileCache();
    TreeNodeRepository repository = new TreeNodeRepository(execRoot, inputFileCache);
    SortedMap<PathFragment, ActionInput> inputMap =
        spawnInputExpander.getInputMapping(
            spawn,
            actionExecutionContext.getArtifactExpander(),
            actionExecutionContext.getActionInputFileCache(),
            actionExecutionContext.getExecutor().getContext(FilesetActionContext.class));
    // Build the Merkle tree of inputs; its digest keys the action in the remote cache.
    TreeNode inputRoot = repository.buildFromActionInputs(inputMap);
    repository.computeMerkleDigests(inputRoot);
    Command command = buildCommand(spawn.getArguments(), spawn.getEnvironment());
    Action action =
        buildAction(
            spawn.getOutputFiles(),
            ContentDigests.computeDigest(command),
            repository.getMerkleDigest(inputRoot));
    // Look up action cache, and reuse the action output if it is found.
    actionKey = ContentDigests.computeActionKey(action);
    ActionResult result = this.remoteOptions.remoteAcceptCached
        ? actionCache.getCachedActionResult(actionKey)
        : null;
    boolean acceptCachedResult = this.remoteOptions.remoteAcceptCached;
    if (result != null) {
      // We don't cache failed actions, so we know the outputs exist.
      // For now, download all outputs locally; in the future, we can reuse the digests to
      // just update the TreeNodeRepository and continue the build.
      try {
        actionCache.downloadAllResults(result, execRoot);
        passRemoteOutErr(actionCache, result, actionExecutionContext.getFileOutErr());
        return;
      } catch (CacheNotFoundException e) {
        acceptCachedResult = false; // Retry the action remotely and invalidate the results.
      }
    }
    // No remote executor configured: run locally, still uploading results to the cache.
    if (workExecutor == null) {
      execLocally(spawn, actionExecutionContext, actionCache, actionKey);
      return;
    }
    // Upload the command and all the inputs into the remote cache.
    actionCache.uploadBlob(command.toByteArray());
    // TODO(olaola): this should use the ActionInputFileCache for SHA1 digests!
    actionCache.uploadTree(repository, execRoot, inputRoot);
    // TODO(olaola): set BuildInfo and input total bytes as well.
    ExecuteRequest.Builder request =
        ExecuteRequest.newBuilder()
            .setAction(action)
            .setAcceptCached(acceptCachedResult)
            .setTotalInputFileCount(inputMap.size())
            .setTimeoutMillis(1000 * Spawns.getTimeoutSeconds(spawn, 120));
    // TODO(olaola): set sensible local and remote timouts.
    ExecuteReply reply = workExecutor.executeRemotely(request.build());
    ExecutionStatus status = reply.getStatus();
    result = reply.getResult();
    // We do not want to pass on the remote stdout and strerr if we are going to retry the
    // action.
    if (status.getSucceeded()) {
      passRemoteOutErr(actionCache, result, actionExecutionContext.getFileOutErr());
      actionCache.downloadAllResults(result, execRoot);
      return;
    }
    // EXEC_FAILED (the command itself failed) is never retried locally; other errors fall
    // through to local retry only if the user allowed local fallback.
    if (status.getError() == ExecutionStatus.ErrorCode.EXEC_FAILED
        || !remoteOptions.remoteLocalFallback) {
      passRemoteOutErr(actionCache, result, actionExecutionContext.getFileOutErr());
      throw new UserExecException(status.getErrorDetail());
    }
    // For now, we retry locally on all other remote errors.
    // TODO(olaola): add remote retries on cache miss errors.
    execLocally(spawn, actionExecutionContext, actionCache, actionKey);
  } catch (IOException e) {
    throw new UserExecException("Unexpected IO error.", e);
  } catch (InterruptedException e) {
    eventHandler.handle(Event.warn(mnemonic + " remote work interrupted (" + e + ")"));
    Thread.currentThread().interrupt();
    throw e;
  } catch (StatusRuntimeException e) {
    // gRPC-level failure talking to the remote endpoint.
    String stackTrace = "";
    if (verboseFailures) {
      stackTrace = "\n" + Throwables.getStackTraceAsString(e);
    }
    eventHandler.handle(Event.warn(mnemonic + " remote work failed (" + e + ")" + stackTrace));
    if (remoteOptions.remoteLocalFallback) {
      execLocally(spawn, actionExecutionContext, actionCache, actionKey);
    } else {
      throw new UserExecException(e);
    }
  } catch (CacheNotFoundException e) {
    eventHandler.handle(Event.warn(mnemonic + " remote work results cache miss (" + e + ")"));
    if (remoteOptions.remoteLocalFallback) {
      execLocally(spawn, actionExecutionContext, actionCache, actionKey);
    } else {
      throw new UserExecException(e);
    }
  } catch (UnsupportedOperationException e) {
    eventHandler.handle(
        Event.warn(mnemonic + " unsupported operation for action cache (" + e + ")"));
  }
}
/**
 * {@inheritDoc}
 *
 * <p>Always {@code false}: exec exceptions from this strategy are not propagated by callers.
 */
@Override
public boolean shouldPropagateExecException() {
  return false;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.PathUtilsForTesting;
import org.elasticsearch.common.settings.KeyStoreWrapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.PosixPermissionsResetter;
import org.junit.After;
import org.junit.Before;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystem;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.GroupPrincipal;
import java.nio.file.attribute.PosixFileAttributeView;
import java.nio.file.attribute.PosixFileAttributes;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.UserPrincipal;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.not;
@LuceneTestCase.SuppressFileSystems("*")
public class InstallPluginCommandTests extends ESTestCase {
// Command whose jar-hell check is a no-op; used by most tests (see setUp()).
private InstallPluginCommand skipJarHellCommand;
// Unmodified command; used by tests that exercise the jar-hell check itself.
private InstallPluginCommand defaultCommand;
// Factory for per-test temporary directories on the file system under test.
private final Function<String, Path> temp;
private final MockTerminal terminal = new MockTerminal();
// File system under test (a Jimfs windows/osX/unix config, or the real default file system).
private final FileSystem fs;
// True when fs supports the "posix" attribute view (permission/owner assertions apply).
private final boolean isPosix;
// True when fs is the real default file system (required by e.g. the jar-hell tests).
private final boolean isReal;
// Original value of java.io.tmpdir, restored in tearDown().
private final String javaIoTmpdir;
/**
 * Parameterized constructor; {@code fs} and {@code temp} come from {@link #parameters()}.
 * Installs the given file system as the mock default and redirects java.io.tmpdir onto it
 * (both undone in {@link #tearDown()}).
 */
@SuppressForbidden(reason = "sets java.io.tmpdir")
public InstallPluginCommandTests(FileSystem fs, Function<String, Path> temp) {
    this.fs = fs;
    this.temp = temp;
    this.isPosix = fs.supportedFileAttributeViews().contains("posix");
    this.isReal = fs == PathUtils.getDefaultFileSystem();
    // Route all PathUtils lookups through the test file system.
    PathUtilsForTesting.installMock(fs);
    // Remember the real tmpdir, then point java.io.tmpdir at the test file system.
    javaIoTmpdir = System.getProperty("java.io.tmpdir");
    System.setProperty("java.io.tmpdir", temp.apply("tmpdir").toString());
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // Most tests install via this command, whose jar-hell check is a no-op (the real check
    // requires a real classpath on a real file system).
    skipJarHellCommand = new InstallPluginCommand() {
        @Override
        void jarHellCheck(PluginInfo candidateInfo, Path candidate, Path pluginsDir, Path modulesDir) throws Exception {
            // no jarhell check
        }
    };
    defaultCommand = new InstallPluginCommand();
    // Fresh terminal output for every test.
    terminal.reset();
}
@Override
@After
@SuppressForbidden(reason = "resets java.io.tmpdir")
public void tearDown() throws Exception {
    // Close both commands, restore the tmpdir overridden in the constructor, and unhook the
    // mock file system installed there.
    defaultCommand.close();
    skipJarHellCommand.close();
    System.setProperty("java.io.tmpdir", javaIoTmpdir);
    PathUtilsForTesting.teardown();
    super.tearDown();
}
/**
 * Supplies the (file system, temp-dir factory) pairs this test class is parameterized over:
 * three in-memory Jimfs configurations (windows, osX, unix — the latter two with POSIX views
 * enabled) plus the real default file system.
 */
@ParametersFactory
public static Iterable<Object[]> parameters() {
    // Local holder pairing a file system with a factory for temp dirs on that file system.
    class Parameter {
        private final FileSystem fileSystem;
        private final Function<String, Path> temp;
        // Convenience: build the temp-dir factory from a root path string.
        Parameter(FileSystem fileSystem, String root) {
            this(fileSystem, s -> {
                try {
                    return Files.createTempDirectory(fileSystem.getPath(root), s);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });
        }
        Parameter(FileSystem fileSystem, Function<String, Path> temp) {
            this.fileSystem = fileSystem;
            this.temp = temp;
        }
    }
    List<Parameter> parameters = new ArrayList<>();
    parameters.add(new Parameter(Jimfs.newFileSystem(Configuration.windows()), "c:\\"));
    parameters.add(new Parameter(Jimfs.newFileSystem(toPosix(Configuration.osX())), "/"));
    parameters.add(new Parameter(Jimfs.newFileSystem(toPosix(Configuration.unix())), "/"));
    parameters.add(new Parameter(PathUtils.getDefaultFileSystem(), LuceneTestCase::createTempDir ));
    return parameters.stream().map(p -> new Object[] { p.fileSystem, p.temp }).collect(Collectors.toList());
}
/** Returns a copy of {@code configuration} with POSIX (and related) attribute views enabled. */
private static Configuration toPosix(Configuration configuration) {
    Configuration.Builder builder = configuration.toBuilder();
    builder.setAttributeViews("basic", "owner", "posix", "unix");
    return builder.build();
}
/** Creates a test environment with bin, config and plugins directories. */
static Tuple<Path, Environment> createEnv(FileSystem fs, Function<String, Path> temp) throws IOException {
    final Path home = temp.apply("install-plugin-command-tests");
    // Minimal ES home layout: bin/elasticsearch, config/elasticsearch.yml and a plugins dir.
    final Path bin = Files.createDirectories(home.resolve("bin"));
    Files.createFile(bin.resolve("elasticsearch"));
    final Path config = Files.createDirectories(home.resolve("config"));
    Files.createFile(config.resolve("elasticsearch.yml"));
    final Path plugins = Files.createDirectories(home.resolve("plugins"));
    assertTrue(Files.exists(plugins));
    final Settings settings = Settings.builder().put("path.home", home).build();
    return Tuple.tuple(home, TestEnvironment.newEnvironment(settings));
}
/** Returns a fresh temporary directory in which to assemble a plugin. */
static Path createPluginDir(Function<String, Path> temp) throws IOException {
    final Path pluginDir = temp.apply("pluginDir");
    return pluginDir;
}
/** Creates a fake jar file containing one empty entry per class name (no packages, no bytecode). */
static void writeJar(Path jar, String... classes) throws IOException {
    try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(jar))) {
        for (String className : classes) {
            // no package names, just support simple classes; entries carry no class bytes
            stream.putNextEntry(new ZipEntry(className + ".class"));
        }
    }
}
/**
 * Zips the contents of {@code structure} (optionally nested under {@code prefix}/ inside the
 * archive) and returns the path of the resulting zip file.
 */
static Path writeZip(Path structure, String prefix) throws IOException {
    Path zip = createTempDir().resolve(structure.getFileName() + ".zip");
    try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) {
        Files.walkFileTree(structure, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                // NOTE(review): entry names use Path.toString separators of the test file
                // system — presumably acceptable for these tests; confirm for windows Jimfs.
                String target = (prefix == null ? "" : prefix + "/") + structure.relativize(file).toString();
                stream.putNextEntry(new ZipEntry(target));
                Files.copy(file, stream);
                return FileVisitResult.CONTINUE;
            }
        });
    }
    return zip;
}
/** creates a plugin .zip and returns the url for testing */
static String createPluginUrl(String name, Path structure, String... additionalProps) throws IOException {
    final Path zip = createPlugin(name, structure, additionalProps);
    return zip.toUri().toURL().toString();
}
/** creates a meta plugin .zip and returns the url for testing */
static String createMetaPluginUrl(String name, Path structure) throws IOException {
    final Path zip = createMetaPlugin(name, structure);
    return zip.toUri().toURL().toString();
}
/** Writes a meta-plugin properties file (description + name) into {@code structure}. */
static void writeMetaPlugin(String name, Path structure) throws IOException {
    PluginTestUtil.writeMetaPluginProperties(structure, "description", "fake desc", "name", name);
}
/** Writes plugin properties and a fake plugin.jar for a plugin named {@code name} into {@code structure}. */
static void writePlugin(String name, Path structure, String... additionalProps) throws IOException {
    final Stream<String> requiredProps = Stream.of(
        "description", "fake desc",
        "name", name,
        "version", "1.0",
        "elasticsearch.version", Version.CURRENT.toString(),
        "java.version", System.getProperty("java.specification.version"),
        "classname", "FakePlugin");
    final String[] properties = Stream.concat(requiredProps, Arrays.stream(additionalProps)).toArray(String[]::new);
    PluginTestUtil.writePluginProperties(structure, properties);
    // e.g. "fake" -> "FakePlugin"
    final String className = name.substring(0, 1).toUpperCase(Locale.ENGLISH) + name.substring(1) + "Plugin";
    writeJar(structure.resolve("plugin.jar"), className);
}
/** Writes a plugin-security.policy granting the given RuntimePermissions into {@code pluginDir}. */
static void writePluginSecurityPolicy(Path pluginDir, String... permissions) throws IOException {
    final String grants = Arrays.stream(permissions)
        .map(permission -> "permission java.lang.RuntimePermission \"" + permission + "\";")
        .collect(Collectors.joining());
    final String policy = "grant {\n " + grants + "\n};\n";
    Files.write(pluginDir.resolve("plugin-security.policy"), policy.getBytes(StandardCharsets.UTF_8));
}
/** Writes the plugin files for {@code name} into {@code structure} and returns the zipped result. */
static Path createPlugin(String name, Path structure, String... additionalProps) throws IOException {
    writePlugin(name, structure, additionalProps);
    final Path zip = writeZip(structure, null);
    return zip;
}
/** Writes the meta-plugin properties for {@code name} into {@code structure} and returns the zipped result. */
static Path createMetaPlugin(String name, Path structure) throws IOException {
    writeMetaPlugin(name, structure);
    final Path zip = writeZip(structure, null);
    return zip;
}
/** Installs {@code pluginUrl} using the command that skips the jar-hell check. */
void installPlugin(String pluginUrl, Path home) throws Exception {
    installPlugin(pluginUrl, home, skipJarHellCommand);
}
/** Installs {@code pluginUrl} into a freshly-built environment rooted at {@code home}. */
void installPlugin(String pluginUrl, Path home, InstallPluginCommand command) throws Exception {
    final Settings settings = Settings.builder().put("path.home", home).build();
    final Environment environment = TestEnvironment.newEnvironment(settings);
    command.execute(terminal, pluginUrl, false, environment);
}
/** Asserts plugin {@code name}, bundled inside {@code metaPlugin}, was installed correctly. */
void assertMetaPlugin(String metaPlugin, String name, Path original, Environment env) throws IOException {
    // Bundled plugins live under plugins/<metaPlugin>/<name>.
    assertPluginInternal(name, env.pluginsFile().resolve(metaPlugin));
    assertConfigAndBin(metaPlugin, original, env);
}
/** Asserts plugin {@code name} was installed correctly and no staging directories remain. */
void assertPlugin(String name, Path original, Environment env) throws IOException {
    assertPluginInternal(name, env.pluginsFile());
    assertConfigAndBin(name, original, env);
    assertInstallCleaned(env);
}
/** Asserts the installed plugin dir exists under {@code pluginsFile} with the expected contents. */
void assertPluginInternal(String name, Path pluginsFile) throws IOException {
    final Path pluginPath = pluginsFile.resolve(name);
    assertTrue("dir " + name + " exists", Files.exists(pluginPath));
    if (isPosix) {
        // Installed plugin directories must be readable/executable by everyone, writable by owner.
        final Set<PosixFilePermission> perms = Files.getPosixFilePermissions(pluginPath);
        assertThat(
            perms,
            containsInAnyOrder(
                PosixFilePermission.OWNER_READ,
                PosixFilePermission.OWNER_WRITE,
                PosixFilePermission.OWNER_EXECUTE,
                PosixFilePermission.GROUP_READ,
                PosixFilePermission.GROUP_EXECUTE,
                PosixFilePermission.OTHERS_READ,
                PosixFilePermission.OTHERS_EXECUTE));
    }
    assertTrue("jar was copied", Files.exists(pluginPath.resolve("plugin.jar")));
    assertFalse("bin was not copied", Files.exists(pluginPath.resolve("bin")));
    assertFalse("config was not copied", Files.exists(pluginPath.resolve("config")));
}
/**
 * Asserts that the plugin's bin/ and config/ directories (if present in {@code original})
 * were installed under the environment's bin and config locations, with the expected file
 * types, permissions and (for config) ownership on POSIX file systems.
 */
void assertConfigAndBin(String name, Path original, Environment env) throws IOException {
    if (Files.exists(original.resolve("bin"))) {
        Path binDir = env.binFile().resolve(name);
        assertTrue("bin dir exists", Files.exists(binDir));
        assertTrue("bin is a dir", Files.isDirectory(binDir));
        PosixFileAttributes binAttributes = null;
        if (isPosix) {
            binAttributes = Files.readAttributes(env.binFile(), PosixFileAttributes.class);
        }
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(binDir)) {
            for (Path file : stream) {
                // bin entries must be plain files with the canonical bin-file permissions.
                assertFalse("not a dir", Files.isDirectory(file));
                if (isPosix) {
                    PosixFileAttributes attributes = Files.readAttributes(file, PosixFileAttributes.class);
                    assertEquals(InstallPluginCommand.BIN_FILES_PERMS, attributes.permissions());
                }
            }
        }
    }
    if (Files.exists(original.resolve("config"))) {
        Path configDir = env.configFile().resolve(name);
        assertTrue("config dir exists", Files.exists(configDir));
        assertTrue("config is a dir", Files.isDirectory(configDir));
        UserPrincipal user = null;
        GroupPrincipal group = null;
        if (isPosix) {
            // Config files must inherit the owner/group of the environment's config dir.
            PosixFileAttributes configAttributes =
                Files.getFileAttributeView(env.configFile(), PosixFileAttributeView.class).readAttributes();
            user = configAttributes.owner();
            group = configAttributes.group();
            PosixFileAttributes attributes = Files.getFileAttributeView(configDir, PosixFileAttributeView.class).readAttributes();
            assertThat(attributes.owner(), equalTo(user));
            assertThat(attributes.group(), equalTo(group));
        }
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(configDir)) {
            for (Path file : stream) {
                assertFalse("not a dir", Files.isDirectory(file));
                if (isPosix) {
                    PosixFileAttributes attributes = Files.readAttributes(file, PosixFileAttributes.class);
                    if (user != null) {
                        assertThat(attributes.owner(), equalTo(user));
                    }
                    if (group != null) {
                        assertThat(attributes.group(), equalTo(group));
                    }
                }
            }
        }
    }
}
/** Asserts no leftover ".installing" staging directories remain under the plugins dir. */
void assertInstallCleaned(Environment env) throws IOException {
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) {
        for (Path candidate : stream) {
            final String fileName = candidate.getFileName().toString();
            if (fileName.startsWith(".installing") == false) {
                continue;
            }
            fail("Installation dir still exists, " + candidate);
        }
    }
}
/** A null plugin id must be rejected up front with a clear error message. */
public void testMissingPluginId() throws IOException {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final UserException exception =
        expectThrows(UserException.class, () -> installPlugin(null, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("plugin id is required"));
}
/** Happy path: a simple plugin zip installs cleanly. */
public void testSomethingWorks() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String pluginZip = createPluginUrl("fake", pluginDirectory);
    installPlugin(pluginZip, environment.v1());
    assertPlugin("fake", pluginDirectory, environment.v2());
}
/** A meta plugin bundling two regular plugins installs both under its own directory. */
public void testWithMetaPlugin() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Files.createDirectory(pluginDir.resolve("fake1"));
    writePlugin("fake1", pluginDir.resolve("fake1"));
    Files.createDirectory(pluginDir.resolve("fake2"));
    writePlugin("fake2", pluginDir.resolve("fake2"));
    String pluginZip = createMetaPluginUrl("my_plugins", pluginDir);
    installPlugin(pluginZip, env.v1());
    // Both bundled plugins must land under plugins/my_plugins/.
    assertMetaPlugin("my_plugins", "fake1", pluginDir, env.v2());
    assertMetaPlugin("my_plugins", "fake2", pluginDir, env.v2());
}
/**
 * A leftover ".removing-*" directory from a failed plugin removal must block any new install
 * (plain or meta) with a message telling the user how to finish the removal.
 */
public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path metaDir = createPluginDir(temp);
    Path pluginDir = metaDir.resolve("fake");
    String pluginZip = createPluginUrl("fake", pluginDir);
    // Simulate the残 leftover of a failed removal of a plugin named "failed".
    final Path removing = env.v2().pluginsFile().resolve(".removing-failed");
    Files.createDirectory(removing);
    final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1()));
    final String expected = String.format(
        Locale.ROOT,
        "found file [%s] from a failed attempt to remove the plugin [failed]; execute [elasticsearch-plugin remove failed]",
        removing);
    assertThat(e, hasToString(containsString(expected)));
    // test with meta plugin
    String metaZip = createMetaPluginUrl("my_plugins", metaDir);
    final IllegalStateException e1 = expectThrows(IllegalStateException.class, () -> installPlugin(metaZip, env.v1()));
    assertThat(e1, hasToString(containsString(expected)));
}
/** Installing from a file URL whose path contains a space must work. */
public void testSpaceInUrl() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    String pluginZip = createPluginUrl("fake", pluginDir);
    // Copy the zip to a path with a space, then install from that path's URL.
    Path pluginZipWithSpaces = createTempFile("foo bar", ".zip");
    try (InputStream in = FileSystemUtils.openFileURLStream(new URL(pluginZip))) {
        Files.copy(in, pluginZipWithSpaces, StandardCopyOption.REPLACE_EXISTING);
    }
    installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env.v1());
    assertPlugin("fake", pluginDir, env.v2());
}
/** A malformed URL that merely resembles maven coordinates must fail as a URL, not as maven. */
public void testMalformedUrlNotMaven() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    // has two colons, so it appears similar to maven coordinates
    final MalformedURLException exception =
        expectThrows(MalformedURLException.class, () -> installPlugin("://host:1234", environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("no protocol"));
}
/** A windows-style file: path with a drive colon must not be treated as maven coordinates. */
public void testFileNotMaven() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    String dir = randomAlphaOfLength(10) + ":" + randomAlphaOfLength(5) + "\\" + randomAlphaOfLength(5);
    Exception e = expectThrows(Exception.class,
        // has two colons, so it appears similar to maven coordinates
        () -> installPlugin("file:" + dir, env.v1()));
    // The error should reference the file path, not a maven download.
    assertFalse(e.getMessage(), e.getMessage().contains("maven.org"));
    assertTrue(e.getMessage(), e.getMessage().contains(dir));
}
/** An unrecognized official plugin name must be rejected. */
public void testUnknownPlugin() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final UserException exception =
        expectThrows(UserException.class, () -> installPlugin("foo", environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("Unknown plugin foo"));
}
/** Installation into an unwritable plugins dir fails with an IOException naming that dir. */
public void testPluginsDirReadOnly() throws Exception {
    // Needs a real POSIX file system so permissions are actually enforced.
    assumeTrue("posix and filesystem", isPosix && isReal);
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) {
        // Strip all permissions so the install cannot write; resetter restores them on close.
        pluginsAttrs.setPermissions(new HashSet<>());
        String pluginZip = createPluginUrl("fake", pluginDir);
        IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1()));
        assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString()));
    }
    assertInstallCleaned(env.v2());
}
/** Installing a plugin named after a built-in module (lang-painless) must be rejected. */
public void testBuiltinModule() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String pluginZip = createPluginUrl("lang-painless", pluginDirectory);
    final UserException exception =
        expectThrows(UserException.class, () -> installPlugin(pluginZip, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("is a system module"));
    assertInstallCleaned(environment.v2());
}
/** A duplicate class across two jars in the plugin must trip the jar-hell check. */
public void testJarHell() throws Exception {
    // jar hell test needs a real filesystem
    assumeTrue("real filesystem", isReal);
    Tuple<Path, Environment> environment = createEnv(fs, temp);
    Path pluginDirectory = createPluginDir(temp);
    // other.jar duplicates FakePlugin, which plugin.jar (written below) also contains.
    writeJar(pluginDirectory.resolve("other.jar"), "FakePlugin");
    String pluginZip = createPluginUrl("fake", pluginDirectory); // adds plugin.jar with FakePlugin
    // Use defaultCommand so the jar-hell check actually runs.
    IllegalStateException e = expectThrows(IllegalStateException.class,
        () -> installPlugin(pluginZip, environment.v1(), defaultCommand));
    assertTrue(e.getMessage(), e.getMessage().contains("jar hell"));
    assertInstallCleaned(environment.v2());
}
/** Jar hell inside one bundled plugin of a meta plugin must fail the whole install. */
public void testJarHellInMetaPlugin() throws Exception {
    // jar hell test needs a real filesystem
    assumeTrue("real filesystem", isReal);
    Tuple<Path, Environment> environment = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Files.createDirectory(pluginDir.resolve("fake1"));
    writePlugin("fake1", pluginDir.resolve("fake1"));
    Files.createDirectory(pluginDir.resolve("fake2"));
    writePlugin("fake2", pluginDir.resolve("fake2")); // adds plugin.jar with Fake2Plugin
    // other.jar duplicates Fake2Plugin within the fake2 bundled plugin.
    writeJar(pluginDir.resolve("fake2").resolve("other.jar"), "Fake2Plugin");
    String pluginZip = createMetaPluginUrl("my_plugins", pluginDir);
    // Use defaultCommand so the jar-hell check actually runs.
    IllegalStateException e = expectThrows(IllegalStateException.class,
        () -> installPlugin(pluginZip, environment.v1(), defaultCommand));
    assertTrue(e.getMessage(), e.getMessage().contains("jar hell"));
    assertInstallCleaned(environment.v2());
}
/** Two plugins sharing the same FakePlugin class name must install independently. */
public void testIsolatedPlugins() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    // these both share the same FakePlugin class
    final Path firstPluginDir = createPluginDir(temp);
    final String firstPluginZip = createPluginUrl("fake1", firstPluginDir);
    installPlugin(firstPluginZip, environment.v1());
    final Path secondPluginDir = createPluginDir(temp);
    final String secondPluginZip = createPluginUrl("fake2", secondPluginDir);
    installPlugin(secondPluginZip, environment.v1());
    assertPlugin("fake1", firstPluginDir, environment.v2());
    assertPlugin("fake2", secondPluginDir, environment.v2());
}
/** Reinstalling an already-installed plugin must fail and leave no staging dirs behind. */
public void testExistingPlugin() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String pluginZip = createPluginUrl("fake", pluginDirectory);
    installPlugin(pluginZip, environment.v1());
    final UserException exception =
        expectThrows(UserException.class, () -> installPlugin(pluginZip, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("already exists"));
    assertInstallCleaned(environment.v2());
}
/**
 * Reinstalling an existing plugin fails; so does installing a meta plugin that bundles a
 * plugin whose name is already installed.
 */
public void testExistingMetaPlugin() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path metaZip = createPluginDir(temp);
    Path pluginDir = metaZip.resolve("fake");
    Files.createDirectory(pluginDir);
    String pluginZip = createPluginUrl("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("already exists"));
    assertInstallCleaned(env.v2());
    // A meta plugin bundling the already-installed "fake" must also be rejected.
    String anotherZip = createMetaPluginUrl("another_plugins", metaZip);
    e = expectThrows(UserException.class, () -> installPlugin(anotherZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("already exists"));
    assertInstallCleaned(env.v2());
}
/** A plugin with a bin/ script installs cleanly, with the script placed under ES bin. */
public void testBin() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path binDirectory = pluginDirectory.resolve("bin");
    Files.createDirectory(binDirectory);
    Files.createFile(binDirectory.resolve("somescript"));
    final String pluginZip = createPluginUrl("fake", pluginDirectory);
    installPlugin(pluginZip, environment.v1());
    assertPlugin("fake", pluginDirectory, environment.v2());
}
/** A meta plugin whose bundled plugin has a bin/ script installs cleanly. */
public void testMetaBin() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path metaDir = createPluginDir(temp);
    Path pluginDir = metaDir.resolve("fake");
    Files.createDirectory(pluginDir);
    writePlugin("fake", pluginDir);
    // Give the bundled plugin a bin/ script.
    Path binDir = pluginDir.resolve("bin");
    Files.createDirectory(binDir);
    Files.createFile(binDir.resolve("somescript"));
    String pluginZip = createMetaPluginUrl("my_plugins", metaDir);
    installPlugin(pluginZip, env.v1());
    assertMetaPlugin("my_plugins","fake", pluginDir, env.v2());
}
/** "bin" must be a directory, not a plain file — checked for both plain and meta plugins. */
public void testBinNotDir() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path metaDir = createPluginDir(temp);
    Path pluginDir = metaDir.resolve("fake");
    Files.createDirectory(pluginDir);
    Path binDir = pluginDir.resolve("bin");
    // Create bin as a file instead of a directory.
    Files.createFile(binDir);
    String pluginZip = createPluginUrl("fake", pluginDir);
    UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
    assertInstallCleaned(env.v2());
    // Same failure when the broken plugin is bundled inside a meta plugin.
    String metaZip = createMetaPluginUrl("my_plugins", metaDir);
    e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
    assertInstallCleaned(env.v2());
}
/** Subdirectories inside bin/ are rejected — checked for both plain and meta plugins. */
public void testBinContainsDir() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path metaDir = createPluginDir(temp);
    Path pluginDir = metaDir.resolve("fake");
    Files.createDirectory(pluginDir);
    // Put a script inside a nested directory under bin/.
    Path dirInBinDir = pluginDir.resolve("bin").resolve("foo");
    Files.createDirectories(dirInBinDir);
    Files.createFile(dirInBinDir.resolve("somescript"));
    String pluginZip = createPluginUrl("fake", pluginDir);
    UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin"));
    assertInstallCleaned(env.v2());
    // Same failure when the broken plugin is bundled inside a meta plugin.
    String metaZip = createMetaPluginUrl("my_plugins", metaDir);
    e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin"));
    assertInstallCleaned(env.v2());
}
/** A plugin named "elasticsearch" would collide with bin/elasticsearch and must fail. */
public void testBinConflict() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path binDirectory = pluginDirectory.resolve("bin");
    Files.createDirectory(binDirectory);
    Files.createFile(binDirectory.resolve("somescript"));
    final String pluginZip = createPluginUrl("elasticsearch", pluginDirectory);
    final FileAlreadyExistsException exception =
        expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, environment.v1()));
    assertTrue(
        exception.getMessage(),
        exception.getMessage().contains(environment.v2().binFile().resolve("elasticsearch").toString()));
    assertInstallCleaned(environment.v2());
}
/** Installation must force the expected execute permissions on installed bin scripts. */
public void testBinPermissions() throws Exception {
    assumeTrue("posix filesystem", isPosix);
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Path binDir = pluginDir.resolve("bin");
    Files.createDirectory(binDir);
    Files.createFile(binDir.resolve("somescript"));
    String pluginZip = createPluginUrl("fake", pluginDir);
    try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) {
        Set<PosixFilePermission> perms = binAttrs.getCopyPermissions();
        // make sure at least one execute perm is missing, so we know we forced it during installation
        perms.remove(PosixFilePermission.GROUP_EXECUTE);
        binAttrs.setPermissions(perms);
        installPlugin(pluginZip, env.v1());
        assertPlugin("fake", pluginDir, env.v2());
    }
}
/** Same as {@code testBinPermissions}, but for a bin script inside a meta plugin's bundled plugin. */
public void testMetaBinPermissions() throws Exception {
    assumeTrue("posix filesystem", isPosix);
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path metaDir = createPluginDir(temp);
    Path pluginDir = metaDir.resolve("fake");
    Files.createDirectory(pluginDir);
    writePlugin("fake", pluginDir);
    Path binDir = pluginDir.resolve("bin");
    Files.createDirectory(binDir);
    Files.createFile(binDir.resolve("somescript"));
    String pluginZip = createMetaPluginUrl("my_plugins", metaDir);
    try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) {
        Set<PosixFilePermission> perms = binAttrs.getCopyPermissions();
        // make sure at least one execute perm is missing, so we know we forced it during installation
        perms.remove(PosixFilePermission.GROUP_EXECUTE);
        binAttrs.setPermissions(perms);
        installPlugin(pluginZip, env.v1());
        assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2());
    }
}
// Verifies the permission layout of an installed plugin: directories and platform
// executables are 755, regular files (jars, resources) are 644. POSIX filesystems only.
public void testPluginPermissions() throws Exception {
assumeTrue("posix filesystem", isPosix);
final Tuple<Path, Environment> env = createEnv(fs, temp);
final Path pluginDir = createPluginDir(temp);
final Path resourcesDir = pluginDir.resolve("resources");
final Path platformDir = pluginDir.resolve("platform");
final Path platformNameDir = platformDir.resolve("linux-x86_64");
final Path platformBinDir = platformNameDir.resolve("bin");
Files.createDirectories(platformBinDir);
Files.createFile(pluginDir.resolve("fake-" + Version.CURRENT.toString() + ".jar"));
Files.createFile(platformBinDir.resolve("fake_executable"));
Files.createDirectory(resourcesDir);
Files.createFile(resourcesDir.resolve("resource"));
final String pluginZip = createPluginUrl("fake", pluginDir);
installPlugin(pluginZip, env.v1());
assertPlugin("fake", pluginDir, env.v2());
final Path fake = env.v2().pluginsFile().resolve("fake");
final Path resources = fake.resolve("resources");
final Path platform = fake.resolve("platform");
final Path platformName = platform.resolve("linux-x86_64");
final Path bin = platformName.resolve("bin");
assert755(fake);
assert644(fake.resolve("fake-" + Version.CURRENT + ".jar"));
assert755(resources);
assert644(resources.resolve("resource"));
assert755(platform);
assert755(platformName);
// platform binaries must be executable
assert755(bin.resolve("fake_executable"));
}
/**
 * Asserts {@code path} has exactly rw-r--r-- (644) POSIX permissions:
 * owner read/write, group and others read-only, no execute bits anywhere.
 */
private void assert644(final Path path) throws IOException {
final Set<PosixFilePermission> permissions = Files.getPosixFilePermissions(path);
assertTrue(permissions.contains(PosixFilePermission.OWNER_READ));
assertTrue(permissions.contains(PosixFilePermission.OWNER_WRITE));
assertFalse(permissions.contains(PosixFilePermission.OWNER_EXECUTE));
assertTrue(permissions.contains(PosixFilePermission.GROUP_READ));
assertFalse(permissions.contains(PosixFilePermission.GROUP_WRITE));
assertFalse(permissions.contains(PosixFilePermission.GROUP_EXECUTE));
assertTrue(permissions.contains(PosixFilePermission.OTHERS_READ));
assertFalse(permissions.contains(PosixFilePermission.OTHERS_WRITE));
assertFalse(permissions.contains(PosixFilePermission.OTHERS_EXECUTE));
}
/**
 * Asserts {@code path} has exactly rwxr-xr-x (755) POSIX permissions:
 * owner read/write/execute, group and others read/execute, no group/other write.
 */
private void assert755(final Path path) throws IOException {
final Set<PosixFilePermission> permissions = Files.getPosixFilePermissions(path);
assertTrue(permissions.contains(PosixFilePermission.OWNER_READ));
assertTrue(permissions.contains(PosixFilePermission.OWNER_WRITE));
assertTrue(permissions.contains(PosixFilePermission.OWNER_EXECUTE));
assertTrue(permissions.contains(PosixFilePermission.GROUP_READ));
assertFalse(permissions.contains(PosixFilePermission.GROUP_WRITE));
assertTrue(permissions.contains(PosixFilePermission.GROUP_EXECUTE));
assertTrue(permissions.contains(PosixFilePermission.OTHERS_READ));
assertFalse(permissions.contains(PosixFilePermission.OTHERS_WRITE));
assertTrue(permissions.contains(PosixFilePermission.OTHERS_EXECUTE));
}
// A plugin shipping a config/ directory installs cleanly; assertPlugin checks
// the config files end up in the environment's config dir.
public void testConfig() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Path configDir = pluginDir.resolve("config");
Files.createDirectory(configDir);
Files.createFile(configDir.resolve("custom.yml"));
String pluginZip = createPluginUrl("fake", pluginDir);
installPlugin(pluginZip, env.v1());
assertPlugin("fake", pluginDir, env.v2());
}
// Pre-existing user config must NOT be overwritten by a plugin's bundled config;
// new config files the user does not have yet are still copied in.
public void testExistingConfig() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path envConfigDir = env.v2().configFile().resolve("fake");
Files.createDirectories(envConfigDir);
Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8));
Path pluginDir = createPluginDir(temp);
Path configDir = pluginDir.resolve("config");
Files.createDirectory(configDir);
Files.write(configDir.resolve("custom.yml"), "new config".getBytes(StandardCharsets.UTF_8));
Files.createFile(configDir.resolve("other.yml"));
String pluginZip = createPluginUrl("fake", pluginDir);
installPlugin(pluginZip, env.v1());
assertPlugin("fake", pluginDir, env.v2());
// the user's original custom.yml survives ...
List<String> configLines = Files.readAllLines(envConfigDir.resolve("custom.yml"), StandardCharsets.UTF_8);
assertEquals(1, configLines.size());
assertEquals("existing config", configLines.get(0));
// ... while the previously-absent other.yml was added
assertTrue(Files.exists(envConfigDir.resolve("other.yml")));
}
// Meta-plugin variant of testExistingConfig: user config under the meta-plugin's
// config dir is preserved, new files are added.
public void testExistingMetaConfig() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path envConfigDir = env.v2().configFile().resolve("my_plugins");
Files.createDirectories(envConfigDir);
Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8));
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
writePlugin("fake", pluginDir);
Path configDir = pluginDir.resolve("config");
Files.createDirectory(configDir);
Files.write(configDir.resolve("custom.yml"), "new config".getBytes(StandardCharsets.UTF_8));
Files.createFile(configDir.resolve("other.yml"));
String pluginZip = createMetaPluginUrl("my_plugins", metaDir);
installPlugin(pluginZip, env.v1());
assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2());
List<String> configLines = Files.readAllLines(envConfigDir.resolve("custom.yml"), StandardCharsets.UTF_8);
assertEquals(1, configLines.size());
assertEquals("existing config", configLines.get(0));
assertTrue(Files.exists(envConfigDir.resolve("other.yml")));
}
// A "config" entry that is a regular file (not a directory) must be rejected
// with a UserException, for both a plain plugin and a meta-plugin wrapping it.
public void testConfigNotDir() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectories(pluginDir);
Path configDir = pluginDir.resolve("config");
Files.createFile(configDir);
String pluginZip = createPluginUrl("fake", pluginDir);
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
assertInstallCleaned(env.v2());
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
assertInstallCleaned(env.v2());
}
// Subdirectories inside a plugin's config/ are not allowed; install must abort
// with a UserException and clean up.
public void testConfigContainsDir() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Path dirInConfigDir = pluginDir.resolve("config").resolve("foo");
Files.createDirectories(dirInConfigDir);
Files.createFile(dirInConfigDir.resolve("myconfig.yml"));
String pluginZip = createPluginUrl("fake", pluginDir);
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in config dir for plugin"));
assertInstallCleaned(env.v2());
}
// A zip without plugin-descriptor.properties fails with NoSuchFileException that
// names the missing descriptor, for both plain and meta-plugin installs.
public void testMissingDescriptor() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
Files.createFile(pluginDir.resolve("fake.yml"));
String pluginZip = writeZip(pluginDir, null).toUri().toURL().toString();
NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties"));
assertInstallCleaned(env.v2());
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
e = expectThrows(NoSuchFileException.class, () -> installPlugin(metaZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties"));
assertInstallCleaned(env.v2());
}
// Old-style zips that wrap the plugin in an intermediate "elasticsearch" directory
// must be rejected with a message pointing at the outdated structure.
public void testContainsIntermediateDirectory() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Files.createFile(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES));
String pluginZip = writeZip(pluginDir, "elasticsearch").toUri().toURL().toString();
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertThat(e.getMessage(), containsString("This plugin was built with an older plugin structure"));
assertInstallCleaned(env.v2());
}
// Same intermediate-directory rejection, triggered via the meta-plugin descriptor.
public void testContainsIntermediateDirectoryMeta() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Files.createFile(pluginDir.resolve(MetaPluginInfo.ES_META_PLUGIN_PROPERTIES));
String pluginZip = writeZip(pluginDir, "elasticsearch").toUri().toURL().toString();
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertThat(e.getMessage(), containsString("This plugin was built with an older plugin structure"));
assertInstallCleaned(env.v2());
}
// Zip-slip protection: an entry like "../blah" that would escape the plugin
// directory must abort the install with a UserException.
public void testZipRelativeOutsideEntryName() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path zip = createTempDir().resolve("broken.zip");
try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) {
stream.putNextEntry(new ZipEntry("../blah"));
}
String pluginZip = zip.toUri().toURL().toString();
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory"));
assertInstallCleaned(env.v2());
}
// The official plugin list printed by --help must be alphabetically sorted.
// Fix: the original NPE'd (line.endsWith / prev.compareTo on null) instead of
// failing clearly when the list marker was missing or the list was empty.
public void testOfficialPluginsHelpSorted() throws Exception {
MockTerminal terminal = new MockTerminal();
new InstallPluginCommand() {
@Override
protected boolean addShutdownHook() {
return false;
}
}.main(new String[] { "--help" }, terminal);
try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) {
String line = reader.readLine();
// first find the beginning of our list of official plugins
while (line != null && line.endsWith("may be installed by name:") == false) {
line = reader.readLine();
}
assertNotNull("expected official plugin list header in --help output", line);
// now check each line compares greater than the last, until we reach an empty line
String prev = reader.readLine();
assertNotNull("expected at least one official plugin in --help output", prev);
line = reader.readLine();
while (line != null && line.trim().isEmpty() == false) {
assertTrue(prev + " < " + line, prev.compareTo(line) < 0);
prev = line;
line = reader.readLine();
}
}
}
// The --help output's official plugin list must mention x-pack.
public void testOfficialPluginsIncludesXpack() throws Exception {
MockTerminal terminal = new MockTerminal();
new InstallPluginCommand() {
@Override
protected boolean addShutdownHook() {
// disabled so the test JVM does not accumulate hooks
return false;
}
}.main(new String[] { "--help" }, terminal);
assertTrue(terminal.getOutput(), terminal.getOutput().contains("x-pack"));
}
// Misspelled official plugin names produce "did you mean" suggestions;
// completely unknown names produce a plain unknown-plugin error.
public void testInstallMisspelledOfficialPlugins() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
UserException e = expectThrows(UserException.class, () -> installPlugin("xpack", env.v1()));
assertThat(e.getMessage(), containsString("Unknown plugin xpack, did you mean [x-pack]?"));
e = expectThrows(UserException.class, () -> installPlugin("analysis-smartnc", env.v1()));
assertThat(e.getMessage(), containsString("Unknown plugin analysis-smartnc, did you mean [analysis-smartcn]?"));
e = expectThrows(UserException.class, () -> installPlugin("repository", env.v1()));
assertThat(e.getMessage(), containsString("Unknown plugin repository, did you mean any of [repository-s3, repository-gcs]?"));
e = expectThrows(UserException.class, () -> installPlugin("unknown_plugin", env.v1()));
assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin"));
}
// With --batch, the permissions warning is printed but no confirmation is requested.
public void testBatchFlag() throws Exception {
MockTerminal terminal = new MockTerminal();
installPlugin(terminal, true);
assertThat(terminal.getOutput(), containsString("WARNING: plugin requires additional permissions"));
}
// At NORMAL/VERBOSE verbosity the download progress ("100%") is shown.
public void testQuietFlagDisabled() throws Exception {
MockTerminal terminal = new MockTerminal();
terminal.setVerbosity(randomFrom(Terminal.Verbosity.NORMAL, Terminal.Verbosity.VERBOSE));
installPlugin(terminal, false);
assertThat(terminal.getOutput(), containsString("100%"));
}
// At SILENT verbosity the download progress ("100%") is suppressed.
public void testQuietFlagEnabled() throws Exception {
MockTerminal terminal = new MockTerminal();
terminal.setVerbosity(Terminal.Verbosity.SILENT);
installPlugin(terminal, false);
assertThat(terminal.getOutput(), not(containsString("100%")));
}
// Installing over an existing plugin directory fails with a message telling the
// user to remove the old plugin first.
public void testPluginAlreadyInstalled() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir);
installPlugin(pluginZip, env.v1());
final UserException e = expectThrows(UserException.class,
() -> installPlugin(pluginZip, env.v1(), randomFrom(skipJarHellCommand, defaultCommand)));
assertThat(
e.getMessage(),
equalTo("plugin directory [" + env.v2().pluginsFile().resolve("fake") + "] already exists; " +
"if you need to update the plugin, uninstall it first using command 'remove fake'"));
}
// A meta-plugin containing a plugin that is already installed standalone must be
// rejected with the same already-exists message for the clashing plugin.
public void testMetaPluginAlreadyInstalled() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
{
// install fake plugin
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir);
installPlugin(pluginZip, env.v1());
}
Path pluginDir = createPluginDir(temp);
Files.createDirectory(pluginDir.resolve("fake"));
writePlugin("fake", pluginDir.resolve("fake"));
Files.createDirectory(pluginDir.resolve("other"));
writePlugin("other", pluginDir.resolve("other"));
String metaZip = createMetaPluginUrl("meta", pluginDir);
final UserException e = expectThrows(UserException.class,
() -> installPlugin(metaZip, env.v1(), randomFrom(skipJarHellCommand, defaultCommand)));
assertThat(
e.getMessage(),
equalTo("plugin directory [" + env.v2().pluginsFile().resolve("fake") + "] already exists; " +
"if you need to update the plugin, uninstall it first using command 'remove fake'"));
}
/**
 * Installs a fresh "fake" plugin through {@code skipJarHellCommand} using the
 * given terminal. When {@code isBatch} is true a security policy is bundled so
 * the batch-mode permissions warning is exercised.
 */
private void installPlugin(MockTerminal terminal, boolean isBatch) throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
// if batch is enabled, we also want to add a security policy
if (isBatch) {
writePluginSecurityPolicy(pluginDir, "setFactory");
}
String pluginZip = createPlugin("fake", pluginDir).toUri().toURL().toString();
skipJarHellCommand.execute(terminal, pluginZip, isBatch, env.v2());
}
/**
 * Installs {@code pluginId} through a command whose network access is stubbed out:
 * the "download" copies a locally built zip, and the checksum URL is served from a
 * file generated by {@code shaCalculator}. Asserts the plugin ends up installed.
 *
 * @param pluginId      id passed to the install command (official name or maven coords)
 * @param name          expected installed plugin name
 * @param url           the exact download URL the command must request
 * @param stagingHash   staging hash the command should report, or null
 * @param shaExtension  checksum file extension served (e.g. ".sha512")
 * @param shaCalculator computes the checksum-file contents from the zip bytes;
 *                      may return an intentionally wrong value for negative tests
 */
void assertInstallPluginFromUrl(String pluginId, String name, String url, String stagingHash,
String shaExtension, Function<byte[], String> shaCalculator) throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Path pluginZip = createPlugin(name, pluginDir);
InstallPluginCommand command = new InstallPluginCommand() {
@Override
Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException {
assertEquals(url, urlString);
Path downloadedPath = tmpDir.resolve("downloaded.zip");
Files.copy(pluginZip, downloadedPath);
return downloadedPath;
}
@Override
URL openUrl(String urlString) throws Exception {
String expectedUrl = url + shaExtension;
if (expectedUrl.equals(urlString)) {
// calc sha and return file URL to it
Path shaFile = temp.apply("shas").resolve("downloaded.zip" + shaExtension);
byte[] zipbytes = Files.readAllBytes(pluginZip);
String checksum = shaCalculator.apply(zipbytes);
Files.write(shaFile, checksum.getBytes(StandardCharsets.UTF_8));
return shaFile.toUri().toURL();
}
// any other URL (e.g. a fallback checksum extension) is reported missing
return null;
}
@Override
boolean urlExists(Terminal terminal, String urlString) throws IOException {
return urlString.equals(url);
}
@Override
String getStagingHash() {
return stagingHash;
}
@Override
void jarHellCheck(PluginInfo candidateInfo, Path candidate, Path pluginsDir, Path modulesDir) throws Exception {
// no jarhell check
}
};
installPlugin(pluginId, env.v1(), command);
assertPlugin(name, pluginDir, env.v2());
}
/** Convenience overload using the standard SHA-512 checksum with "checksum  filename" format. */
public void assertInstallPluginFromUrl(String pluginId, String name, String url, String stagingHash) throws Exception {
MessageDigest digest = MessageDigest.getInstance("SHA-512");
assertInstallPluginFromUrl(pluginId, name, url, stagingHash, ".sha512", checksumAndFilename(digest, url));
}
// Official plugin id resolves to the artifacts.elastic.co download URL.
// NOTE(review): method name misspells "Official" — kept for test-name stability.
public void testOfficalPlugin() throws Exception {
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null);
}
// With a staging hash set, the official plugin URL points at staging.elastic.co.
public void testOfficalPluginStaging() throws Exception {
String url = "https://staging.elastic.co/" + Version.CURRENT + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
+ Version.CURRENT + ".zip";
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123");
}
// Platform-specific official plugin URLs embed the platform name before the version.
public void testOfficalPlatformPlugin() throws Exception {
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Platforms.PLATFORM_NAME +
"-" + Version.CURRENT + ".zip";
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null);
}
// Staging + platform-specific URL combination.
public void testOfficalPlatformPluginStaging() throws Exception {
String url = "https://staging.elastic.co/" + Version.CURRENT + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
+ Platforms.PLATFORM_NAME + "-"+ Version.CURRENT + ".zip";
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123");
}
// Maven coordinates (group:artifact:version) resolve to a repo1.maven.org URL.
public void testMavenPlugin() throws Exception {
String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null);
}
// Maven coordinates with a platform-qualified artifact name.
public void testMavenPlatformPlugin() throws Exception {
String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-" + Platforms.PLATFORM_NAME + "-1.0.0.zip";
assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null);
}
// Maven plugins without a .sha512 may fall back to .sha1; a warning is printed.
public void testMavenSha1Backcompat() throws Exception {
String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
MessageDigest digest = MessageDigest.getInstance("SHA-1");
assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, ".sha1", checksum(digest));
assertTrue(terminal.getOutput(), terminal.getOutput().contains("sha512 not found, falling back to sha1"));
}
// Official plugins must have a .sha512 — no sha1 fallback: install fails with IO_ERROR.
public void testOfficialShaMissing() throws Exception {
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
MessageDigest digest = MessageDigest.getInstance("SHA-1");
UserException e = expectThrows(UserException.class, () ->
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, ".sha1", checksum(digest)));
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
assertEquals("Plugin checksum missing: " + url + ".sha512", e.getMessage());
}
// A maven plugin missing both sha512 and sha1 checksum files fails with IO_ERROR.
public void testMavenShaMissing() throws Exception {
String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
UserException e = expectThrows(UserException.class, () ->
assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, ".dne", bytes -> null));
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
assertEquals("Plugin checksum missing: " + url + ".sha1", e.getMessage());
}
// A sha512 file lacking the "  filename" suffix is rejected as an invalid checksum file.
public void testInvalidShaFileMissingFilename() throws Exception {
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
MessageDigest digest = MessageDigest.getInstance("SHA-512");
UserException e = expectThrows(UserException.class, () ->
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, ".sha512", checksum(digest)));
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
assertTrue(e.getMessage(), e.getMessage().startsWith("Invalid checksum file"));
}
// A sha512 file naming a different artifact is rejected ("not for this plugin").
public void testInvalidShaFileMismatchFilename() throws Exception {
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
MessageDigest digest = MessageDigest.getInstance("SHA-512");
UserException e = expectThrows(UserException.class, () ->
assertInstallPluginFromUrl(
"analysis-icu",
"analysis-icu",
url,
null,
".sha512",
checksumAndString(digest, " repository-s3-" + Version.CURRENT + ".zip")));
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
assertThat(e, hasToString(matches("checksum file at \\[.*\\] is not for this plugin")));
}
// A sha512 file with trailing extra lines is rejected as an invalid checksum file.
public void testInvalidShaFileContainingExtraLine() throws Exception {
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
MessageDigest digest = MessageDigest.getInstance("SHA-512");
UserException e = expectThrows(UserException.class, () ->
assertInstallPluginFromUrl(
"analysis-icu",
"analysis-icu",
url,
null,
".sha512",
checksumAndString(digest, " analysis-icu-" + Version.CURRENT + ".zip\nfoobar")));
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
assertTrue(e.getMessage(), e.getMessage().startsWith("Invalid checksum file"));
}
// A well-formed sha512 file whose digest does not match the zip fails with a mismatch error.
public void testSha512Mismatch() throws Exception {
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
UserException e = expectThrows(UserException.class, () ->
assertInstallPluginFromUrl(
"analysis-icu",
"analysis-icu",
url,
null,
".sha512",
bytes -> "foobar analysis-icu-" + Version.CURRENT + ".zip"));
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
assertTrue(e.getMessage(), e.getMessage().contains("SHA-512 mismatch, expected foobar"));
}
// SHA-1 fallback path: a wrong sha1 digest also fails with a mismatch error.
public void testSha1Mismatch() throws Exception {
String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
UserException e = expectThrows(UserException.class, () ->
assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, ".sha1", bytes -> "foobar"));
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
assertTrue(e.getMessage(), e.getMessage().contains("SHA-1 mismatch, expected foobar"));
}
// requires.keystore=false: installation must NOT create the keystore.
public void testKeystoreNotRequired() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir, "requires.keystore", "false");
installPlugin(pluginZip, env.v1());
assertFalse(Files.exists(KeyStoreWrapper.keystorePath(env.v2().configFile())));
}
// requires.keystore=true with a pre-existing keystore: the keystore bytes must be untouched.
public void testKeystoreRequiredAlreadyExists() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
KeyStoreWrapper keystore = KeyStoreWrapper.create();
keystore.save(env.v2().configFile(), new char[0]);
byte[] expectedBytes = Files.readAllBytes(KeyStoreWrapper.keystorePath(env.v2().configFile()));
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir, "requires.keystore", "true");
installPlugin(pluginZip, env.v1());
byte[] gotBytes = Files.readAllBytes(KeyStoreWrapper.keystorePath(env.v2().configFile()));
assertArrayEquals("Keystore was modified", expectedBytes, gotBytes);
}
// requires.keystore=true with no keystore yet: installation creates one.
public void testKeystoreRequiredCreated() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir, "requires.keystore", "true");
installPlugin(pluginZip, env.v1());
assertTrue(Files.exists(KeyStoreWrapper.keystorePath(env.v2().configFile())));
}
// A nested plugin's requires.keystore=true also triggers keystore creation when
// installed via its meta-plugin.
public void testKeystoreRequiredCreatedWithMetaPlugin() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
writePlugin("fake", pluginDir, "requires.keystore", "true");
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
installPlugin(metaZip, env.v1());
assertTrue(Files.exists(KeyStoreWrapper.keystorePath(env.v2().configFile())));
}
/** Checksum calculator producing only the bare hex digest (no filename suffix). */
private Function<byte[], String> checksum(final MessageDigest digest) {
    final String noSuffix = "";
    return checksumAndString(digest, noSuffix);
}
/**
 * Checksum calculator producing "&lt;hex digest&gt; &lt;filename&gt;", where the filename is
 * the last path segment of {@code url} — the format official checksum files use.
 */
private Function<byte[], String> checksumAndFilename(final MessageDigest digest, final String url) throws MalformedURLException {
    final String[] pathSegments = URI.create(url).getPath().split("/");
    final String filename = pathSegments[pathSegments.length - 1];
    return checksumAndString(digest, " " + filename);
}
/** Checksum calculator producing the hex digest of the input followed by {@code s}. */
private Function<byte[], String> checksumAndString(final MessageDigest digest, final String s) {
    return bytes -> {
        final String hex = MessageDigests.toHexString(digest.digest(bytes));
        return hex + s;
    };
}
// Interactive policy confirmation for a meta-plugin whose nested plugins request
// extra permissions: empty answer and "n" abort (nothing installed), "y" installs both.
public void testMetaPluginPolicyConfirmation() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path fake1Dir = metaDir.resolve("fake1");
Files.createDirectory(fake1Dir);
writePluginSecurityPolicy(fake1Dir, "setAccessible", "setFactory");
writePlugin("fake1", fake1Dir);
Path fake2Dir = metaDir.resolve("fake2");
Files.createDirectory(fake2Dir);
writePluginSecurityPolicy(fake2Dir, "setAccessible", "accessDeclaredMembers");
writePlugin("fake2", fake2Dir);
String pluginZip = createMetaPluginUrl("meta-plugin", metaDir);
// default answer, does not install
terminal.addTextInput("");
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertEquals("installation aborted by user", e.getMessage());
assertThat(terminal.getOutput(), containsString("WARNING: plugin requires additional permissions"));
try (Stream<Path> fileStream = Files.list(env.v2().pluginsFile())) {
assertThat(fileStream.collect(Collectors.toList()), empty());
}
// explicitly do not install
terminal.reset();
terminal.addTextInput("n");
e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertEquals("installation aborted by user", e.getMessage());
assertThat(terminal.getOutput(), containsString("WARNING: plugin requires additional permissions"));
try (Stream<Path> fileStream = Files.list(env.v2().pluginsFile())) {
assertThat(fileStream.collect(Collectors.toList()), empty());
}
// allow installation
terminal.reset();
terminal.addTextInput("y");
installPlugin(pluginZip, env.v1());
assertThat(terminal.getOutput(), containsString("WARNING: plugin requires additional permissions"));
assertMetaPlugin("meta-plugin", "fake1", metaDir, env.v2());
assertMetaPlugin("meta-plugin", "fake2", metaDir, env.v2());
}
}
| |
/*
* The MIT License
*
* Copyright 2015 user.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.fakekoji.core;
import hudson.plugins.scm.koji.model.Build;
import hudson.plugins.scm.koji.model.BuildProvider;
import hudson.plugins.scm.koji.model.RPM;
import org.fakekoji.core.utils.BuildHelper;
import org.fakekoji.core.utils.DirFilter;
import org.fakekoji.jobmanager.ConfigManager;
import org.fakekoji.storage.StorageException;
import org.fakekoji.xmlrpc.server.JavaServerConstants;
import org.fakekoji.xmlrpc.server.xmlrpcrequestparams.GetBuildDetail;
import org.fakekoji.xmlrpc.server.xmlrpcrequestparams.GetBuildList;
import java.io.File;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
 * Heartbeat of fake koji. This class works over a directory with a structure
 * similar to koji's, and tries to deduce information purely from content
 * and names. Based on those deductions it offers its content and even tags.
 */
public class FakeKojiDB {
private static final Logger LOGGER = Logger.getLogger(JavaServerConstants.FAKE_KOJI_LOGGER);
private final String[] projects;
private final List<FakeBuild> builds;
private final AccessibleSettings settings;
/**
 * Scans the DB file root (project/version/release directory layout) and builds the
 * in-memory list of {@link FakeBuild}s.
 * Fix: {@code File.listFiles} returns {@code null} when a directory is missing or
 * unreadable; the original dereferenced it unconditionally and could NPE.
 * Also fixes the "(re)initizing" typo in the startup log message.
 */
public FakeKojiDB(AccessibleSettings settings) {
    LOGGER.info("(re)initializing fake koji DB");
    this.settings = settings;
    File[] projectDirs = settings.getDbFileRoot().listFiles(new DirFilter());
    if (projectDirs == null) {
        // root missing or unreadable: start with an empty DB rather than NPE
        projectDirs = new File[0];
    }
    projects = new String[projectDirs.length];
    builds = new ArrayList<>();
    // read all projects
    for (int i = 0; i < projectDirs.length; i++) {
        File projectDir = projectDirs[i];
        projects[i] = projectDir.getName();
        // and all builds in those projects
        File[] versions = projectDir.listFiles(new DirFilter());
        if (versions == null) {
            continue;
        }
        for (File version : versions) {
            File[] releases = version.listFiles(new DirFilter());
            if (releases == null) {
                continue;
            }
            for (File release : releases) {
                FakeBuild b = new FakeBuild(projectDir.getName(), version.getName(), release.getName(), release, settings.getProjectMapping());
                builds.add(b);
            }
        }
    }
}
/**
 * Resolves a project name to its synthetic package id, or {@code null} (with a log
 * entry listing all known projects) when the project is unknown.
 * Ids are derived from the name's hashCode; the project count is small enough
 * that collisions are not a practical concern here.
 */
public Integer getPkgId(String requestedProject) {
    StringBuilder attempted = new StringBuilder();
    for (String candidate : projects) {
        attempted.append(" ").append(candidate);
        if (candidate.equals(requestedProject)) {
            return candidate.hashCode();
        }
    }
    LOGGER.info("Unknown project " + requestedProject + ". Tried: " + attempted + ".");
    return null;
}
/**
 * Returns all builds of the given project that pass the old-API filter and do not
 * contain FAILED records. With {@code fakeTags == null} builds carry their default
 * tags; otherwise the supplied tag set is applied.
 * NOTE(review): getProjectID() is compared with == against an Integer — presumably
 * it returns a primitive int (unboxing); verify, since reference comparison of two
 * Integers would be a bug.
 */
public List<Build> getProjectBuilds(Integer projectId, Set<String> fakeTags) {
List<Build> projectBuilds = new ArrayList<>();
for (FakeBuild build : builds) {
if (build.getProjectID() == projectId && isOkForOldApi(build)) {
// reCheck() re-reads the build dir; a FAILED record excludes the build
if (new IsFailedBuild(build.getDir()).reCheck().getLastResult()) {
LOGGER.info("Removing build " + build.toString() + " from result. Contains FAILED records");
continue;
}
if (fakeTags == null) {
projectBuilds.add(build.toBuild());
} else {
projectBuilds.add(build.toBuild(fakeTags));
}
}
}
return projectBuilds;
}
/** Convenience overload: project builds with their default tags (no tag override). */
public List<Build> getProjectBuilds(Integer projectId) {
return getProjectBuilds(projectId, null);
}
/**
 * Looks up a build by its id; returns {@code null} when no build matches.
 */
FakeBuild getBuildById(Integer buildId) {
    return builds.stream()
            .filter(candidate -> candidate.getBuildID() == buildId)
            .findFirst()
            .orElse(null);
}
/**
 * This method is trying to deduce tags from the content of a build. The purpose is
 * that if the build is for some OS only, it should be tagged accordingly.
 * On the contrary, if it is static, then it should pass anywhere.
 *
 * @param buildId Integer
 * @return set of strings; empty when the build id is unknown
 */
public Set<String> getTags(Integer buildId) {
for (FakeBuild build : builds) {
if (build.getBuildID() == buildId) {
return build.getTags();
}
}
return Collections.emptySet();
}
/*
void checkAll() {
for (String project : projects) {
check(project);
}
}
void check(String string) {
Integer id = getPkgId(string);
if (id == null) {
return;
}
ServerLogger.log(string + " id=" + id);
Object[] chBuilds = getProjectBuildsByProjectIdAsMaps(id);
ServerLogger.log(string + " builds#=" + chBuilds.length);
int bn = 0;
for (Object chBuild : chBuilds) {
bn++;
ServerLogger.log("####### " + bn + " #######");
Map m = (Map) chBuild;
Set keys = m.keySet();
for (Object key : keys) {
Object value = m.get(key);
ServerLogger.log(" " + key + ": " + value);
if (key.equals(Constants.build_id)) {
ServerLogger.log(" tags:");
Object[] tags = getTags((Integer) value);
for (Object tag : tags) {
ServerLogger.log(" " + tag);
}
ServerLogger.log("Artifacts for given build");
FakeBuild bld = getBuildById((Integer) value);
bld.printExpectedArchesForThisBuild();
List<String> arches = bld.getArches();
ServerLogger.log(" archs: " + arches.size());
for (String arch : arches) {
ServerLogger.log(" logs: " + arch);
//list logs
List<File> logs = bld.getLogs(arch);
logs.stream().forEach((log) -> {
ServerLogger.log(log.toString());
});
//list others
ServerLogger.log(" all: " + arch);
List<File> files = bld.getNonLogs(arch);
files.stream().forEach((f) -> {
ServerLogger.log(f.toString());
});
}
}
if (key.equals(Constants.rpms)) {
ServerLogger.log(" mapped: ");
Object[] rpms = (Object[]) value;
for (Object rpm : rpms) {
Map mrpms = (Map) rpm;
Set rks = mrpms.keySet();
rks.stream().forEach((rk) -> {
ServerLogger.log(" " + rk + ": " + mrpms.get(rk));
});
}
}
}
}
ServerLogger.log("Artifacts for given project " + string + " " + id);
List<FakeBuild> blds = getProjectBuildsByProjectId(id);
for (FakeBuild bld : blds) {
List<String> arches = bld.getArches();
for (String arch : arches) {
ServerLogger.log(" arch: " + arch);
//list logs
List<File> logs = bld.getLogs(arch);
logs.stream().forEach((log) -> {
ServerLogger.log(log.toString());
});
//list others
List<File> files = bld.getNonLogs(arch);
files.stream().forEach((f) -> {
ServerLogger.log(f.toString());
});
}
}
}
*/
/**
 * Returns the RPMs of the given build restricted to the requested
 * architectures; an unknown build id yields an empty list.
 *
 * @param buildId id of the build whose RPMs are wanted
 * @param archs   architectures to include
 * @return RPM list for the build, or an empty list when the id is unknown
 */
public List<RPM> getRpms(Integer buildId, List<String> archs) {
    final FakeBuild matched = getBuildById(buildId);
    return matched == null
            ? Collections.<RPM>emptyList()
            : matched.getRpms(archs);
}
// all files other than rpms (tarxz, msi, ...) should be contained here
/**
 * Stub: archive listing is not implemented for this fake DB; always returns
 * an empty list. NOTE(review): the parameter names/types ({@code Object get},
 * {@code Object get0}) look like auto-generated placeholders from an XML-RPC
 * signature — confirm the intended contract before implementing.
 */
public List<String> getArchives(Object get, Object get0) {
    return Collections.emptyList();
}
//n,v,r,
//*.tarxz else oldApi
/**
 * Filters this fake DB's builds through the oTool build-helper pipeline:
 * each build's NVR is parsed, then package-name, project-name and
 * build-platform predicates are applied before mapping to {@link Build}
 * instances. Returns an empty list when the helper cannot be constructed
 * (storage/configuration failure or unresolvable local hostname).
 *
 * @param params filter parameters for the listing
 * @return matching builds (possibly empty, never {@code null})
 */
public List<Build> getBuildList(GetBuildList params) {
    final BuildHelper buildHelper;
    try {
        final String hostname = InetAddress.getLocalHost().getHostName();
        // This fake server advertises itself as the provider (XML-RPC +
        // download endpoints) of every build it returns.
        final BuildProvider thisBuildProvider = new BuildProvider(
                hostname + ':' + settings.getXmlRpcPort(),
                hostname + ':' + settings.getFileDownloadPort()
        );
        buildHelper = BuildHelper.create(
                ConfigManager.create(settings.getConfigRoot().getAbsolutePath()),
                params,
                settings.getDbFileRoot(),
                thisBuildProvider
        );
    } catch (StorageException | UnknownHostException e) {
        // Best-effort listing: log and degrade to "no builds" rather than fail.
        LOGGER.severe(e.getMessage());
        return Collections.emptyList();
    }
    return builds.stream()
            .map(FakeBuild::getNVR)
            .map(buildHelper.getOToolBuildParser())
            .filter(Optional::isPresent)
            .map(Optional::get)
            .filter(buildHelper.getPackageNamePredicate())
            .filter(buildHelper.getProjectNamePredicate())
            .filter(buildHelper.getBuildPlatformPredicate())
            .map(buildHelper.getBuildParser())
            .filter(Optional::isPresent)
            .map(Optional::get)
            .collect(Collectors.toList());
}
/**
 * Tells whether the given file name is served through the new API,
 * i.e. whether it is a {@code .tarxz} artifact.
 *
 * @param name file name to classify
 * @return {@code true} for {@code *.tarxz} files, {@code false} otherwise
 */
public static boolean isOkForNewApi(String name) {
    final String newApiSuffix = ".tarxz";
    return name.endsWith(newApiSuffix);
}
/**
 * Tells whether the given file name is served through the old API,
 * i.e. whether it is an {@code .rpm}, {@code .msi} or {@code .zip} artifact.
 *
 * @param name file name to classify
 * @return {@code true} when the name carries one of the old-API suffixes
 */
public static boolean isOkForOldApi(String name) {
    for (String oldApiSuffix : new String[]{".rpm", ".msi", ".zip"}) {
        if (name.endsWith(oldApiSuffix)) {
            return true;
        }
    }
    return false;
}
/**
 * Tells whether the build contains at least one artifact that the old API
 * serves (rpm/msi/zip among its non-log files).
 *
 * @param b build to inspect
 * @return {@code true} when any non-log file name matches an old-API suffix
 */
private boolean isOkForOldApi(FakeBuild b) {
    return b.getNonLogs().stream()
            .anyMatch(candidate -> isOkForOldApi(candidate.getName()));
}
/**
 * Materializes the detail of a single build addressed by name/version/release:
 * the build directory is {@code <dbFileRoot>/<n>/<v>/<r>} and the result
 * carries no extra tags.
 *
 * @param i name/version/release triple identifying the build
 * @return the build detail assembled from the on-disk layout
 */
public Build getBuildDetail(GetBuildDetail i) {
    final String buildPath = String.join("/",
            settings.getDbFileRoot().getAbsolutePath(), i.n, i.v, i.r);
    final FakeBuild fakeBuild =
            new FakeBuild(i.n, i.v, i.r, new File(buildPath), settings.getProjectMapping());
    return fakeBuild.toBuild(new HashSet<>());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.service;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.matches;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertSame;
import static org.testng.AssertJUnit.assertTrue;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import java.lang.reflect.Field;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.function.Supplier;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import org.apache.bookkeeper.common.util.OrderedExecutor;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import org.apache.bookkeeper.mledger.AsyncCallbacks.AddEntryCallback;
import org.apache.bookkeeper.mledger.AsyncCallbacks.DeleteCursorCallback;
import org.apache.bookkeeper.mledger.AsyncCallbacks.DeleteLedgerCallback;
import org.apache.bookkeeper.mledger.AsyncCallbacks.OpenCursorCallback;
import org.apache.bookkeeper.mledger.AsyncCallbacks.OpenLedgerCallback;
import org.apache.bookkeeper.mledger.ManagedCursor;
import org.apache.bookkeeper.mledger.ManagedLedger;
import org.apache.bookkeeper.mledger.ManagedLedgerConfig;
import org.apache.bookkeeper.mledger.ManagedLedgerException;
import org.apache.bookkeeper.mledger.ManagedLedgerFactory;
import org.apache.bookkeeper.mledger.impl.ManagedCursorImpl;
import org.apache.bookkeeper.mledger.impl.PositionImpl;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.broker.ServiceConfiguration;
import org.apache.pulsar.broker.cache.ConfigurationCacheService;
import org.apache.pulsar.broker.cache.LocalZooKeeperCacheService;
import org.apache.pulsar.broker.namespace.NamespaceService;
import org.apache.pulsar.broker.service.persistent.PersistentDispatcherMultipleConsumers;
import org.apache.pulsar.broker.service.persistent.PersistentDispatcherSingleActiveConsumer;
import org.apache.pulsar.broker.service.persistent.PersistentSubscription;
import org.apache.pulsar.broker.service.persistent.PersistentTopic;
import org.apache.pulsar.broker.transaction.TransactionTestBase;
import org.apache.pulsar.common.api.proto.BaseCommand;
import org.apache.pulsar.common.api.proto.CommandActiveConsumerChange;
import org.apache.pulsar.common.api.proto.CommandSubscribe.InitialPosition;
import org.apache.pulsar.common.api.proto.CommandSubscribe.SubType;
import org.apache.pulsar.common.api.proto.ProtocolVersion;
import org.apache.pulsar.common.naming.NamespaceBundle;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.policies.data.Policies;
import org.apache.pulsar.zookeeper.ZooKeeperCache;
import org.apache.pulsar.zookeeper.ZooKeeperDataCache;
import org.apache.zookeeper.ZooKeeper;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "broker")
public class PersistentDispatcherFailoverConsumerTest {
private BrokerService brokerService;
private ManagedLedgerFactory mlFactoryMock;
private ServerCnx serverCnx;
private ServerCnx serverCnxWithOldVersion;
private ManagedLedger ledgerMock;
private ManagedCursor cursorMock;
private ConfigurationCacheService configCacheService;
private ChannelHandlerContext channelCtx;
private LinkedBlockingQueue<CommandActiveConsumerChange> consumerChanges;
private ZooKeeper mockZk;
protected PulsarService pulsar;
final String successTopicName = "persistent://part-perf/global/perf.t1/ptopic";
final String failTopicName = "persistent://part-perf/global/perf.t1/pfailTopic";
private OrderedExecutor executor;
private EventLoopGroup eventLoopGroup;
@BeforeMethod
public void setup() throws Exception {
executor = OrderedExecutor.newBuilder().numThreads(1).name("persistent-dispatcher-failover-test").build();
ServiceConfiguration svcConfig = spy(new ServiceConfiguration());
svcConfig.setBrokerShutdownTimeoutMs(0L);
pulsar = spy(new PulsarService(svcConfig));
doReturn(svcConfig).when(pulsar).getConfiguration();
mlFactoryMock = mock(ManagedLedgerFactory.class);
doReturn(mlFactoryMock).when(pulsar).getManagedLedgerFactory();
doReturn(TransactionTestBase.createMockBookKeeper(executor))
.when(pulsar).getBookKeeperClient();
eventLoopGroup = new NioEventLoopGroup();
ZooKeeperCache cache = mock(ZooKeeperCache.class);
doReturn(30).when(cache).getZkOperationTimeoutSeconds();
doReturn(cache).when(pulsar).getLocalZkCache();
configCacheService = mock(ConfigurationCacheService.class);
@SuppressWarnings("unchecked")
ZooKeeperDataCache<Policies> zkDataCache = mock(ZooKeeperDataCache.class);
LocalZooKeeperCacheService zkCache = mock(LocalZooKeeperCacheService.class);
doReturn(CompletableFuture.completedFuture(Optional.empty())).when(zkDataCache).getAsync(any());
doReturn(zkDataCache).when(zkCache).policiesCache();
doReturn(zkDataCache).when(configCacheService).policiesCache();
doReturn(configCacheService).when(pulsar).getConfigurationCache();
doReturn(zkCache).when(pulsar).getLocalZkCacheService();
brokerService = spy(new BrokerService(pulsar, eventLoopGroup));
doReturn(brokerService).when(pulsar).getBrokerService();
consumerChanges = new LinkedBlockingQueue<>();
this.channelCtx = mock(ChannelHandlerContext.class);
doAnswer(invocationOnMock -> {
ByteBuf buf = invocationOnMock.getArgument(0);
ByteBuf cmdBuf = buf.retainedSlice(4, buf.writerIndex() - 4);
try {
int cmdSize = (int) cmdBuf.readUnsignedInt();
int writerIndex = cmdBuf.writerIndex();
BaseCommand cmd = new BaseCommand();
cmd.parseFrom(cmdBuf, cmdSize);
if (cmd.hasActiveConsumerChange()) {
consumerChanges.put(cmd.getActiveConsumerChange());
}
} finally {
cmdBuf.release();
}
return null;
}).when(channelCtx).writeAndFlush(any(), any());
serverCnx = spy(new ServerCnx(pulsar));
doReturn(true).when(serverCnx).isActive();
doReturn(true).when(serverCnx).isWritable();
doReturn(new InetSocketAddress("localhost", 1234)).when(serverCnx).clientAddress();
when(serverCnx.getRemoteEndpointProtocolVersion()).thenReturn(ProtocolVersion.v12.getValue());
when(serverCnx.ctx()).thenReturn(channelCtx);
doReturn(new PulsarCommandSenderImpl(null, serverCnx))
.when(serverCnx).getCommandSender();
serverCnxWithOldVersion = spy(new ServerCnx(pulsar));
doReturn(true).when(serverCnxWithOldVersion).isActive();
doReturn(true).when(serverCnxWithOldVersion).isWritable();
doReturn(new InetSocketAddress("localhost", 1234))
.when(serverCnxWithOldVersion).clientAddress();
when(serverCnxWithOldVersion.getRemoteEndpointProtocolVersion())
.thenReturn(ProtocolVersion.v11.getValue());
when(serverCnxWithOldVersion.ctx()).thenReturn(channelCtx);
doReturn(new PulsarCommandSenderImpl(null, serverCnxWithOldVersion))
.when(serverCnxWithOldVersion).getCommandSender();
NamespaceService nsSvc = mock(NamespaceService.class);
doReturn(nsSvc).when(pulsar).getNamespaceService();
doReturn(true).when(nsSvc).isServiceUnitOwned(any(NamespaceBundle.class));
doReturn(true).when(nsSvc).isServiceUnitActive(any(TopicName.class));
doReturn(CompletableFuture.completedFuture(true)).when(nsSvc).checkTopicOwnership(any(TopicName.class));
setupMLAsyncCallbackMocks();
}
@AfterMethod(alwaysRun = true)
public void shutdown() throws Exception {
if (brokerService != null) {
brokerService.close();
brokerService = null;
}
if (pulsar != null) {
pulsar.close();
pulsar = null;
}
executor.shutdown();
eventLoopGroup.shutdownGracefully().get();
}
void setupMLAsyncCallbackMocks() {
ledgerMock = mock(ManagedLedger.class);
cursorMock = mock(ManagedCursorImpl.class);
doReturn(new ArrayList<Object>()).when(ledgerMock).getCursors();
doReturn("mockCursor").when(cursorMock).getName();
// call openLedgerComplete with ledgerMock on ML factory asyncOpen
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
((OpenLedgerCallback) invocationOnMock.getArguments()[2]).openLedgerComplete(ledgerMock, null);
return null;
}
}).when(mlFactoryMock).asyncOpen(matches(".*success.*"), any(ManagedLedgerConfig.class),
any(OpenLedgerCallback.class), any(Supplier.class), any());
// call openLedgerFailed on ML factory asyncOpen
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
((OpenLedgerCallback) invocationOnMock.getArguments()[2])
.openLedgerFailed(new ManagedLedgerException("Managed ledger failure"), null);
return null;
}
}).when(mlFactoryMock).asyncOpen(matches(".*fail.*"), any(ManagedLedgerConfig.class),
any(OpenLedgerCallback.class), any(Supplier.class), any());
// call addComplete on ledger asyncAddEntry
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
((AddEntryCallback) invocationOnMock.getArguments()[1]).addComplete(
new PositionImpl(1, 1), null, null);
return null;
}
}).when(ledgerMock).asyncAddEntry(any(byte[].class), any(AddEntryCallback.class), any());
// call openCursorComplete on cursor asyncOpen
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
((OpenCursorCallback) invocationOnMock.getArguments()[2]).openCursorComplete(cursorMock, null);
return null;
}
}).when(ledgerMock).asyncOpenCursor(matches(".*success.*"), any(InitialPosition.class), any(OpenCursorCallback.class), any());
// call deleteLedgerComplete on ledger asyncDelete
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
((DeleteLedgerCallback) invocationOnMock.getArguments()[0]).deleteLedgerComplete(null);
return null;
}
}).when(ledgerMock).asyncDelete(any(DeleteLedgerCallback.class), any());
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
((DeleteCursorCallback) invocationOnMock.getArguments()[1]).deleteCursorComplete(null);
return null;
}
}).when(ledgerMock).asyncDeleteCursor(matches(".*success.*"), any(DeleteCursorCallback.class), any());
}
private void verifyActiveConsumerChange(CommandActiveConsumerChange change,
long consumerId,
boolean isActive) {
assertEquals(consumerId, change.getConsumerId());
assertEquals(isActive, change.isIsActive());
}
@Test
public void testConsumerGroupChangesWithOldNewConsumers() throws Exception {
PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, brokerService);
PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false);
int partitionIndex = 0;
PersistentDispatcherSingleActiveConsumer pdfc = new PersistentDispatcherSingleActiveConsumer(cursorMock,
SubType.Failover, partitionIndex, topic, sub);
// 1. Verify no consumers connected
assertFalse(pdfc.isConsumerConnected());
// 2. Add old consumer
Consumer consumer1 = new Consumer(sub, SubType.Exclusive, topic.getName(), 1 /* consumer id */, 0,
"Cons1"/* consumer name */, 50000, serverCnxWithOldVersion, "myrole-1", Collections.emptyMap(), false, InitialPosition.Latest, null);
pdfc.addConsumer(consumer1);
List<Consumer> consumers = pdfc.getConsumers();
assertSame(consumers.get(0).consumerName(), consumer1.consumerName());
assertEquals(1, consumers.size());
assertNull(consumerChanges.poll());
verify(channelCtx, times(0)).write(any());
// 3. Add new consumer
Consumer consumer2 = new Consumer(sub, SubType.Exclusive, topic.getName(), 2 /* consumer id */, 0,
"Cons2"/* consumer name */, 50000, serverCnx, "myrole-1", Collections.emptyMap(), false, InitialPosition.Latest, null);
pdfc.addConsumer(consumer2);
consumers = pdfc.getConsumers();
assertSame(consumers.get(0).consumerName(), consumer1.consumerName());
assertEquals(2, consumers.size());
CommandActiveConsumerChange change = consumerChanges.take();
verifyActiveConsumerChange(change, 2, false);
verify(channelCtx, times(1)).writeAndFlush(any(), any());
}
@Test
public void testAddRemoveConsumer() throws Exception {
log.info("--- Starting PersistentDispatcherFailoverConsumerTest::testAddConsumer ---");
PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, brokerService);
PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false);
int partitionIndex = 4;
PersistentDispatcherSingleActiveConsumer pdfc = new PersistentDispatcherSingleActiveConsumer(cursorMock,
SubType.Failover, partitionIndex, topic, sub);
// 1. Verify no consumers connected
assertFalse(pdfc.isConsumerConnected());
// 2. Add consumer
Consumer consumer1 = spy(new Consumer(sub, SubType.Exclusive, topic.getName(), 1 /* consumer id */, 0,
"Cons1"/* consumer name */, 50000, serverCnx, "myrole-1", Collections.emptyMap(),
false /* read compacted */, InitialPosition.Latest, null));
pdfc.addConsumer(consumer1);
List<Consumer> consumers = pdfc.getConsumers();
assertSame(consumers.get(0).consumerName(), consumer1.consumerName());
assertEquals(1, consumers.size());
CommandActiveConsumerChange change = consumerChanges.take();
verifyActiveConsumerChange(change, 1, true);
verify(consumer1, times(1)).notifyActiveConsumerChange(same(consumer1));
// 3. Add again, duplicate allowed
pdfc.addConsumer(consumer1);
consumers = pdfc.getConsumers();
assertSame(consumers.get(0).consumerName(), consumer1.consumerName());
assertEquals(2, consumers.size());
// 4. Verify active consumer
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
// get the notified with who is the leader
change = consumerChanges.take();
verifyActiveConsumerChange(change, 1, true);
verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer1));
// 5. Add another consumer which does not change active consumer
Consumer consumer2 = spy(new Consumer(sub, SubType.Exclusive, topic.getName(), 2 /* consumer id */, 0, "Cons2"/* consumer name */,
50000, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, InitialPosition.Latest, null));
pdfc.addConsumer(consumer2);
consumers = pdfc.getConsumers();
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
assertEquals(3, consumers.size());
// get notified with who is the leader
change = consumerChanges.take();
verifyActiveConsumerChange(change, 2, false);
verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer1));
verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer1));
// 6. Add a consumer which changes active consumer
Consumer consumer0 = spy(new Consumer(sub, SubType.Exclusive, topic.getName(), 0 /* consumer id */, 0,
"Cons0"/* consumer name */, 50000, serverCnx, "myrole-1", Collections.emptyMap(),
false /* read compacted */, InitialPosition.Latest, null));
pdfc.addConsumer(consumer0);
consumers = pdfc.getConsumers();
assertSame(pdfc.getActiveConsumer().consumerName(), consumer0.consumerName());
assertEquals(4, consumers.size());
// all consumers will receive notifications
change = consumerChanges.take();
verifyActiveConsumerChange(change, 0, true);
change = consumerChanges.take();
verifyActiveConsumerChange(change, 1, false);
change = consumerChanges.take();
verifyActiveConsumerChange(change, 1, false);
change = consumerChanges.take();
verifyActiveConsumerChange(change, 2, false);
verify(consumer0, times(1)).notifyActiveConsumerChange(same(consumer0));
verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer1));
verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer0));
verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer1));
verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer0));
// 7. Remove last consumer
pdfc.removeConsumer(consumer2);
consumers = pdfc.getConsumers();
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
assertEquals(3, consumers.size());
// not consumer group changes
assertNull(consumerChanges.poll());
// 8. Verify if we cannot unsubscribe when more than one consumer is connected
assertFalse(pdfc.canUnsubscribe(consumer0));
// 9. Remove active consumer
pdfc.removeConsumer(consumer0);
consumers = pdfc.getConsumers();
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
assertEquals(2, consumers.size());
// the remaining consumers will receive notifications
change = consumerChanges.take();
verifyActiveConsumerChange(change, 1, true);
change = consumerChanges.take();
verifyActiveConsumerChange(change, 1, true);
// 10. Attempt to remove already removed consumer
String cause = "";
try {
pdfc.removeConsumer(consumer0);
} catch (Exception e) {
cause = e.getMessage();
}
assertEquals(cause, "Consumer was not connected");
// 11. Remove active consumer
pdfc.removeConsumer(consumer1);
consumers = pdfc.getConsumers();
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
assertEquals(1, consumers.size());
// not consumer group changes
assertNull(consumerChanges.poll());
// 11. With only one consumer, unsubscribe is allowed
assertTrue(pdfc.canUnsubscribe(consumer1));
}
@Test
public void testAddRemoveConsumerNonPartitionedTopic() throws Exception {
log.info("--- Starting PersistentDispatcherFailoverConsumerTest::testAddConsumer ---");
PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, brokerService);
PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false);
// Non partitioned topic.
int partitionIndex = -1;
PersistentDispatcherSingleActiveConsumer pdfc = new PersistentDispatcherSingleActiveConsumer(cursorMock,
SubType.Failover, partitionIndex, topic, sub);
// 1. Verify no consumers connected
assertFalse(pdfc.isConsumerConnected());
// 2. Add a consumer
Consumer consumer1 = spy(new Consumer(sub, SubType.Failover, topic.getName(), 1 /* consumer id */, 1,
"Cons1"/* consumer name */, 50000, serverCnx, "myrole-1", Collections.emptyMap(),
false /* read compacted */, InitialPosition.Latest, null));
pdfc.addConsumer(consumer1);
List<Consumer> consumers = pdfc.getConsumers();
assertEquals(1, consumers.size());
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
// 3. Add a consumer with same priority level and consumer name is smaller in lexicographic order.
Consumer consumer2 = spy(new Consumer(sub, SubType.Failover, topic.getName(), 2 /* consumer id */, 1,
"Cons2"/* consumer name */, 50000, serverCnx, "myrole-1", Collections.emptyMap(),
false /* read compacted */, InitialPosition.Latest, null));
pdfc.addConsumer(consumer2);
// 4. Verify active consumer doesn't change
consumers = pdfc.getConsumers();
assertEquals(2, consumers.size());
CommandActiveConsumerChange change = consumerChanges.take();
verifyActiveConsumerChange(change, 2, false);
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer1));
// 5. Add another consumer which has higher priority level
Consumer consumer3 = spy(new Consumer(sub, SubType.Failover, topic.getName(), 3 /* consumer id */, 0, "Cons3"/* consumer name */,
50000, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, InitialPosition.Latest, null));
pdfc.addConsumer(consumer3);
consumers = pdfc.getConsumers();
assertEquals(3, consumers.size());
change = consumerChanges.take();
verifyActiveConsumerChange(change, 3, false);
assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName());
verify(consumer3, times(1)).notifyActiveConsumerChange(same(consumer1));
// 7. Remove first consumer and active consumer should change to consumer2 since it's added before consumer3
// though consumer 3 has higher priority level
pdfc.removeConsumer(consumer1);
consumers = pdfc.getConsumers();
assertEquals(2, consumers.size());
change = consumerChanges.take();
verifyActiveConsumerChange(change, 2, true);
assertSame(pdfc.getActiveConsumer().consumerName(), consumer2.consumerName());
verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer2));
verify(consumer3, times(1)).notifyActiveConsumerChange(same(consumer2));
}
@Test
public void testMultipleDispatcherGetNextConsumerWithDifferentPriorityLevel() throws Exception {
PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, brokerService);
PersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null);
Consumer consumer1 = createConsumer(0, 2, false, 1);
Consumer consumer2 = createConsumer(0, 2, false, 2);
Consumer consumer3 = createConsumer(0, 2, false, 3);
Consumer consumer4 = createConsumer(1, 2, false, 4);
Consumer consumer5 = createConsumer(1, 1, false, 5);
Consumer consumer6 = createConsumer(1, 2, false, 6);
Consumer consumer7 = createConsumer(2, 1, false, 7);
Consumer consumer8 = createConsumer(2, 1, false, 8);
Consumer consumer9 = createConsumer(2, 1, false, 9);
dispatcher.addConsumer(consumer1);
dispatcher.addConsumer(consumer2);
dispatcher.addConsumer(consumer3);
dispatcher.addConsumer(consumer4);
dispatcher.addConsumer(consumer5);
dispatcher.addConsumer(consumer6);
dispatcher.addConsumer(consumer7);
dispatcher.addConsumer(consumer8);
dispatcher.addConsumer(consumer9);
Assert.assertEquals(getNextConsumer(dispatcher), consumer1);
Assert.assertEquals(getNextConsumer(dispatcher), consumer2);
Assert.assertEquals(getNextConsumer(dispatcher), consumer3);
Assert.assertEquals(getNextConsumer(dispatcher), consumer1);
Assert.assertEquals(getNextConsumer(dispatcher), consumer2);
Assert.assertEquals(getNextConsumer(dispatcher), consumer3);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
Assert.assertEquals(getNextConsumer(dispatcher), consumer5);
Assert.assertEquals(getNextConsumer(dispatcher), consumer6);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
Assert.assertEquals(getNextConsumer(dispatcher), consumer6);
Assert.assertEquals(getNextConsumer(dispatcher), consumer7);
Assert.assertEquals(getNextConsumer(dispatcher), consumer8);
// in between add upper priority consumer with more permits
Consumer consumer10 = createConsumer(0, 2, false, 10);
dispatcher.addConsumer(consumer10);
Assert.assertEquals(getNextConsumer(dispatcher), consumer10);
Assert.assertEquals(getNextConsumer(dispatcher), consumer10);
Assert.assertEquals(getNextConsumer(dispatcher), consumer9);
}
@Test
public void testFewBlockedConsumerSamePriority() throws Exception{
PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, brokerService);
PersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null);
Consumer consumer1 = createConsumer(0, 2, false, 1);
Consumer consumer2 = createConsumer(0, 2, false, 2);
Consumer consumer3 = createConsumer(0, 2, false, 3);
Consumer consumer4 = createConsumer(0, 2, false, 4);
Consumer consumer5 = createConsumer(0, 1, true, 5);
Consumer consumer6 = createConsumer(0, 2, true, 6);
dispatcher.addConsumer(consumer1);
dispatcher.addConsumer(consumer2);
dispatcher.addConsumer(consumer3);
dispatcher.addConsumer(consumer4);
dispatcher.addConsumer(consumer5);
dispatcher.addConsumer(consumer6);
Assert.assertEquals(getNextConsumer(dispatcher), consumer1);
Assert.assertEquals(getNextConsumer(dispatcher), consumer2);
Assert.assertEquals(getNextConsumer(dispatcher), consumer3);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
Assert.assertEquals(getNextConsumer(dispatcher), consumer1);
Assert.assertEquals(getNextConsumer(dispatcher), consumer2);
Assert.assertEquals(getNextConsumer(dispatcher), consumer3);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
assertNull(getNextConsumer(dispatcher));
}
@Test
public void testFewBlockedConsumerDifferentPriority() throws Exception {
PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, brokerService);
PersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null);
Consumer consumer1 = createConsumer(0, 2, false, 1);
Consumer consumer2 = createConsumer(0, 2, false, 2);
Consumer consumer3 = createConsumer(0, 2, false, 3);
Consumer consumer4 = createConsumer(0, 2, false, 4);
Consumer consumer5 = createConsumer(0, 1, true, 5);
Consumer consumer6 = createConsumer(0, 2, true, 6);
Consumer consumer7 = createConsumer(1, 2, false, 7);
Consumer consumer8 = createConsumer(1, 10, true, 8);
Consumer consumer9 = createConsumer(1, 2, false, 9);
Consumer consumer10 = createConsumer(2, 2, false, 10);
Consumer consumer11 = createConsumer(2, 10, true, 11);
Consumer consumer12 = createConsumer(2, 2, false, 12);
dispatcher.addConsumer(consumer1);
dispatcher.addConsumer(consumer2);
dispatcher.addConsumer(consumer3);
dispatcher.addConsumer(consumer4);
dispatcher.addConsumer(consumer5);
dispatcher.addConsumer(consumer6);
dispatcher.addConsumer(consumer7);
dispatcher.addConsumer(consumer8);
dispatcher.addConsumer(consumer9);
dispatcher.addConsumer(consumer10);
dispatcher.addConsumer(consumer11);
dispatcher.addConsumer(consumer12);
Assert.assertEquals(getNextConsumer(dispatcher), consumer1);
Assert.assertEquals(getNextConsumer(dispatcher), consumer2);
Assert.assertEquals(getNextConsumer(dispatcher), consumer3);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
Assert.assertEquals(getNextConsumer(dispatcher), consumer1);
Assert.assertEquals(getNextConsumer(dispatcher), consumer2);
Assert.assertEquals(getNextConsumer(dispatcher), consumer3);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
Assert.assertEquals(getNextConsumer(dispatcher), consumer7);
Assert.assertEquals(getNextConsumer(dispatcher), consumer9);
Assert.assertEquals(getNextConsumer(dispatcher), consumer7);
Assert.assertEquals(getNextConsumer(dispatcher), consumer9);
Assert.assertEquals(getNextConsumer(dispatcher), consumer10);
Assert.assertEquals(getNextConsumer(dispatcher), consumer12);
// add consumer with lower priority again
Consumer consumer13 = createConsumer(0, 2, false, 13);
Consumer consumer14 = createConsumer(0, 2, true, 14);
dispatcher.addConsumer(consumer13);
dispatcher.addConsumer(consumer14);
Assert.assertEquals(getNextConsumer(dispatcher), consumer13);
Assert.assertEquals(getNextConsumer(dispatcher), consumer13);
Assert.assertEquals(getNextConsumer(dispatcher), consumer10);
Assert.assertEquals(getNextConsumer(dispatcher), consumer12);
assertNull(getNextConsumer(dispatcher));
}
@Test
public void testFewBlockedConsumerDifferentPriority2() throws Exception {
PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, brokerService);
PersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null);
Consumer consumer1 = createConsumer(0, 2, true, 1);
Consumer consumer2 = createConsumer(0, 2, true, 2);
Consumer consumer3 = createConsumer(0, 2, true, 3);
Consumer consumer4 = createConsumer(1, 2, false, 4);
Consumer consumer5 = createConsumer(1, 1, false, 5);
Consumer consumer6 = createConsumer(2, 1, false, 6);
Consumer consumer7 = createConsumer(2, 2, true, 7);
dispatcher.addConsumer(consumer1);
dispatcher.addConsumer(consumer2);
dispatcher.addConsumer(consumer3);
dispatcher.addConsumer(consumer4);
dispatcher.addConsumer(consumer5);
dispatcher.addConsumer(consumer6);
dispatcher.addConsumer(consumer7);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
Assert.assertEquals(getNextConsumer(dispatcher), consumer5);
Assert.assertEquals(getNextConsumer(dispatcher), consumer4);
Assert.assertEquals(getNextConsumer(dispatcher), consumer6);
assertNull(getNextConsumer(dispatcher));
}
// Asks the dispatcher for the next consumer and, when one is selected, consumes one of
// its message permits (simulating a delivery) through the private MESSAGE_PERMITS_UPDATER
// field, which is only reachable via reflection from this test.
@SuppressWarnings("unchecked")
private Consumer getNextConsumer(PersistentDispatcherMultipleConsumers dispatcher) throws Exception {
final Consumer next = dispatcher.getNextConsumer();
if (next == null) {
return null;
}
final Field updaterField = Consumer.class.getDeclaredField("MESSAGE_PERMITS_UPDATER");
updaterField.setAccessible(true);
final AtomicIntegerFieldUpdater<Consumer> permitsUpdater =
(AtomicIntegerFieldUpdater<Consumer>) updaterField.get(next);
permitsUpdater.decrementAndGet(next);
return next;
}
// Builds a Consumer on the mocked server connection with the given priority level and
// initial flow permits, optionally marking it blocked-on-unacked-messages. The blocked
// flag is private in Consumer, so it is forced via reflection.
private Consumer createConsumer(int priority, int permit, boolean blocked, int id) throws Exception {
Consumer consumer =
new Consumer(null, SubType.Shared, "test-topic", id, priority, ""+id, 5000,
serverCnx, "appId", Collections.emptyMap(), false /* read compacted */, InitialPosition.Latest, null);
try {
consumer.flowPermits(permit);
} catch (Exception e) {
// intentionally ignored: flowPermits may throw against the mocked cnx; permits appear
// to still be recorded for the purposes of these tests — TODO confirm
}
// set consumer blocked flag
Field blockField = Consumer.class.getDeclaredField("blockedConsumerOnUnackedMsgs");
blockField.setAccessible(true);
blockField.set(consumer, blocked);
return consumer;
}
private static final Logger log = LoggerFactory.getLogger(PersistentDispatcherFailoverConsumerTest.class);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.utils.collections;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.function.IntFunction;
/**
* This collection is a concurrent append-only list that grows in chunks.<br>
* It's safe to be used by many threads concurrently and has a max capacity of {@link Integer#MAX_VALUE}.
*/
public final class ConcurrentAppendOnlyChunkedList<T> {
// One fixed-size chunk of the list, doubly linked to its neighbours.
private static final class AtomicChunk<T> extends AtomicReferenceArray<T> {
// written while lastIndex carries the "appending" parity bit, so no reader/writer races on it
AtomicChunk<T> next = null;
// prev is final and can be safely read by anyone
final AtomicChunk<T> prev;
// ordinal position of this chunk in the list (0 for the first chunk)
final int index;
AtomicChunk(int index, AtomicChunk<T> prev, int length) {
super(length);
this.index = index;
this.prev = prev;
}
}
private static final AtomicLongFieldUpdater<ConcurrentAppendOnlyChunkedList> LAST_INDEX_UPDATER = AtomicLongFieldUpdater.newUpdater(ConcurrentAppendOnlyChunkedList.class, "lastIndex");
private static final AtomicLongFieldUpdater<ConcurrentAppendOnlyChunkedList> CACHED_LAST_INDEX_UPDATER = AtomicLongFieldUpdater.newUpdater(ConcurrentAppendOnlyChunkedList.class, "cachedLastIndex");
private final int chunkSize;
// chunkSize - 1: enables fast modulo (index & chunkMask) because chunkSize is a power of 2
private final int chunkMask;
// log2(chunkSize): enables fast division (index >> chunkSizeLog2)
private final int chunkSizeLog2;
private AtomicChunk<T> firstBuffer = null;
private AtomicChunk<T> lastBuffer = null;
//it is both the current index of the next element to be claimed and the current size of the collection
//it's using a parity bit to mark the rotation state ie size === lastIndex >> 1
private volatile long lastIndex = 0;
//cached view of lastIndex used to avoid invalidating lastIndex while being updated by the appends
private volatile long cachedLastIndex = 0;
/**
* @throws IllegalArgumentException if {@code chunkSize} is <= 0 or not a power of 2
*/
public ConcurrentAppendOnlyChunkedList(final int chunkSize) {
if (chunkSize <= 0) {
throw new IllegalArgumentException("chunkSize must be >0");
}
//IMPORTANT: to enable some nice optimizations on / and %, chunk size MUST BE a power of 2
if (Integer.bitCount(chunkSize) != 1) {
throw new IllegalArgumentException("chunkSize must be a power of 2");
}
this.chunkSize = chunkSize;
this.chunkMask = chunkSize - 1;
this.chunkSizeLog2 = Integer.numberOfTrailingZeros(chunkSize);
}
// drops the parity ("append in progress") bit to obtain the real element count
private long getValidLastIndex() {
return this.lastIndex >> 1;
}
/**
* It returns the number of elements currently added.
*/
public int size() {
return (int) getValidLastIndex();
}
/**
* It appends {@code elements} to the collection.
*/
public void addAll(T[] elements) {
for (T e : elements) {
add(e);
}
}
/**
* Returns the element at the specified position in this collection or {@code null} if not found.
*/
public T get(int index) {
if (index < 0) {
return null;
}
//reading the cached size first performs fewer cache invalidations vs lastIndex if there are bursts of appends
long lastIndex = cachedLastIndex;
if (index >= lastIndex) {
//cached view is stale: refresh from the authoritative counter
lastIndex = getValidLastIndex();
//is the requested element over the current size?
if (index >= lastIndex) {
return null;
}
//publish the refreshed size for other readers
CACHED_LAST_INDEX_UPDATER.lazySet(this, lastIndex);
}
final AtomicChunk<T> buffer;
final int offset;
if (index >= chunkSize) {
offset = index & chunkMask;
//slow path is moved in a separate method
buffer = getChunkOf(index, lastIndex);
} else {
//fast path: the element lives in the first chunk
offset = index;
buffer = firstBuffer;
}
return pollElement(buffer, offset);
}
/**
* Implements a lock-free version of the optimization used on {@link java.util.LinkedList#get(int)} to speed up queries
* ie backward search of a node if needed.
*/
private AtomicChunk<T> getChunkOf(final int index, final long lastIndex) {
final int chunkSizeLog2 = this.chunkSizeLog2;
//fast division by a power of 2
final int chunkIndex = index >> chunkSizeLog2;
//size is never allowed to be > Integer.MAX_VALUE, so the narrowing cast before the shift is safe
final int lastChunkIndex = (int) lastIndex >> chunkSizeLog2;
int chunkIndexes = chunkIndex;
AtomicChunk<T> buffer = null;
boolean forward = true;
int distanceFromLastChunkIndex = lastChunkIndex - chunkIndex;
//it's worth to go backward from lastChunkIndex?
//trying first to check against the value we already have: if it won't worth, won't make sense to load the lastBuffer
if (distanceFromLastChunkIndex < chunkIndex) {
final AtomicChunk<T> lastBuffer = this.lastBuffer;
//lastBuffer is a potential moving, always increasing, target ie better to re-check the distance
distanceFromLastChunkIndex = lastBuffer.index - chunkIndex;
if (distanceFromLastChunkIndex < chunkIndex) {
//we're saving some jumps ie is fine to go backward from here
buffer = lastBuffer;
chunkIndexes = distanceFromLastChunkIndex;
forward = false;
}
}
//start from the first buffer only is needed
if (buffer == null) {
buffer = firstBuffer;
}
for (int i = 0; i < chunkIndexes; i++) {
//next chunk is always set if below a read lastIndex value
//previous chunk is final and can be safely read
buffer = forward ? buffer.next : buffer.prev;
}
return buffer;
}
/**
* Appends the specified element to the end of this collection.
*
* @throws NullPointerException if {@code e} is {@code null}
**/
public void add(T e) {
Objects.requireNonNull(e);
while (true) {
final long lastIndex = this.lastIndex;
// lower bit is indicative of appending: another thread is installing a new chunk, so spin
if ((lastIndex & 1) == 1) {
continue;
}
final long validLastIndex = lastIndex >> 1;
if (validLastIndex == Integer.MAX_VALUE) {
throw new IllegalStateException("can't add more then " + Integer.MAX_VALUE + " elements");
}
//load acquire the current lastBuffer
final AtomicChunk<T> lastBuffer = this.lastBuffer;
final int offset = (int) (validLastIndex & chunkMask);
//only the first attempt to add an element to a chunk can attempt to resize
if (offset == 0) {
if (addChunkAndElement(lastBuffer, lastIndex, validLastIndex, e)) {
return;
}
} else if (LAST_INDEX_UPDATER.compareAndSet(this, lastIndex, lastIndex + 2)) {
//this.lastBuffer is the correct buffer to append a element: it is guarded by the lastIndex logic
//NOTE: lastIndex is being updated before setting a new value
lastBuffer.lazySet(offset, e);
return;
}
}
}
// Claims the "appending" parity bit, installs a fresh chunk holding {@code element} at slot 0,
// links it into the list and finally releases the bit by advancing lastIndex by one element.
// Returns false if another thread won the CAS on lastIndex and the caller must retry.
private boolean addChunkAndElement(AtomicChunk<T> lastBuffer, long lastIndex, long validLastIndex, T element) {
// adding 1 will set the lower bit
if (!LAST_INDEX_UPDATER.compareAndSet(this, lastIndex, lastIndex + 1)) {
return false;
}
final AtomicChunk<T> newChunk;
try {
final int index = (int) (validLastIndex >> chunkSizeLog2);
newChunk = new AtomicChunk<>(index, lastBuffer, chunkSize);
} catch (OutOfMemoryError oom) {
//unblock lastIndex without updating it
LAST_INDEX_UPDATER.lazySet(this, lastIndex);
throw oom;
}
//adding the element to it
newChunk.lazySet(0, element);
//linking it to the old one, if any
if (lastBuffer != null) {
//a plain store is enough, given that lastIndex prevents any reader/writer to access it
lastBuffer.next = newChunk;
} else {
//it's first one
this.firstBuffer = newChunk;
}
//making it the current produced one
this.lastBuffer = newChunk;
//store release any previous write and unblock anyone waiting resizing to finish
//and would clean the lower bit
LAST_INDEX_UPDATER.lazySet(this, lastIndex + 2);
return true;
}
/**
* Returns an array containing all of the elements in this collection in proper
* sequence (from first to last element).<br>
* {@code arrayAllocator} will be used to instantiate the array of the correct size with the right runtime type.
*/
public T[] toArray(IntFunction<T[]> arrayAllocator) {
final long lastIndex = getValidLastIndex();
assert lastIndex <= Integer.MAX_VALUE;
final int size = (int) lastIndex;
final T[] elements = arrayAllocator.apply(size);
final int chunkSize = this.chunkSize;
//fast division by a power of 2: number of full chunks to copy
final int chunks = size > chunkSize ? size >> chunkSizeLog2 : 0;
AtomicChunk<T> buffer = firstBuffer;
int elementIndex = 0;
for (int i = 0; i < chunks; i++) {
drain(buffer, elements, elementIndex, chunkSize);
elementIndex += chunkSize;
//the next chunk is always set if we stay below a past size/lastIndex value
buffer = buffer.next;
}
//copy whatever is left in the (possibly partial) tail chunk
final int remaining = chunks > 0 ? (size & chunkMask) : size;
drain(buffer, elements, elementIndex, remaining);
return elements;
}
//NOTE: lastIndex is being updated BEFORE setting a new value ie on reader side need to spin until a not null value is set
private static <T> T pollElement(AtomicChunk<T> buffer, int i) {
T e;
//busy-spin: the store is guaranteed to arrive because lastIndex was already advanced past i
while ((e = buffer.get(i)) == null) {
}
return e;
}
// Copies {@code length} elements of {@code buffer} into {@code elements} starting at
// {@code elementNumber}, spinning on each slot until its value is visible.
private static <T> void drain(AtomicChunk<T> buffer, T[] elements, int elementNumber, int length) {
for (int j = 0; j < length; j++) {
final T e = pollElement(buffer, j);
assert e != null;
elements[elementNumber] = e;
elementNumber++;
}
}
}
| |
/*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.handmark.pulltorefresh.library.internal;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.graphics.drawable.AnimationDrawable;
import android.graphics.drawable.Drawable;
import android.text.TextUtils;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.handmark.pulltorefresh.library.ILoadingLayout;
import com.handmark.pulltorefresh.library.PullToRefreshBase.Mode;
import com.handmark.pulltorefresh.library.PullToRefreshBase.Orientation;
import com.handmark.pulltorefresh.library.R;
// Base layout shown while pulling/refreshing: an inner frame holding a header image,
// a progress bar, a main text label and an optional sub-text label. Concrete subclasses
// supply the drawable and the pull/refresh animations via the abstract callbacks below.
@SuppressLint("ViewConstructor")
public abstract class LoadingLayout extends FrameLayout implements ILoadingLayout {
static final String LOG_TAG = "PullToRefresh-LoadingLayout";
static final Interpolator ANIMATION_INTERPOLATOR = new LinearInterpolator();
private FrameLayout mInnerLayout;
protected final ImageView mHeaderImage;
protected final ProgressBar mHeaderProgress;
// true when the loading drawable is an AnimationDrawable, in which case its own frame
// animation is used instead of the subclass' custom animation callbacks
private boolean mUseIntrinsicAnimation;
private final TextView mHeaderText;
private final TextView mSubHeaderText;
protected final Mode mMode;
protected final Orientation mScrollDirection;
private CharSequence mPullLabel;
private CharSequence mRefreshingLabel;
private CharSequence mReleaseLabel;
public LoadingLayout(Context context, final Mode mode, final Orientation scrollDirection, TypedArray attrs) {
super(context);
mMode = mode;
mScrollDirection = scrollDirection;
// Inflate the orientation-specific header layout into this FrameLayout
switch (scrollDirection) {
case HORIZONTAL:
LayoutInflater.from(context).inflate(R.layout.pull_to_refresh_header_horizontal, this);
break;
case VERTICAL:
default:
LayoutInflater.from(context).inflate(R.layout.pull_to_refresh_header_vertical, this);
break;
}
mInnerLayout = (FrameLayout) findViewById(R.id.fl_inner);
mHeaderText = (TextView) mInnerLayout.findViewById(R.id.pull_to_refresh_text);
mHeaderProgress = (ProgressBar) mInnerLayout.findViewById(R.id.pull_to_refresh_progress);
mSubHeaderText = (TextView) mInnerLayout.findViewById(R.id.pull_to_refresh_sub_text);
mHeaderImage = (ImageView) mInnerLayout.findViewById(R.id.pull_to_refresh_image);
FrameLayout.LayoutParams lp = (FrameLayout.LayoutParams) mInnerLayout.getLayoutParams();
// Anchor the inner layout to the edge the user pulls from and pick the matching labels
switch (mode) {
case PULL_FROM_END:
lp.gravity = scrollDirection == Orientation.VERTICAL ? Gravity.TOP : Gravity.LEFT;
// Load in labels
mPullLabel = context.getString(R.string.pull_to_refresh_from_bottom_pull_label);
mRefreshingLabel = context.getString(R.string.pull_to_refresh_from_bottom_refreshing_label);
mReleaseLabel = context.getString(R.string.pull_to_refresh_from_bottom_release_label);
break;
case PULL_FROM_START:
default:
lp.gravity = scrollDirection == Orientation.VERTICAL ? Gravity.BOTTOM : Gravity.RIGHT;
// Load in labels
mPullLabel = context.getString(R.string.pull_to_refresh_pull_label);
mRefreshingLabel = context.getString(R.string.pull_to_refresh_refreshing_label);
mReleaseLabel = context.getString(R.string.pull_to_refresh_release_label);
break;
}
if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderBackground)) {
Drawable background = attrs.getDrawable(R.styleable.PullToRefresh_ptrHeaderBackground);
if (null != background) {
ViewCompat.setBackground(this, background);
}
}
if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderTextAppearance)) {
TypedValue styleID = new TypedValue();
attrs.getValue(R.styleable.PullToRefresh_ptrHeaderTextAppearance, styleID);
setTextAppearance(styleID.data);
}
if (attrs.hasValue(R.styleable.PullToRefresh_ptrSubHeaderTextAppearance)) {
TypedValue styleID = new TypedValue();
attrs.getValue(R.styleable.PullToRefresh_ptrSubHeaderTextAppearance, styleID);
setSubTextAppearance(styleID.data);
}
// Text Color attrs need to be set after TextAppearance attrs,
// otherwise the appearance style would overwrite the explicit colors
if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderTextColor)) {
ColorStateList colors = attrs.getColorStateList(R.styleable.PullToRefresh_ptrHeaderTextColor);
if (null != colors) {
setTextColor(colors);
}
}
if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderSubTextColor)) {
ColorStateList colors = attrs.getColorStateList(R.styleable.PullToRefresh_ptrHeaderSubTextColor);
if (null != colors) {
setSubTextColor(colors);
}
}
// Try and get defined drawable from Attrs
Drawable imageDrawable = null;
if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawable)) {
imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawable);
}
// Check Specific Drawable from Attrs, these overrite the generic
// drawable attr above
switch (mode) {
case PULL_FROM_START:
default:
if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableStart)) {
imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableStart);
} else if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableTop)) {
Utils.warnDeprecation("ptrDrawableTop", "ptrDrawableStart");
imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableTop);
}
break;
case PULL_FROM_END:
if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableEnd)) {
imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableEnd);
} else if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableBottom)) {
Utils.warnDeprecation("ptrDrawableBottom", "ptrDrawableEnd");
imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableBottom);
}
break;
}
// If we don't have a user defined drawable, load the default
if (null == imageDrawable) {
imageDrawable = context.getResources().getDrawable(getDefaultDrawableResId());
}
// Set Drawable, and save width/height
setLoadingDrawable(imageDrawable);
reset();
}
// Fixes the layout height (used by the owning view to size the header)
public final void setHeight(int height) {
ViewGroup.LayoutParams lp = (ViewGroup.LayoutParams) getLayoutParams();
lp.height = height;
requestLayout();
}
// Fixes the layout width (horizontal-orientation counterpart of setHeight)
public final void setWidth(int width) {
ViewGroup.LayoutParams lp = (ViewGroup.LayoutParams) getLayoutParams();
lp.width = width;
requestLayout();
}
// Measured size of the inner content along the scroll direction
public final int getContentSize() {
switch (mScrollDirection) {
case HORIZONTAL:
return mInnerLayout.getWidth();
case VERTICAL:
default:
return mInnerLayout.getHeight();
}
}
// Makes every child INVISIBLE (keeping its layout space); counterpart of showInvisibleViews()
public final void hideAllViews() {
if (View.VISIBLE == mHeaderText.getVisibility()) {
mHeaderText.setVisibility(View.INVISIBLE);
}
if (View.VISIBLE == mHeaderProgress.getVisibility()) {
mHeaderProgress.setVisibility(View.INVISIBLE);
}
if (View.VISIBLE == mHeaderImage.getVisibility()) {
mHeaderImage.setVisibility(View.INVISIBLE);
}
if (View.VISIBLE == mSubHeaderText.getVisibility()) {
mSubHeaderText.setVisibility(View.INVISIBLE);
}
}
// Forwards the pull progress to the subclass unless the drawable animates itself
public final void onPull(float scaleOfLayout) {
if (!mUseIntrinsicAnimation) {
onPullImpl(scaleOfLayout);
}
}
// State change: user is pulling but has not yet passed the release threshold
public final void pullToRefresh() {
if (null != mHeaderText) {
mHeaderText.setText(mPullLabel);
}
// Now call the callback
pullToRefreshImpl();
}
// State change: refresh is in progress
public final void refreshing() {
if (null != mHeaderText) {
mHeaderText.setText(mRefreshingLabel);
}
if (mUseIntrinsicAnimation) {
((AnimationDrawable) mHeaderImage.getDrawable()).start();
} else {
// Now call the callback
refreshingImpl();
}
if (null != mSubHeaderText) {
mSubHeaderText.setVisibility(View.GONE);
}
}
// State change: pull has passed the threshold; releasing now will trigger a refresh
public final void releaseToRefresh() {
if (null != mHeaderText) {
mHeaderText.setText(mReleaseLabel);
}
// Now call the callback
releaseToRefreshImpl();
}
// Returns the layout to its idle state (labels, image visibility, stopped animations)
public final void reset() {
if (null != mHeaderText) {
mHeaderText.setText(mPullLabel);
}
mHeaderImage.setVisibility(View.VISIBLE);
if (mUseIntrinsicAnimation) {
((AnimationDrawable) mHeaderImage.getDrawable()).stop();
} else {
// Now call the callback
resetImpl();
}
if (null != mSubHeaderText) {
if (TextUtils.isEmpty(mSubHeaderText.getText())) {
mSubHeaderText.setVisibility(View.GONE);
} else {
mSubHeaderText.setVisibility(View.VISIBLE);
}
}
}
@Override
public void setLastUpdatedLabel(CharSequence label) {
setSubHeaderText(label);
}
public final void setLoadingDrawable(Drawable imageDrawable) {
// Set Drawable
mHeaderImage.setImageDrawable(imageDrawable);
// AnimationDrawables animate themselves; remember that so the custom callbacks are skipped
mUseIntrinsicAnimation = (imageDrawable instanceof AnimationDrawable);
// Now call the callback
onLoadingDrawableSet(imageDrawable);
}
public void setPullLabel(CharSequence pullLabel) {
mPullLabel = pullLabel;
}
public void setRefreshingLabel(CharSequence refreshingLabel) {
mRefreshingLabel = refreshingLabel;
}
public void setReleaseLabel(CharSequence releaseLabel) {
mReleaseLabel = releaseLabel;
}
@Override
public void setTextTypeface(Typeface tf) {
mHeaderText.setTypeface(tf);
}
// Makes every previously-hidden child VISIBLE again; counterpart of hideAllViews()
public final void showInvisibleViews() {
if (View.INVISIBLE == mHeaderText.getVisibility()) {
mHeaderText.setVisibility(View.VISIBLE);
}
if (View.INVISIBLE == mHeaderProgress.getVisibility()) {
mHeaderProgress.setVisibility(View.VISIBLE);
}
if (View.INVISIBLE == mHeaderImage.getVisibility()) {
mHeaderImage.setVisibility(View.VISIBLE);
}
if (View.INVISIBLE == mSubHeaderText.getVisibility()) {
mSubHeaderText.setVisibility(View.VISIBLE);
}
}
/**
* Callbacks for derivative Layouts
*/
protected abstract int getDefaultDrawableResId();
protected abstract void onLoadingDrawableSet(Drawable imageDrawable);
protected abstract void onPullImpl(float scaleOfLayout);
protected abstract void pullToRefreshImpl();
protected abstract void refreshingImpl();
protected abstract void releaseToRefreshImpl();
protected abstract void resetImpl();
// Sets the sub-label, hiding it entirely when the text is empty
private void setSubHeaderText(CharSequence label) {
if (null != mSubHeaderText) {
if (TextUtils.isEmpty(label)) {
mSubHeaderText.setVisibility(View.GONE);
} else {
mSubHeaderText.setText(label);
mSubHeaderText.setVisibility(View.VISIBLE);
}
}
}
private void setSubTextAppearance(int value) {
if (null != mSubHeaderText) {
mSubHeaderText.setTextAppearance(getContext(), value);
}
}
private void setSubTextColor(ColorStateList color) {
if (null != mSubHeaderText) {
mSubHeaderText.setTextColor(color);
}
}
// Applies the appearance style to both labels
private void setTextAppearance(int value) {
if (null != mHeaderText) {
mHeaderText.setTextAppearance(getContext(), value);
}
if (null != mSubHeaderText) {
mSubHeaderText.setTextAppearance(getContext(), value);
}
}
// Applies the color list to both labels
private void setTextColor(ColorStateList color) {
if (null != mHeaderText) {
mHeaderText.setTextColor(color);
}
if (null != mSubHeaderText) {
mSubHeaderText.setTextColor(color);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.service;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.util.concurrent.Futures;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.cache.*;
import org.apache.cassandra.cache.AutoSavingCache.CacheSerializer;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.context.CounterContext;
import org.apache.cassandra.db.filter.*;
import org.apache.cassandra.db.lifecycle.SSTableSet;
import org.apache.cassandra.db.partitions.CachedBTreePartition;
import org.apache.cassandra.db.partitions.CachedPartition;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.DataInputPlus;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Pair;
public class CacheService implements CacheServiceMBean
{
private static final Logger logger = LoggerFactory.getLogger(CacheService.class);
public static final String MBEAN_NAME = "org.apache.cassandra.db:type=Caches";
// The three caches managed by this service; each carries its display name
// (returned by toString()), used wherever the cache is referred to by name.
public enum CacheType
{
KEY_CACHE("KeyCache"),
ROW_CACHE("RowCache"),
COUNTER_CACHE("CounterCache");
private final String name;
CacheType(String typeName)
{
name = typeName;
}
public String toString()
{
return name;
}
}
public final static CacheService instance = new CacheService();
public final AutoSavingCache<KeyCacheKey, RowIndexEntry> keyCache;
public final AutoSavingCache<RowCacheKey, IRowCacheEntry> rowCache;
public final AutoSavingCache<CounterCacheKey, ClockAndCount> counterCache;
// Singleton constructor: registers this service as an MBean, then builds the three
// caches. Registration failure is fatal — the service cannot exist unmonitored.
private CacheService()
{
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
try
{
mbs.registerMBean(this, new ObjectName(MBEAN_NAME));
}
catch (Exception e)
{
throw new RuntimeException(e);
}
keyCache = initKeyCache();
rowCache = initRowCache();
counterCache = initCounterCache();
}
/**
* Builds the key cache (a Caffeine cache wrapped in an {@link AutoSavingCache}) sized from
* configuration, and schedules its periodic save.
*
* @return auto saving cache object
*/
private AutoSavingCache<KeyCacheKey, RowIndexEntry> initKeyCache()
{
logger.info("Initializing key cache with capacity of {} MBs.", DatabaseDescriptor.getKeyCacheSizeInMB());
long keyCacheInMemoryCapacity = DatabaseDescriptor.getKeyCacheSizeInMB() * 1024 * 1024;
// NOTE(review): this historical sizing comment ("48 = 40 bytes average key + 8 bytes value",
// singleton weigher) may predate the CaffeineCache-based implementation below — verify.
ICache<KeyCacheKey, RowIndexEntry> kc;
kc = CaffeineCache.create(keyCacheInMemoryCapacity);
AutoSavingCache<KeyCacheKey, RowIndexEntry> keyCache = new AutoSavingCache<>(kc, CacheType.KEY_CACHE, new KeyCacheSerializer());
int keyCacheKeysToSave = DatabaseDescriptor.getKeyCacheKeysToSave();
keyCache.scheduleSaving(DatabaseDescriptor.getKeyCacheSavePeriod(), keyCacheKeysToSave);
return keyCache;
}
/**
* Builds the row cache using the configured {@code CacheProvider} implementation —
* or the no-op provider when the row cache is disabled (size configured as 0) —
* and schedules its periodic save.
*
* @return initialized row cache
* @throws RuntimeException if the provider class cannot be loaded or instantiated
*/
private AutoSavingCache<RowCacheKey, IRowCacheEntry> initRowCache()
{
logger.info("Initializing row cache with capacity of {} MBs", DatabaseDescriptor.getRowCacheSizeInMB());
CacheProvider<RowCacheKey, IRowCacheEntry> cacheProvider;
String cacheProviderClassName = DatabaseDescriptor.getRowCacheSizeInMB() > 0
? DatabaseDescriptor.getRowCacheClassName() : "org.apache.cassandra.cache.NopCacheProvider";
try
{
// unchecked: Class.forName cannot express the generic provider type; the cast is validated
// implicitly by the newInstance() assignment below
@SuppressWarnings("unchecked")
Class<CacheProvider<RowCacheKey, IRowCacheEntry>> cacheProviderClass =
(Class<CacheProvider<RowCacheKey, IRowCacheEntry>>) Class.forName(cacheProviderClassName);
cacheProvider = cacheProviderClass.newInstance();
}
catch (Exception e)
{
// Report the class we actually tried to load (which may be the no-op fallback,
// not the configured one) and preserve the original exception as the cause.
throw new RuntimeException("Cannot find configured row cache provider class " + cacheProviderClassName, e);
}
// cache object
ICache<RowCacheKey, IRowCacheEntry> rc = cacheProvider.create();
AutoSavingCache<RowCacheKey, IRowCacheEntry> rowCache = new AutoSavingCache<>(rc, CacheType.ROW_CACHE, new RowCacheSerializer());
int rowCacheKeysToSave = DatabaseDescriptor.getRowCacheKeysToSave();
rowCache.scheduleSaving(DatabaseDescriptor.getRowCacheSavePeriod(), rowCacheKeysToSave);
return rowCache;
}
// Builds the counter cache (Caffeine-backed AutoSavingCache) sized from configuration
// and schedules its periodic save, logging the schedule it chose.
private AutoSavingCache<CounterCacheKey, ClockAndCount> initCounterCache()
{
logger.info("Initializing counter cache with capacity of {} MBs", DatabaseDescriptor.getCounterCacheSizeInMB());
long capacity = DatabaseDescriptor.getCounterCacheSizeInMB() * 1024 * 1024;
AutoSavingCache<CounterCacheKey, ClockAndCount> cache =
new AutoSavingCache<>(CaffeineCache.create(capacity),
CacheType.COUNTER_CACHE,
new CounterCacheSerializer());
int keysToSave = DatabaseDescriptor.getCounterCacheKeysToSave();
// Integer.MAX_VALUE is the sentinel for "save every key"
logger.info("Scheduling counter cache save to every {} seconds (going to save {} keys).",
DatabaseDescriptor.getCounterCacheSavePeriod(),
keysToSave == Integer.MAX_VALUE ? "all" : keysToSave);
cache.scheduleSaving(DatabaseDescriptor.getCounterCacheSavePeriod(), keysToSave);
return cache;
}
// MBean-exposed save-period accessors (this class is registered under MBEAN_NAME).
// Each setter validates, persists the new period and reschedules the periodic save task.
public int getRowCacheSavePeriodInSeconds()
{
return DatabaseDescriptor.getRowCacheSavePeriod();
}
public void setRowCacheSavePeriodInSeconds(int seconds)
{
if (seconds < 0)
throw new RuntimeException("RowCacheSavePeriodInSeconds must be non-negative.");
DatabaseDescriptor.setRowCacheSavePeriod(seconds);
rowCache.scheduleSaving(seconds, DatabaseDescriptor.getRowCacheKeysToSave());
}
public int getKeyCacheSavePeriodInSeconds()
{
return DatabaseDescriptor.getKeyCacheSavePeriod();
}
public void setKeyCacheSavePeriodInSeconds(int seconds)
{
if (seconds < 0)
throw new RuntimeException("KeyCacheSavePeriodInSeconds must be non-negative.");
DatabaseDescriptor.setKeyCacheSavePeriod(seconds);
keyCache.scheduleSaving(seconds, DatabaseDescriptor.getKeyCacheKeysToSave());
}
public int getCounterCacheSavePeriodInSeconds()
{
return DatabaseDescriptor.getCounterCacheSavePeriod();
}
public void setCounterCacheSavePeriodInSeconds(int seconds)
{
if (seconds < 0)
throw new RuntimeException("CounterCacheSavePeriodInSeconds must be non-negative.");
DatabaseDescriptor.setCounterCacheSavePeriod(seconds);
counterCache.scheduleSaving(seconds, DatabaseDescriptor.getCounterCacheKeysToSave());
}
// MBean-exposed keys-to-save accessors: each setter validates, persists the new count
// and reschedules the save task keeping the currently configured period.
public int getRowCacheKeysToSave()
{
return DatabaseDescriptor.getRowCacheKeysToSave();
}
public void setRowCacheKeysToSave(int count)
{
if (count < 0)
throw new RuntimeException("RowCacheKeysToSave must be non-negative.");
DatabaseDescriptor.setRowCacheKeysToSave(count);
rowCache.scheduleSaving(getRowCacheSavePeriodInSeconds(), count);
}
public int getKeyCacheKeysToSave()
{
return DatabaseDescriptor.getKeyCacheKeysToSave();
}
public void setKeyCacheKeysToSave(int count)
{
if (count < 0)
throw new RuntimeException("KeyCacheKeysToSave must be non-negative.");
DatabaseDescriptor.setKeyCacheKeysToSave(count);
keyCache.scheduleSaving(getKeyCacheSavePeriodInSeconds(), count);
}
public int getCounterCacheKeysToSave()
{
return DatabaseDescriptor.getCounterCacheKeysToSave();
}
public void setCounterCacheKeysToSave(int count)
{
if (count < 0)
throw new RuntimeException("CounterCacheKeysToSave must be non-negative.");
DatabaseDescriptor.setCounterCacheKeysToSave(count);
counterCache.scheduleSaving(getCounterCacheSavePeriodInSeconds(), count);
}
// Invalidation: the no-arg variants clear a whole cache; the *ForCf variants remove only
// the entries belonging to the given table, using Iterator.remove() so the underlying
// cache is mutated safely during iteration.
public void invalidateKeyCache()
{
keyCache.clear();
}
public void invalidateKeyCacheForCf(TableMetadata tableMetadata)
{
Iterator<KeyCacheKey> keyCacheIterator = keyCache.keyIterator();
while (keyCacheIterator.hasNext())
{
KeyCacheKey key = keyCacheIterator.next();
if (key.sameTable(tableMetadata))
keyCacheIterator.remove();
}
}
public void invalidateRowCache()
{
rowCache.clear();
}
public void invalidateRowCacheForCf(TableMetadata tableMetadata)
{
Iterator<RowCacheKey> rowCacheIterator = rowCache.keyIterator();
while (rowCacheIterator.hasNext())
{
RowCacheKey key = rowCacheIterator.next();
if (key.sameTable(tableMetadata))
rowCacheIterator.remove();
}
}
public void invalidateCounterCacheForCf(TableMetadata tableMetadata)
{
Iterator<CounterCacheKey> counterCacheIterator = counterCache.keyIterator();
while (counterCacheIterator.hasNext())
{
CounterCacheKey key = counterCacheIterator.next();
if (key.sameTable(tableMetadata))
counterCacheIterator.remove();
}
}
public void invalidateCounterCache()
{
counterCache.clear();
}
// Capacity setters: the MB value from the caller is converted to bytes before being
// applied to the underlying cache.
public void setRowCacheCapacityInMB(long capacity)
{
if (capacity < 0)
throw new RuntimeException("capacity should not be negative.");
rowCache.setCapacity(capacity * 1024 * 1024);
}
public void setKeyCacheCapacityInMB(long capacity)
{
if (capacity < 0)
throw new RuntimeException("capacity should not be negative.");
keyCache.setCapacity(capacity * 1024 * 1024);
}
public void setCounterCacheCapacityInMB(long capacity)
{
if (capacity < 0)
throw new RuntimeException("capacity should not be negative.");
counterCache.setCapacity(capacity * 1024 * 1024);
}
/**
* Submits an asynchronous save for all three caches and blocks until every save completes.
*
* @throws ExecutionException if one of the save tasks fails
* @throws InterruptedException if interrupted while waiting on the save tasks
*/
public void saveCaches() throws ExecutionException, InterruptedException
{
logger.debug("submitting cache saves");
final List<Future<?>> pendingSaves = new ArrayList<>(3);
pendingSaves.add(keyCache.submitWrite(DatabaseDescriptor.getKeyCacheKeysToSave()));
pendingSaves.add(rowCache.submitWrite(DatabaseDescriptor.getRowCacheKeysToSave()));
pendingSaves.add(counterCache.submitWrite(DatabaseDescriptor.getCounterCacheKeysToSave()));
FBUtilities.waitOnFutures(pendingSaves);
logger.debug("cache saves completed");
}
/**
 * Persists counter cache keys to disk and rebuilds entries on load.
 * Only the key is saved; on deserialization the current counter value is
 * re-read from the store asynchronously on the READ stage.
 */
public static class CounterCacheSerializer implements CacheSerializer<CounterCacheKey, ClockAndCount>
{
    /** Writes table id, index name (empty string when none) and the cache key. */
    public void serialize(CounterCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
    {
        assert(cfs.metadata().isCounter());
        TableMetadata tableMetadata = cfs.metadata();
        tableMetadata.id.serialize(out);
        out.writeUTF(tableMetadata.indexName().orElse(""));
        key.write(out);
    }
    /**
     * Reads a key and schedules an asynchronous re-read of its counter value.
     * Returns null when the CFS is gone, not a counter table, or its counter
     * cache is disabled.
     */
    public Future<Pair<CounterCacheKey, ClockAndCount>> deserialize(DataInputPlus in, final ColumnFamilyStore cfs) throws IOException
    {
        //Keyspace and CF name are deserialized by AutoSaving cache and used to fetch the CFS provided as a
        //parameter so they aren't deserialized here, even though they are serialized by this serializer
        if (cfs == null)
            return null;
        final CounterCacheKey cacheKey = CounterCacheKey.read(cfs.metadata(), in);
        if (!cfs.metadata().isCounter() || !cfs.isCounterCacheEnabled())
            return null;
        // Reading the counter value may hit disk, so run it off the calling thread.
        return StageManager.getStage(Stage.READ).submit(new Callable<Pair<CounterCacheKey, ClockAndCount>>()
        {
            public Pair<CounterCacheKey, ClockAndCount> call() throws Exception
            {
                ByteBuffer value = cacheKey.readCounterValue(cfs);
                return value == null
                     ? null
                     : Pair.create(cacheKey, CounterContext.instance().getLocalClockAndCount(value));
            }
        });
    }
}
/**
 * Persists row cache keys to disk and rebuilds entries on load.
 * Only the partition key is saved; on deserialization the partition is
 * re-read (up to the configured rows-per-partition) on the READ stage.
 */
public static class RowCacheSerializer implements CacheSerializer<RowCacheKey, IRowCacheEntry>
{
    /** Writes table id, index name (empty string when none) and the raw partition key. */
    public void serialize(RowCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
    {
        assert(!cfs.isIndex());//Shouldn't have row cache entries for indexes
        TableMetadata tableMetadata = cfs.metadata();
        tableMetadata.id.serialize(out);
        out.writeUTF(tableMetadata.indexName().orElse(""));
        ByteBufferUtil.writeWithLength(key.key, out);
    }
    /**
     * Reads a partition key and schedules an asynchronous read that rebuilds
     * the cached partition. Returns null when the CFS is gone or its row
     * cache is disabled.
     */
    public Future<Pair<RowCacheKey, IRowCacheEntry>> deserialize(DataInputPlus in, final ColumnFamilyStore cfs) throws IOException
    {
        //Keyspace and CF name are deserialized by AutoSaving cache and used to fetch the CFS provided as a
        //parameter so they aren't deserialized here, even though they are serialized by this serializer
        final ByteBuffer buffer = ByteBufferUtil.readWithLength(in);
        if (cfs == null || !cfs.isRowCacheEnabled())
            return null;
        final int rowsToCache = cfs.metadata().params.caching.rowsPerPartitionToCache();
        assert(!cfs.isIndex());//Shouldn't have row cache entries for indexes
        // Rebuilding the partition requires a full read, so run it off the calling thread.
        return StageManager.getStage(Stage.READ).submit(new Callable<Pair<RowCacheKey, IRowCacheEntry>>()
        {
            public Pair<RowCacheKey, IRowCacheEntry> call() throws Exception
            {
                DecoratedKey key = cfs.decorateKey(buffer);
                int nowInSec = FBUtilities.nowInSeconds();
                SinglePartitionReadCommand cmd = SinglePartitionReadCommand.fullPartitionRead(cfs.metadata(), nowInSec, key);
                try (ReadExecutionController controller = cmd.executionController(); UnfilteredRowIterator iter = cmd.queryMemtableAndDisk(cfs, controller))
                {
                    CachedPartition toCache = CachedBTreePartition.create(DataLimits.cqlLimits(rowsToCache).filter(iter, nowInSec, true), nowInSec);
                    return Pair.create(new RowCacheKey(cfs.metadata(), key), toCache);
                }
            }
        });
    }
}
/**
 * Persists key cache entries (partition key -> sstable row index entry).
 * Unlike the other serializers, the cached value itself is written to disk;
 * on load the entry is matched back to a live sstable by generation.
 */
public static class KeyCacheSerializer implements CacheSerializer<KeyCacheKey, RowIndexEntry>
{
    /**
     * Writes the table id, index name, partition key, sstable generation and
     * the serialized index entry. Skips keys already evicted from the cache.
     */
    public void serialize(KeyCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
    {
        RowIndexEntry entry = CacheService.instance.keyCache.getInternal(key);
        if (entry == null)
            return;
        TableMetadata tableMetadata = cfs.metadata();
        tableMetadata.id.serialize(out);
        out.writeUTF(tableMetadata.indexName().orElse(""));
        ByteBufferUtil.writeWithLength(key.key, out);
        out.writeInt(key.desc.generation);
        out.writeBoolean(true);
        SerializationHeader header = new SerializationHeader(false, cfs.metadata(), cfs.metadata().regularAndStaticColumns(), EncodingStats.NO_STATS);
        key.desc.getFormat().getIndexSerializer(cfs.metadata(), key.desc.version, header).serializeForCache(entry, out);
    }
    /**
     * Reads one saved entry. Returns null (after skipping the payload) when
     * the CFS is gone, the key cache is disabled, or the sstable generation
     * no longer exists.
     *
     * @throws IOException if the saved key length exceeds the unsigned-short
     *                     maximum (treated as cache corruption)
     */
    public Future<Pair<KeyCacheKey, RowIndexEntry>> deserialize(DataInputPlus input, ColumnFamilyStore cfs) throws IOException
    {
        //Keyspace and CF name are deserialized by AutoSaving cache and used to fetch the CFS provided as a
        //parameter so they aren't deserialized here, even though they are serialized by this serializer
        int keyLength = input.readInt();
        if (keyLength > FBUtilities.MAX_UNSIGNED_SHORT)
        {
            throw new IOException(String.format("Corrupted key cache. Key length of %d is longer than maximum of %d",
                                                keyLength, FBUtilities.MAX_UNSIGNED_SHORT));
        }
        ByteBuffer key = ByteBufferUtil.read(input, keyLength);
        int generation = input.readInt();
        input.readBoolean(); // backwards compatibility for "promoted indexes" boolean
        SSTableReader reader;
        if (cfs == null || !cfs.isKeyCacheEnabled() || (reader = findDesc(generation, cfs.getSSTables(SSTableSet.CANONICAL))) == null)
        {
            // The sstable doesn't exist anymore, so we can't be sure of the exact version and assume its the current version. The only case where we'll be
            // wrong is during upgrade, in which case we fail at deserialization. This is not a huge deal however since 1) this is unlikely enough that
            // this won't affect many users (if any) and only once, 2) this doesn't prevent the node from starting and 3) CASSANDRA-10219 shows that this
            // part of the code has been broken for a while without anyone noticing (it is, btw, still broken until CASSANDRA-10219 is fixed).
            RowIndexEntry.Serializer.skipForCache(input);
            return null;
        }
        RowIndexEntry.IndexSerializer<?> indexSerializer = reader.descriptor.getFormat().getIndexSerializer(reader.metadata(),
                                                                                                            reader.descriptor.version,
                                                                                                            reader.header);
        RowIndexEntry<?> entry = indexSerializer.deserializeForCache(input);
        return Futures.immediateFuture(Pair.create(new KeyCacheKey(cfs.metadata(), reader.descriptor, key), entry));
    }
    /** Returns the sstable with the given generation, or null if none matches. */
    private SSTableReader findDesc(int generation, Iterable<SSTableReader> collection)
    {
        for (SSTableReader sstable : collection)
        {
            if (sstable.descriptor.generation == generation)
                return sstable;
        }
        return null;
    }
}
}
| |
package org.hashids;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Hashids designed for Generating short hashes from numbers (like YouTube and Bitly), obfuscate
* database IDs, use them as forgotten password hashes, invitation codes, store shard numbers
* This is implementation of http://hashids.org v0.3.3 version.
*
* @author fanweixiao <fanweixiao@gmail.com>
* @since 0.3.3
*/
public class Hashids {
    /** Default character pool used when the caller does not supply an alphabet. */
    private static final String DEFAULT_ALPHABET = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
    // Secret salt mixed into every shuffle; empty string means unsalted.
    private String salt = "";
    // Working alphabet; separator and guard characters are carved out of it
    // in the constructor.
    private String alphabet = "";
    // Characters used to separate encoded numbers within one hash.
    private String seps = "cfhistuCFHISTU";
    // Hashes shorter than this are padded with guard/alphabet characters.
    private int minHashLength = 0;
    // Characters used to pad hashes up to minHashLength.
    private String guards;

    /** Creates an unsalted instance with no minimum hash length. */
    public Hashids() {
        this("");
    }

    /** Creates an instance with the given salt and no minimum hash length. */
    public Hashids(String salt) {
        this(salt, 0);
    }

    /** Creates an instance with the given salt, minimum length and default alphabet. */
    public Hashids(String salt, int minHashLength) {
        this(salt, minHashLength, DEFAULT_ALPHABET);
    }

    /**
     * Creates a fully configured instance.
     *
     * @param salt          secret used to shuffle the alphabet (may be empty)
     * @param minHashLength minimum hash length; negative values are treated as 0
     * @param alphabet      characters to build hashes from; must contain at least
     *                      16 unique characters and no spaces
     * @throws IllegalArgumentException if the alphabet is too small or has spaces
     */
    public Hashids(String salt, int minHashLength, String alphabet) {
        this.salt = salt;
        if(minHashLength < 0)
            this.minHashLength = 0;
        else
            this.minHashLength = minHashLength;
        this.alphabet = alphabet;
        // Drop duplicate characters, keeping the first occurrence of each.
        String uniqueAlphabet = "";
        for(int i = 0; i < this.alphabet.length(); i++){
            if(!uniqueAlphabet.contains("" + this.alphabet.charAt(i))){
                uniqueAlphabet += "" + this.alphabet.charAt(i);
            }
        }
        this.alphabet = uniqueAlphabet;
        int minAlphabetLength = 16;
        if(this.alphabet.length() < minAlphabetLength){
            throw new IllegalArgumentException("alphabet must contain at least " + minAlphabetLength + " unique characters");
        }
        if(this.alphabet.contains(" ")){
            throw new IllegalArgumentException("alphabet cannot contains spaces");
        }
        // seps should contain only characters present in alphabet;
        // alphabet should not contain seps.  Characters to remove are first
        // blanked with a space, then stripped below.
        for(int i = 0; i < this.seps.length(); i++){
            int j = this.alphabet.indexOf(this.seps.charAt(i));
            if(j == -1){
                this.seps = this.seps.substring(0, i) + " " + this.seps.substring(i + 1);
            } else {
                this.alphabet = this.alphabet.substring(0, j) + " " + this.alphabet.substring(j + 1);
            }
        }
        this.alphabet = this.alphabet.replaceAll("\\s+", "");
        this.seps = this.seps.replaceAll("\\s+", "");
        this.seps = this.consistentShuffle(this.seps, this.salt);
        // Keep roughly one separator per 3.5 alphabet characters.
        // NOTE(review): the left-hand side is integer division (int/int), so the
        // comparison against 3.5 effectively tests for a truncated ratio >= 4 —
        // kept as-is, since changing it would alter generated hashes.
        double sepDiv = 3.5;
        if((this.seps.equals("")) || ((this.alphabet.length() / this.seps.length()) > sepDiv)){
            int seps_len = (int)Math.ceil(this.alphabet.length() / sepDiv);
            if(seps_len == 1){
                seps_len++;
            }
            if(seps_len > this.seps.length()){
                // Not enough separators: promote leading alphabet chars to separators.
                int diff = seps_len - this.seps.length();
                this.seps += this.alphabet.substring(0, diff);
                this.alphabet = this.alphabet.substring(diff);
            } else {
                this.seps = this.seps.substring(0, seps_len);
            }
        }
        this.alphabet = this.consistentShuffle(this.alphabet, this.salt);
        // use double to round up
        int guardDiv = 12;
        int guardCount = (int)Math.ceil((double)this.alphabet.length() / guardDiv);
        // Guards are taken from seps when the alphabet is very small, otherwise
        // from the front of the alphabet.
        if(this.alphabet.length() < 3){
            this.guards = this.seps.substring(0, guardCount);
            this.seps = this.seps.substring(guardCount);
        } else {
            this.guards = this.alphabet.substring(0, guardCount);
            this.alphabet = this.alphabet.substring(guardCount);
        }
    }

    /**
     * @deprecated
     * should use encode() since v1.0
     */
    @Deprecated
    @SuppressWarnings("unused")
    public String encrypt(long... numbers){
        return encode(numbers);
    }

    /**
     * @deprecated
     * should use decode() since v1.0
     */
    @Deprecated
    @SuppressWarnings("unused")
    public long[] decrypt(String hash){
        return decode(hash);
    }

    /**
     * @deprecated
     * should use encodeHex() since v1.0
     */
    @Deprecated
    @SuppressWarnings("unused")
    public String encryptHex(String hexa){
        return encodeHex(hexa);
    }

    /**
     * @deprecated
     * should use decodeHex() since v1.0
     */
    @Deprecated
    @SuppressWarnings("unused")
    public String decryptHex(String hash){
        return decodeHex(hash);
    }

    /**
     * Encrypt numbers to string.
     *
     * @param numbers the numbers to encrypt
     * @return the encrypted string; empty when no numbers are given
     * @throws IllegalArgumentException if a number exceeds 2^53
     */
    public String encode(long... numbers){
        for (long number : numbers) {
            // 9007199254740992 == 2^53; presumably the shared upstream hashids
            // limit for JavaScript interoperability — TODO confirm against spec.
            if (number > 9007199254740992L) {
                throw new IllegalArgumentException("number can not be greater than 9007199254740992L");
            }
        }
        String retval = "";
        if(numbers.length == 0) {
            return retval;
        }
        return this._encode(numbers);
    }

    /**
     * Decrypt string to numbers.
     *
     * @param hash the encrypted string
     * @return decrypted numbers; empty array for an empty or non-round-tripping hash
     */
    public long[] decode(String hash){
        long[] ret = {};
        if(hash.equals(""))
            return ret;
        return this._decode(hash, this.alphabet);
    }

    /**
     * Encrypt a hexadecimal string.
     *
     * @param hexa the hex string to encrypt
     * @return the encrypted string; empty when the input is not valid hex
     */
    public String encodeHex(String hexa){
        if(!hexa.matches("^[0-9a-fA-F]+$"))
            return "";
        List<Long> matched = new ArrayList<Long>();
        // Split into chunks of at most 12 hex digits; each chunk is prefixed
        // with "1" so leading zeros survive the long round-trip (decodeHex
        // strips the prefix with substring(1)).
        Matcher matcher = Pattern.compile("[\\w\\W]{1,12}").matcher(hexa);
        while (matcher.find())
            matched.add(Long.parseLong("1" + matcher.group(), 16));
        // conversion
        long[] result = new long[matched.size()];
        for(int i = 0; i < matched.size(); i++) result[i] = matched.get(i);
        return this._encode(result);
    }

    /**
     * Decrypt a string produced by {@link #encodeHex(String)}.
     *
     * @param hash the encrypted string
     * @return the decrypted hexadecimal string
     */
    public String decodeHex(String hash){
        String result = "";
        long[] numbers = this.decode(hash);
        for (long number : numbers) {
            // Drop the leading "1" sentinel added by encodeHex.
            result += Long.toHexString(number).substring(1);
        }
        return result;
    }

    /** Core encoder; expects at least one number (guarded by encode()). */
    private String _encode(long... numbers){
        // Seed derived from all inputs; picks the "lottery" first character.
        int numberHashInt = 0;
        for(int i = 0; i < numbers.length; i++){
            numberHashInt += (numbers[i] % (i+100));
        }
        String alphabet = this.alphabet;
        char ret = alphabet.toCharArray()[numberHashInt % alphabet.length()];
        //char lottery = ret;
        long num;
        int sepsIndex, guardIndex;
        String buffer, ret_str = ret + "";
        char guard;
        for(int i = 0; i < numbers.length; i++){
            num = numbers[i];
            // Re-shuffle the alphabet per number, keyed on lottery char + salt.
            buffer = ret + this.salt + alphabet;
            alphabet = this.consistentShuffle(alphabet, buffer.substring(0, alphabet.length()));
            String last = this.hash(num, alphabet);
            ret_str += last;
            if(i + 1 < numbers.length){
                // Insert a separator so _decode can split the numbers back apart.
                num %= ((int)last.toCharArray()[0] + i);
                sepsIndex = (int)(num % this.seps.length());
                ret_str += this.seps.toCharArray()[sepsIndex];
            }
        }
        // Pad with guard characters (front, then back) toward minHashLength.
        if(ret_str.length() < this.minHashLength){
            guardIndex = (numberHashInt + (int)(ret_str.toCharArray()[0])) % this.guards.length();
            guard = this.guards.toCharArray()[guardIndex];
            ret_str = guard + ret_str;
            if(ret_str.length() < this.minHashLength){
                guardIndex = (numberHashInt + (int)(ret_str.toCharArray()[2])) % this.guards.length();
                guard = this.guards.toCharArray()[guardIndex];
                ret_str += guard;
            }
        }
        // Still short: wrap with alphabet halves, trimming any excess evenly.
        int halfLen = alphabet.length() / 2;
        while(ret_str.length() < this.minHashLength){
            alphabet = this.consistentShuffle(alphabet, alphabet);
            ret_str = alphabet.substring(halfLen) + ret_str + alphabet.substring(0, halfLen);
            int excess = ret_str.length() - this.minHashLength;
            if(excess > 0){
                int start_pos = excess / 2;
                ret_str = ret_str.substring(start_pos, start_pos + this.minHashLength);
            }
        }
        return ret_str;
    }

    /**
     * Core decoder.  Strips guards, recovers the lottery character, splits on
     * separators and un-hashes each piece, then re-encodes the result to
     * verify the round trip (returning an empty array on mismatch).
     */
    private long[] _decode(String hash, String alphabet){
        ArrayList<Long> ret = new ArrayList<Long>();
        int i = 0;
        String regexp = "[" + this.guards + "]";
        String hashBreakdown = hash.replaceAll(regexp, " ");
        String[] hashArray = hashBreakdown.split(" ");
        // If guards were present, the payload is the middle segment.
        if(hashArray.length == 3 || hashArray.length == 2){
            i = 1;
        }
        hashBreakdown = hashArray[i];
        char lottery = hashBreakdown.toCharArray()[0];
        hashBreakdown = hashBreakdown.substring(1);
        hashBreakdown = hashBreakdown.replaceAll("[" + this.seps + "]", " ");
        hashArray = hashBreakdown.split(" ");
        String subHash, buffer;
        for (String aHashArray : hashArray) {
            subHash = aHashArray;
            // Mirror the per-number alphabet shuffle performed by _encode.
            buffer = lottery + this.salt + alphabet;
            alphabet = this.consistentShuffle(alphabet, buffer.substring(0, alphabet.length()));
            ret.add(this.unhash(subHash, alphabet));
        }
        //transform from List<Long> to long[]
        long[] arr = new long[ret.size()];
        for(int k = 0; k < arr.length; k++){
            arr[k] = ret.get(k);
        }
        // Round-trip check: reject hashes that do not re-encode to themselves.
        if(!this._encode(arr).equals(hash)){
            arr = new long[0];
        }
        return arr;
    }

    /* Private methods */

    /**
     * Deterministic Fisher-Yates-style shuffle of {@code alphabet}, keyed by
     * {@code salt}.  Identical inputs always produce the identical ordering.
     */
    private String consistentShuffle(String alphabet, String salt){
        if(salt.length() <= 0)
            return alphabet;
        char[] arr = salt.toCharArray();
        int asc_val, j;
        char tmp;
        for(int i = alphabet.length() - 1, v = 0, p = 0; i > 0; i--, v++){
            v %= salt.length();
            asc_val = (int)arr[v];
            p += asc_val;
            j = (asc_val + v + p) % i;
            // Swap characters at positions i and j.
            tmp = alphabet.charAt(j);
            alphabet = alphabet.substring(0, j) + alphabet.charAt(i) + alphabet.substring(j + 1);
            alphabet = alphabet.substring(0, i) + tmp + alphabet.substring(i + 1);
        }
        return alphabet;
    }

    /** Converts a non-negative number to a string in the given alphabet (base-N). */
    private String hash(long input, String alphabet){
        String hash = "";
        int alphabetLen = alphabet.length();
        char[] arr = alphabet.toCharArray();
        do {
            hash = arr[(int)(input % alphabetLen)] + hash;
            input /= alphabetLen;
        } while(input > 0);
        return hash;
    }

    /** Inverse of {@link #hash(long, String)}: base-N string back to a number. */
    private Long unhash(String input, String alphabet){
        long number = 0, pos;
        char[] input_arr = input.toCharArray();
        for(int i = 0; i < input.length(); i++){
            pos = alphabet.indexOf(input_arr[i]);
            number += pos * Math.pow(alphabet.length(), input.length() - i - 1);
        }
        return number;
    }

    /**
     * Narrows a long to an int.
     *
     * @throws IllegalArgumentException if the value does not fit in an int
     */
    @SuppressWarnings("unused")
    public static int checkedCast(long value) {
        int result = (int) value;
        if (result != value) {
            // don't use checkArgument here, to avoid boxing
            throw new IllegalArgumentException("Out of range: " + value);
        }
        return result;
    }

    /**
     * Get version
     *
     * @return version
     */
    @SuppressWarnings("unused")
    public String getVersion() {
        return "1.0.0";
    }
}
| |
package com.cisco.app.dbmigrator.migratorapp.logging.codecs;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.bson.BsonTimestamp;
import org.bson.Document;
import org.bson.types.ObjectId;
import com.cisco.app.dbmigrator.migratorapp.constants.SyncConstants;
import com.cisco.app.dbmigrator.migratorapp.core.event.EventType;
import com.cisco.app.dbmigrator.migratorapp.core.event.MongoToOracleEvent;
import com.cisco.app.dbmigrator.migratorapp.core.event.MongoToOracleSyncEvent;
import com.cisco.app.dbmigrator.migratorapp.core.event.OracleParallelReadInfo;
import com.cisco.app.dbmigrator.migratorapp.core.event.OracleToMongoEvent;
import com.cisco.app.dbmigrator.migratorapp.core.event.OracleToMongoGridFsEvent;
import com.cisco.app.dbmigrator.migratorapp.core.event.OracleToMongoSyncEvent;
import com.cisco.app.dbmigrator.migratorapp.core.event.SyncEvent;
import com.cisco.app.dbmigrator.migratorapp.core.event.OracleToMongoSyncEvent.O2MSyncPollInfo;
import com.cisco.app.dbmigrator.migratorapp.core.map.ColumnAttrMapper;
import com.cisco.app.dbmigrator.migratorapp.core.map.MapType;
import com.cisco.app.dbmigrator.migratorapp.core.map.MongoToOracleMap;
import com.cisco.app.dbmigrator.migratorapp.core.map.OracleToMongoGridFsMap;
import com.cisco.app.dbmigrator.migratorapp.core.map.OracleToMongoMap;
import com.cisco.app.dbmigrator.migratorapp.core.map.SyncMap;
import com.cisco.app.dbmigrator.migratorapp.core.meta.mongo.MongoAttribute;
import com.cisco.app.dbmigrator.migratorapp.core.meta.mongo.MongoAttributeType;
import com.cisco.app.dbmigrator.migratorapp.core.meta.mongo.MongoEntity;
import com.cisco.app.dbmigrator.migratorapp.core.meta.mongo.MongoObject;
import com.cisco.app.dbmigrator.migratorapp.core.meta.oracle.NodeGroup;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.JoinType;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.JoinedTable;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.Literal;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.MatchAble;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.OracleColumn;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.OracleTable;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.SqlLiteralFactory;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.sqlcomponents.MatchOperation;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.sqlcomponents.OperationsFactory;
import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.sqlcomponents.SQLFilters;
/**
* Class with methods to create Java object event equivalent to jsonString or
* Document structure
*
* @author pnilayam
*
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class SyncMapAndEventDecoder {
// Tables decoded so far, keyed by table alias, so decodeColumn can attach
// each column to its owning table.
private Map<String, OracleTable> referredTables = new HashMap<String, OracleTable>();
private static final Logger logger = Logger.getLogger(SyncMapAndEventDecoder.class);
// Rank assigned to tables in decode order (incremented by decodeTable).
private int tableRank=0;
/**
 * Builds a SQL literal from the document's LITERAL_VALUE/LITERAL_TYPE fields,
 * or returns {@code null} when no expression type is present.
 */
private Literal decodeLiteral(Document document) {
    String exprType = document.getString(SyncAttrs.EXPRESSION_TYPE);
    if (exprType == null || exprType.isEmpty()) {
        return null;
    }
    return SqlLiteralFactory.getLiteral(
            document.get(SyncAttrs.LITERAL_VALUE),
            document.getString(SyncAttrs.LITERAL_TYPE));
}
/**
 * Decodes either side of a match operation: a column reference when the
 * expression type is COLUMN, otherwise a literal.  Returns {@code null}
 * when no expression type is present.
 */
private MatchAble decodeExpression(Document document) {
    String exprType = document.getString(SyncAttrs.EXPRESSION_TYPE);
    if (exprType == null || exprType.isEmpty()) {
        return null;
    }
    if (SyncAttrs.COLUMN.equalsIgnoreCase(exprType)) {
        return decodeColumn((Document) document.get(SyncAttrs.COLUMN_DATA));
    }
    return decodeLiteral(document);
}
/**
 * Decodes a single comparison: left-hand expression, optional right-hand
 * expression, and the SQL operator joining them.
 */
private MatchOperation decodeMatchOperation(Document document) {
    String sqlOperation = document.getString(SyncAttrs.SQL_OPERATION);
    MatchAble left = decodeExpression((Document) document.get(SyncAttrs.LEFT_HAND_EXPRESSION));
    Document rightDoc = (Document) document.get(SyncAttrs.RIGHT_HAND_EXPRESSION);
    // Unary operations carry no right-hand side.
    MatchAble right = (rightDoc == null) ? null : decodeExpression(rightDoc);
    return OperationsFactory.getMatchExpression(left, right, sqlOperation);
}
/**
 * Decodes a list of filter documents into a {@link SQLFilters} chain, each
 * operation combined with its logical operator (AND/OR as stored).
 *
 * @param filterList filter documents; may be null or empty, yielding an
 *                   empty filter set
 * @return never null
 */
private SQLFilters decodeFilter(List<Document> filterList) {
    logger.debug("decodeFilter called " + filterList);
    SQLFilters filters = new SQLFilters();
    // Fix: callers pass the raw result of document.get(FILTERS), which is null
    // when no filters were configured; iterating null threw an NPE here.
    if (filterList == null) {
        return filters;
    }
    for (Document doc : filterList) {
        String logicalOperator = doc.getString(SyncAttrs.LOGICAL_OPERATOR);
        MatchOperation operation = decodeMatchOperation(doc);
        filters.addOperation(operation, logicalOperator);
    }
    return filters;
}
/**
 * Decodes an Oracle column definition and, when its table alias is already
 * registered in {@link #referredTables}, attaches the column to that table.
 */
private OracleColumn decodeColumn(Document document) {
    logger.debug("Decode called for decodeColumn" + document);
    OracleColumn col = new OracleColumn();
    col.setColumnName(document.getString(SyncAttrs.COLUMN_NAME));
    col.setColumnAlias(document.getString(SyncAttrs.COLUMN_ALIAS));
    col.setColumnType(document.getString(SyncAttrs.COLUMN_TYPE));
    col.setTableAlias(document.getString(SyncAttrs.TABLE_ALIAS));
    col.setParentColumn(document.getBoolean(SyncAttrs.IS_PARENT_COLUMN, false));
    col.setNullable(document.getBoolean(SyncAttrs.IS_NULLABLE, false));
    OracleTable owner = referredTables.get(col.getTableAlias());
    if (owner != null) {
        owner.addColumn(col);
    }
    return col;
}
/**
 * Decodes an Oracle source-table definition, registers it by alias in
 * {@link #referredTables}, assigns it the next {@link #tableRank}, and
 * recursively decodes its joined tables.
 */
private OracleTable decodeTable(Document document) {
    logger.debug("Decode called for decodeTable" + document);
    OracleTable table = new OracleTable();
    table.setRank(++tableRank);
    table.setTableName(document.getString(SyncAttrs.TABLE_NAME));
    table.setTableAlias(document.getString(SyncAttrs.TABLE_ALIAS));
    // TODO : use decodeColumn once UI changes are done
    List<Object> keyColumns = (List<Object>) document.get(SyncAttrs.KEY_COLUMNS);
    if (keyColumns != null && !keyColumns.isEmpty()) {
        for (Object keyColumn : keyColumns) {
            if (keyColumn instanceof String) {
                // Legacy payloads carry bare column names; type defaults to VARCHAR2.
                table.addKeyColumn((String) keyColumn, "VARCHAR2");
            } else {
                OracleColumn column = decodeColumn((Document) keyColumn);
                table.addKeyColumn(column);
            }
        }
    }
    // table.setKeyColumns(keyColumns);
    // Register before decoding joins so nested columns can resolve this alias.
    referredTables.put(table.getTableAlias(), table);
    List<Document> joinedTableDocList = (List<Document>) document.get(SyncAttrs.JOINED_TABLES);
    if (joinedTableDocList != null) {
        for (Document joinedTableDoc : joinedTableDocList) {
            table.addJoinedTable(decodeJoinedTable(joinedTableDoc));
        }
    }
    return table;
}
/**
 * Decodes a joined-table definition, including nested joins and join filters.
 * The joined table is registered by alias in {@link #referredTables}.
 */
private JoinedTable decodeJoinedTable(Document document) {
    JoinedTable joinedTable = new JoinedTable();
    OracleTable oracleTable = new OracleTable();
    oracleTable.setTableName(document.getString(SyncAttrs.TABLE_NAME));
    oracleTable.setTableAlias(document.getString(SyncAttrs.TABLE_ALIAS));
    referredTables.put(document.getString(SyncAttrs.TABLE_ALIAS), oracleTable);
    joinedTable.setTable(oracleTable);
    joinedTable.setJoinType(JoinType.valueOf(document.getString(SyncAttrs.JOIN_TYPE)));
    List<Document> nestedJoinedTableDocList = (List<Document>) document.get(SyncAttrs.JOINED_TABLES);
    if (nestedJoinedTableDocList != null) {
        for (Document nestedJoinedTableDoc : nestedJoinedTableDocList) {
            oracleTable.addJoinedTable(decodeJoinedTable(nestedJoinedTableDoc));
        }
    }
    List<Document> filterList = (List<Document>) document.get(SyncAttrs.FILTERS);
    // Fix: a join document with no FILTERS array used to NPE inside
    // decodeFilter's for-each loop; every other caller guards against null.
    if (filterList != null) {
        joinedTable.setFilters(decodeFilter(filterList));
    }
    return joinedTable;
}
/**
 * Decodes a leaf Mongo attribute: name, type, identifier flag, default value
 * and (when present) its mapped Oracle column.
 */
private MongoAttribute decodeMongoAttribute(Document doc) {
    logger.debug("Decode called for MongoAttribute" + doc);
    MongoAttribute attr = new MongoAttribute();
    attr.setAttributeName(doc.getString(SyncAttrs.ATTRIBUTE_NAME));
    attr.setAttributeType(MongoAttributeType.valueOf(doc.getString(SyncAttrs.ATTRIBUTE_TYPE)));
    attr.setIdentifier(doc.getBoolean(SyncAttrs.IS_IDENTIFIER, false));
    Document columnDoc = (Document) doc.get(SyncAttrs.COLUMN_DATA);
    if (columnDoc != null && !columnDoc.isEmpty()) {
        attr.setMappedOracleColumn(decodeColumn(columnDoc));
    }
    attr.setDefaultValue(doc.getString(SyncAttrs.DEFAULT_VALUE));
    logger.debug("Decode Completed. Decode Object : " + attr);
    return attr;
}
/**
 * Recursively decodes a Mongo collection/array/auto node: its source Oracle
 * tables, child attributes (nested objects recurse back into this method),
 * filters, identifier attributes and referenced columns.
 */
private MongoObject decodeMongoObject(Document document) {
    logger.debug("Decode called for MongoObject" + document);
    MongoObject mongoObject = new MongoObject();
    mongoObject.setCollectionName(document.getString(SyncAttrs.ATTRIBUTE_NAME));
    String collectionType = document.getString(SyncAttrs.ATTRIBUTE_TYPE);
    // Missing type defaults to COLLECTION (top-level object).
    mongoObject.setCollectionType(
            collectionType != null ? collectionType : String.valueOf(MongoAttributeType.COLLECTION));
    List<Document> sourceTablesDocList = (List<Document>) document.get(SyncAttrs.SOURCE_TABLES);
    List<OracleTable> sourceTables = new ArrayList<OracleTable>();
    for (Document sourceTableDoc : sourceTablesDocList) {
        sourceTables.add(decodeTable(sourceTableDoc));
    }
    mongoObject.setSourceTables(sourceTables);
    List<Document> attributeDocList = (List<Document>) document.get(SyncAttrs.ATTRIBUTES);
    MongoEntity entity = null;
    for (Document attributeDoc : attributeDocList) {
        MongoAttributeType attributeTypeEnum = MongoAttributeType
                .valueOf(attributeDoc.getString(SyncAttrs.ATTRIBUTE_TYPE));
        // Container types recurse; scalar types decode as plain attributes.
        if (attributeTypeEnum.equals(MongoAttributeType.AUTO) || attributeTypeEnum.equals(MongoAttributeType.ARRAY)
                || attributeTypeEnum.equals(MongoAttributeType.COLLECTION)) {
            entity = decodeMongoObject(attributeDoc);
        } else {
            entity = decodeMongoAttribute(attributeDoc);
        }
        mongoObject.addEntity(entity);
    }
    List<Document> filterDocList = (List<Document>) document.get(SyncAttrs.FILTERS);
    if (filterDocList != null && !filterDocList.isEmpty()) {
        mongoObject.setFilters(decodeFilter(filterDocList));
    }
    List<Document> identifiersList = (List<Document>) document.get(SyncAttrs.IDENTIFIERS);
    if (identifiersList != null && !identifiersList.isEmpty()) {
        for (Document identifier : identifiersList) {
            MongoAttribute attr = decodeMongoAttribute(identifier);
            // Entries in IDENTIFIERS are identifiers by definition, regardless
            // of the per-attribute flag.
            attr.setIdentifier(true);
            mongoObject.addIdentifierEntity(attr);
        }
    }
    /*
     * if (mongoObject.getSourceTables() != null &&
     * !mongoObject.getSourceTables().isEmpty()) { SelectQueryBuilder
     * queryBuilder = new SelectQueryBuilder(); List<MatchAble> bindvalues =
     * new ArrayList<MatchAble>(); String selectQuery =
     * queryBuilder.select().from(mongoObject.getSourceTables().get(0))
     * .where(mongoObject.getFilters()).getPreparedStatement(bindvalues);
     * mongoObject.setSelectQuery(selectQuery);
     * mongoObject.setSelectQueryBindValues(bindvalues); }
     */
    List<Document> referencedColumns = (List<Document>) document.get(SyncAttrs.REFERENCED_COLUMNS);
    if (referencedColumns != null && !referencedColumns.isEmpty()) {
        for (Document doc : referencedColumns) {
            mongoObject.addReferencedColumns(decodeColumn(doc));
        }
    }
    logger.debug("Decode Completed. Decode Object : " + mongoObject);
    return mongoObject;
}
/**
 * Decodes parallel-read settings: bucket count, parallel flag and the
 * optional column used to partition the value range.
 */
private OracleParallelReadInfo decodeOracleParallelReadInfo(Document document) {
    OracleParallelReadInfo info = new OracleParallelReadInfo();
    info.setNumOfBuckets(document.getInteger(SyncAttrs.NUM_OF_BUCKETS, 0));
    info.setProcessParallel(document.getBoolean(SyncAttrs.PROCESS_PARALLEL, false));
    Document rangeColumnDoc = (Document) document.get(SyncAttrs.COLUMN_DATA);
    if (rangeColumnDoc != null && !rangeColumnDoc.isEmpty()) {
        info.setRangeColumn(decodeColumn(rangeColumnDoc));
    }
    return info;
}
/**
 * Decodes an Oracle-to-Mongo map; the MAP_OBJECT sub-document (when present
 * and non-empty) becomes the root Mongo object.
 */
private OracleToMongoMap decodeOracleToMongoMap(Document document) {
    logger.debug("Start of decode method");
    OracleToMongoMap map = new OracleToMongoMap();
    Document mapObjectDoc = (Document) document.get(SyncAttrs.MAP_OBJECT);
    boolean hasMapObject = mapObjectDoc != null && !mapObjectDoc.isEmpty();
    if (hasMapObject) {
        map.setMapObject(decodeMongoObject(mapObjectDoc));
    }
    return map;
}
/**
 * Decodes one column-to-attribute mapping: the Oracle column, an optional
 * literal override for the column value, the mapped Mongo attribute, and the
 * parent/child relationship flags used for nested documents.
 */
private ColumnAttrMapper decodeColumnAttrMapper(Document document) {
    ColumnAttrMapper mapper = new ColumnAttrMapper();
    Document columnData = (Document) document.get(SyncAttrs.COLUMN_DATA);
    if (columnData != null) {
        mapper.setColumn(decodeColumn(columnData));
    }
    mapper.setParentColumn(document.getBoolean(SyncAttrs.IS_PARENT_COLUMN, false));
    mapper.setSeqGenerated(document.getBoolean(SyncAttrs.IS_SEQ_GENERATED, false));
    mapper.setSeqName(document.getString(SyncAttrs.SEQ_NAME));
    mapper.setIgnoreList((List<String>) document.get(SyncAttrs.IGNORE_LIST));
    Object literalValDoc = document.get(SyncAttrs.LITERAL_VALUE_FOR_COLUMN);
    if (literalValDoc != null && !"".equals(literalValDoc)) {
        // A Document carries an explicit literal definition; any other value is
        // taken as a raw literal typed after the mapped column.
        if (literalValDoc instanceof Document) {
            mapper.setLiteralValueForColumn(decodeLiteral((Document) literalValDoc));
        } else {
            Literal literal = SqlLiteralFactory.getLiteral(literalValDoc, mapper.getColumn().getColumnType());
            mapper.setLiteralValueForColumn(literal);
        }
    }
    Document mongoAttribute = (Document) document.get(SyncAttrs.ATTRIBUTE);
    if (mongoAttribute != null && !mongoAttribute.isEmpty()) {
        mapper.setAttribute(decodeMongoAttribute(mongoAttribute));
    }
    mapper.setParentAttribute(document.getBoolean(SyncAttrs.IS_PARENT_ATTRIBUTE, false));
    mapper.setParentAttributeNode(document.getString(SyncAttrs.PARENT_ATTRIBUTE_NODE));
    mapper.setChildAttribute(document.getBoolean(SyncAttrs.IS_CHILD_ATTRIBUTE, false));
    mapper.setChildAttributeNode(document.getString(SyncAttrs.CHILD_ATTRIBUTE_NODE));
    mapper.setReplacementMap((Map<String, String>) document.get(SyncAttrs.REPLACEMENT_MAP));
    return mapper;
}
/**
 * Recursively decodes a node group: its tables, column-attribute mappers
 * (keyed by column alias), reference attributes, and child node groups.
 */
private NodeGroup decodeNodeGroup(Document nodeDoc) {
    NodeGroup nodeGroup = new NodeGroup();
    nodeGroup.setNodeName(nodeDoc.getString(SyncAttrs.NODE_NAME));
    nodeGroup.setReferenceAttributes((List<String>) nodeDoc.get(SyncAttrs.REF_ATTRS));
    List<Document> tableListDoc = (List<Document>) nodeDoc.get(SyncAttrs.TABLE_LIST);
    if(tableListDoc!=null){
        List<OracleTable> tableList = new LinkedList<OracleTable>();
        for (Document tableDoc : tableListDoc) {
            tableList.add(decodeTable(tableDoc));
        }
        nodeGroup.setTableList(tableList);
    }
    List<Document> columnAttrMapperDocList = (List<Document>) nodeDoc.get(SyncAttrs.COLUMN_ATTR_MAP);
    if (columnAttrMapperDocList != null && !columnAttrMapperDocList.isEmpty()) {
        // Mappers are indexed by their column alias for direct lookup.
        Map<String, ColumnAttrMapper> mapperList = new HashMap<String, ColumnAttrMapper>();
        for (Document columnAttrMapperDoc : columnAttrMapperDocList) {
            ColumnAttrMapper mapper = decodeColumnAttrMapper(columnAttrMapperDoc);
            mapperList.put(mapper.getColumn().getColumnAlias(), mapper);
        }
        nodeGroup.setColumnAttrMappers(mapperList);
    }
    List<Document> childNodeDocList = (List<Document>) nodeDoc.get(SyncAttrs.CHILD_NODES);
    if(childNodeDocList!=null && !childNodeDocList.isEmpty()){
        List<NodeGroup> childNodeList = new ArrayList<NodeGroup>();
        for(Document childNodeDoc : childNodeDocList){
            NodeGroup childNode = decodeNodeGroup(childNodeDoc);
            childNodeList.add(childNode);
        }
        nodeGroup.setChildGroups(childNodeList);
    }
    return nodeGroup;
}
/**
 * Decodes a Mongo-to-Oracle map: the source collection name plus the list of
 * root node groups (empty/absent group documents are skipped).
 */
private MongoToOracleMap decodeMongoToOracleMap(Document mapDocument) {
    MongoToOracleMap map = new MongoToOracleMap();
    map.setCollectionName(mapDocument.getString(SyncAttrs.COLLECTION_NAME));
    List<Document> groupDocs = (List<Document>) mapDocument.get(SyncAttrs.NODE_TABLE_GROUP);
    if (groupDocs == null || groupDocs.isEmpty()) {
        return map;
    }
    List<NodeGroup> rootNodes = new ArrayList<NodeGroup>();
    for (Document groupDoc : groupDocs) {
        if (groupDoc != null && !groupDoc.isEmpty()) {
            rootNodes.add(decodeNodeGroup(groupDoc));
        }
    }
    map.setRootNode(rootNodes);
    return map;
}
/**
 * Decodes a sync map document.  The MAP_TYPE field selects the concrete
 * decoder; common metadata (id, name, audit fields, connection names) is
 * then applied.  Returns {@code null} for unknown map types.
 */
public SyncMap decodeSyncMap(Document document) {
    String mapTypeStr = document.getString(SyncAttrs.MAP_TYPE);
    MapType mapType = MapType.valueOf(mapTypeStr);
    SyncMap map;
    switch (mapType) {
    case OrclToMongo:
        map = decodeOracleToMongoMap(document);
        break;
    case MongoToOrcl:
        map = decodeMongoToOracleMap(document);
        break;
    case OrclToMongoGridFs:
        map = decodeOracleToMongoGridFsMap(document);
        break;
    default:
        return null;
    }
    Object rawId = document.get(SyncAttrs.ID);
    if (rawId instanceof String) {// Coming from UI
        map.setMapId(new ObjectId((String) rawId));
    } else if (rawId instanceof ObjectId) { // Coming from Db
        map.setMapId((ObjectId) rawId);
    }
    map.setMapName(document.getString(SyncAttrs.MAP_NAME));
    map.setMapType(mapType);
    map.setCreatedBy(document.getString(SyncAttrs.CREATED_BY));
    Date createdOn = document.getDate(SyncAttrs.CREATED_ON);
    // A missing creation date means the map is being created right now.
    map.setCreatedOn(createdOn != null ? createdOn : new Date());
    map.setApprovedBy(document.getString(SyncAttrs.APPROVED_BY));
    map.setApprovedOn(document.getDate(SyncAttrs.APPROVED_ON));
    map.setComments(document.getString(SyncAttrs.COMMENTS));
    map.setSourceDbName(document.getString(SyncAttrs.SOURCE_DB_NAME));
    map.setSourceUserName(document.getString(SyncAttrs.SOURCE_USER_NAME));
    map.setTargetDbName(document.getString(SyncAttrs.TARGET_DB_NAME));
    map.setTargetUserName(document.getString(SyncAttrs.TARGET_USER_NAME));
    return map;
}
/**
 * Decodes a Mongo-to-Oracle one-time migration event.  Currently only the
 * collection name is carried; hook to process extra attributes in future.
 */
private MongoToOracleEvent decodeMongoToOracleEvent(Document document) {
    MongoToOracleEvent event = new MongoToOracleEvent();
    event.setCollectionName(document.getString(SyncAttrs.COLLECTION_NAME));
    return event;
}
/**
 * Decodes a Mongo-to-Oracle sync event document, defaulting the last-read
 * timestamp to a fresh {@code BsonTimestamp} when absent.
 * Hook for processing extra attributes in future.
 */
private MongoToOracleSyncEvent decodeMongoToOracleSyncEvent(Document document) {
    MongoToOracleSyncEvent syncEvent = new MongoToOracleSyncEvent();
    syncEvent.setRestrictedSyncEnabled(document.getBoolean(SyncAttrs.RESTRCTITED_SYNC_ENABLED, false));
    BsonTimestamp readTime = (BsonTimestamp) document.get(SyncAttrs.LAST_READ_TIME);
    syncEvent.setLastReadTime(readTime == null ? new BsonTimestamp() : readTime);
    return syncEvent;
}
/**
 * Decodes an Oracle-to-Mongo event document, including its optional
 * parallel-read configuration and optional range filter.
 */
private OracleToMongoEvent decodeOracleToMongoEvent(Document document) {
    logger.debug("Start of decode method");
    OracleToMongoEvent result = new OracleToMongoEvent();
    result.setCollectionName(document.getString(SyncAttrs.COLLECTION_NAME));
    result.setSaveNulls(document.getBoolean(SyncAttrs.SAVE_NULLS, false));
    Document parallelInfoDoc = (Document) document.get(SyncAttrs.PARALLEL_PROCESSING_INFO);
    if (parallelInfoDoc != null) {
        result.setParallelReadInfo(decodeOracleParallelReadInfo(parallelInfoDoc));
    }
    List<Document> rangeFilterDocs = (List<Document>) document.get(SyncAttrs.RANGE_FILTER);
    if (rangeFilterDocs != null) {
        result.setRangeFilter(decodeFilter(rangeFilterDocs));
    }
    logger.debug("Decode method completed. Decoded document : " + result);
    return result;
}
/**
 * Decodes an Oracle-to-Mongo sync event, including its key attributes and, for
 * poll-based events, the embedded polling configuration.
 *
 * @param document BSON document describing the sync event
 * @return the decoded event
 */
private OracleToMongoSyncEvent decodeOracleToMngSyncEvent(Document document) {
    OracleToMongoSyncEvent event = new OracleToMongoSyncEvent();
    event.setCollectionName(document.getString(SyncAttrs.COLLECTION_NAME));
    event.setSaveNulls(document.getBoolean(SyncAttrs.SAVE_NULLS, false));
    List<Document> keyAttributeDocList = (List<Document>) document.get(SyncAttrs.KEY_ATTRIBUTES);
    if(keyAttributeDocList!=null){
        List<MongoAttribute> keyAttrList = new ArrayList<MongoAttribute>(keyAttributeDocList.size());
        for(Document keyAttrDoc : keyAttributeDocList){
            keyAttrList.add(decodeMongoAttribute(keyAttrDoc));
        }
        event.setKeyAttributes(keyAttrList);
    }
    event.setPollBased(document.getBoolean(SyncAttrs.POLL_BASED, false));
    if(event.isPollBased()){
        O2MSyncPollInfo pollInfo = new O2MSyncPollInfo();
        // NOTE(review): assumes POLL_INFO is always present when POLL_BASED is true;
        // a missing sub-document would throw a NullPointerException below — confirm
        // upstream validation guarantees this.
        Document pollInfoDoc = (Document) document.get(SyncAttrs.POLL_INFO);
        pollInfo.setInterval(pollInfoDoc.getInteger(SyncAttrs.INTERVAL,1)); // default poll interval: 1 time unit
        Date lastReadTime = pollInfoDoc.getDate(SyncAttrs.LAST_READ_TIME);
        if(lastReadTime!=null){
            pollInfo.setLastReadTime(lastReadTime);
        }else{
            pollInfo.setLastReadTime(new Date()); // first run: start polling from "now"
        }
        pollInfo.setPollingColumn(decodeColumn((Document) pollInfoDoc.get(SyncAttrs.POLLING_COLUMN)));
        pollInfo.setTimeUnit(pollInfoDoc.getString(SyncAttrs.TIME_UNIT));
        event.setPollInfo(pollInfo);
    }
    return event;
}
/**
 * Decodes a BSON document into the concrete {@link SyncEvent} subtype indicated by
 * its {@code EVENT_TYPE} attribute, then populates the attributes common to all event types.
 *
 * @param document BSON document describing a sync event (from the UI or from the DB)
 * @return the decoded event, or {@code null} if the event type is not one of the known types
 */
public SyncEvent decodeSyncEvent(Document document) {
    SyncEvent event = null;
    String eventTypeStr = document.getString(SyncAttrs.EVENT_TYPE);
    EventType eventTypeEnum = EventType.valueOf(eventTypeStr);
    switch (eventTypeEnum) {
    case OrclToMongo:
        event = decodeOracleToMongoEvent(document);
        break;
    case MongoToOrcl:
        event = decodeMongoToOracleEvent(document);
        break;
    case MongoToOrclSync:
        event = decodeMongoToOracleSyncEvent(document);
        break;
    case OrclToMongoSync:
        event = decodeOracleToMngSyncEvent(document);
        break;
    case OrclToMongoGridFs:
        event = decodeOracleToMongoGridFsEvent(document);
        break;
    default:
        return null;
    }
    // Ids may arrive as hex strings (from the UI) or as native ObjectIds (from the DB).
    Object _id = document.get(SyncAttrs.ID);
    if (_id instanceof String) {// Coming from UI
        event.setEventId(new ObjectId((String) _id));
    } else if (_id instanceof ObjectId) { // Coming from Db
        event.setEventId((ObjectId) _id);
    }
    Object mappingId = document.get(SyncAttrs.MAP_ID);
    if (mappingId instanceof String) {
        event.setMapId(new ObjectId((String) mappingId));
    } else if (mappingId instanceof ObjectId) {
        event.setMapId((ObjectId) mappingId);
    }
    event.setEventName(document.getString(SyncAttrs.EVENT_NAME));
    event.setComments(document.getString(SyncAttrs.COMMENTS));
    event.setBatchSize(document.getInteger(SyncAttrs.BATCH_SIZE, SyncConstants.DEFAULT_BATCH_SIZE));
    event.setCreatedBy(document.getString(SyncAttrs.CREATED_BY));
    event.setCreatedOn(document.getDate(SyncAttrs.CREATED_ON));
    event.setApprovedBy(document.getString(SyncAttrs.APPROVED_BY));
    event.setApprovedOn(document.getDate(SyncAttrs.APPROVED_ON));
    event.setStatus(document.getString(SyncAttrs.STATUS));
    event.setMapName(document.getString(SyncAttrs.MAP_NAME));
    event.setRetry(document.getBoolean(SyncAttrs.IS_RETRY, false));
    event.setNotifIds(document.getString(SyncAttrs.NOTIF_ALIAS));
    event.setEventType(eventTypeEnum); // reuse the already-parsed enum instead of re-parsing the string
    // A root event has no stored parent; treat it as its own parent so the chain is never null.
    event.setParentEventId(document.getObjectId(SyncAttrs.PARENT_EVENT_ID));
    if (event.getParentEventId() == null) {
        event.setParentEventId(event.getEventId());
    }
    return event;
}
/**
 * Parses a raw JSON string into a BSON {@link Document}.
 *
 * @param jsonString JSON text
 * @return the parsed document
 */
public static Document parsefromJson(String jsonString) {
    return Document.parse(jsonString);
}
/**
 * Convenience overload: parses the JSON string and decodes it as a sync event.
 *
 * @param jsonString JSON representation of a sync event
 * @return the decoded event, or {@code null} for an unknown event type
 */
public SyncEvent decodeSyncEvent(String jsonString) {
    return decodeSyncEvent(parsefromJson(jsonString));
}
/**
 * Convenience overload: parses the JSON string and decodes it as a sync map.
 *
 * @param jsonString JSON representation of a sync map
 * @return the decoded map, or {@code null} for an unknown map type
 */
public SyncMap decodeSyncMap(String jsonString) {
    return decodeSyncMap(parsefromJson(jsonString));
}
/**
 * Decodes an Oracle-to-Mongo GridFS map: the source stream table, the file-name
 * and input-stream columns, optional metadata attribute mappings, and optional filters.
 *
 * @param document BSON document describing the GridFS map
 * @return the decoded map
 */
private OracleToMongoGridFsMap decodeOracleToMongoGridFsMap(Document document){
    OracleToMongoGridFsMap fsMap = new OracleToMongoGridFsMap();
    // NOTE(review): the collection name is read from ATTRIBUTE_NAME here, while every
    // other decoder in this file reads COLLECTION_NAME — confirm this asymmetry is intentional.
    fsMap.setCollectionName(document.getString(SyncAttrs.ATTRIBUTE_NAME));
    if(document.get(SyncAttrs.STREAM_TABLE) !=null){
        Document streamTable = (Document) document.get(SyncAttrs.STREAM_TABLE);
        fsMap.setStreamTable(decodeTable(streamTable));
    }
    fsMap.setFileNameColumn(decodeColumn((Document) document.get(SyncAttrs.FILE_NAME_COLUMN)));
    fsMap.setInputStreamColumn(decodeColumn((Document) document.get(SyncAttrs.INPUT_STREAM_COLUMN)));
    if(document.get(SyncAttrs.META_ATTRIBUTES) !=null){
        Document metaDoc = (Document)document.get(SyncAttrs.META_ATTRIBUTES);
        List<Document> attributeDocList = (List<Document>) metaDoc.get(SyncAttrs.ATTRIBUTES);
        if (attributeDocList != null && !attributeDocList.isEmpty()) {
            // Index decoded mappers by column alias for lookup during sync.
            Map<String, ColumnAttrMapper> mapperList = new HashMap<String, ColumnAttrMapper>();
            for (Document columnAttrMapperDoc : attributeDocList) {
                ColumnAttrMapper mapper = decodeColumnAttrMapper(columnAttrMapperDoc);
                mapperList.put(mapper.getColumn().getColumnAlias(), mapper);
            }
            fsMap.setMetaAttributes(mapperList);
        }
    }
    List<Document> filterDocList = (List<Document>) document.get(SyncAttrs.FILTERS);
    if (filterDocList != null && !filterDocList.isEmpty()) {
        fsMap.setFilters(decodeFilter(filterDocList));
    }
    return fsMap;
}
/**
 * Decodes an Oracle-to-Mongo GridFS event document, including its optional range filter.
 */
private OracleToMongoGridFsEvent decodeOracleToMongoGridFsEvent(Document document) {
    logger.debug("Start of decode method");
    OracleToMongoGridFsEvent result = new OracleToMongoGridFsEvent();
    result.setCollectionName(document.getString(SyncAttrs.COLLECTION_NAME));
    result.setSaveNulls(document.getBoolean(SyncAttrs.SAVE_NULLS, false));
    List<Document> rangeFilterDocs = (List<Document>) document.get(SyncAttrs.RANGE_FILTER);
    if (rangeFilterDocs != null) {
        result.setRangeFilter(decodeFilter(rangeFilterDocs));
    }
    logger.debug("Decode method completed. Decoded document : " + result);
    return result;
}
}
| |
/*
Egothor Software License version 1.00
Copyright (C) 1997-2004 Leo Galambos.
Copyright (C) 2002-2004 "Egothor developers"
on behalf of the Egothor Project.
All rights reserved.
This software is copyrighted by the "Egothor developers". If this
license applies to a single file or document, the "Egothor developers"
are the people or entities mentioned as copyright holders in that file
or document. If this license applies to the Egothor project as a
whole, the copyright holders are the people or entities mentioned in
the file CREDITS. This file can be found in the same location as this
license in the distribution.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, the list of contributors, this list of conditions, and the
following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, the list of contributors, this list of conditions, and the
disclaimer that follows these conditions in the documentation
and/or other materials provided with the distribution.
3. The name "Egothor" must not be used to endorse or promote products
derived from this software without prior written permission. For
written permission, please contact Leo.G@seznam.cz
4. Products derived from this software may not be called "Egothor",
nor may "Egothor" appear in their name, without prior written
permission from Leo.G@seznam.cz.
In addition, we request that you include in the end-user documentation
provided with the redistribution and/or in the software itself an
acknowledgement equivalent to the following:
"This product includes software developed by the Egothor Project.
http://egothor.sf.net/"
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE EGOTHOR PROJECT OR ITS CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This software consists of voluntary contributions made by many
individuals on behalf of the Egothor Project and was originally
created by Leo Galambos (Leo.G@seznam.cz).
*/
package com.hourglassapps.cpi_ii.stem.stempel.egothor;
/**
 * The Diff object generates a patch string.
 * <p>
 * A patch string is actually a command to a stemmer telling it how to reduce a
 * word to its root. For example, to reduce the word teacher to its root teach
 * the patch string Db would be generated. This command tells the stemmer to
 * delete the last 2 characters from the word teacher to reach the stem (the
 * patch commands are applied starting from the last character in order to save
 * time and space).
 */
public class Diff {
  // Allocated dimensions of the cost/backtracking matrices; grown on demand by exec().
  int sizex = 0;
  int sizey = 0;
  // net[x][y] = minimal edit cost to transform the first x chars of a into the first y chars of b.
  int[][] net;
  // way[x][y] = move taken to reach cell (x, y): delete (X), insert (Y), replace (R) or keep (D).
  int[][] way;
  // Costs of the individual edit operations, fixed at construction time.
  int INSERT;
  int DELETE;
  int REPLACE;
  int NOOP;

  /**
   * Constructor for the Diff object. Uses unit costs for insert, delete and
   * replace, and zero cost for an unchanged character.
   */
  public Diff() {
    this(1, 1, 1, 0);
  }

  /**
   * Constructor for the Diff object.
   *
   * @param ins cost of an insert operation
   * @param del cost of a delete operation
   * @param rep cost of a replace operation
   * @param noop cost of keeping a character unchanged
   */
  public Diff(int ins, int del, int rep, int noop) {
    INSERT = ins;
    DELETE = del;
    REPLACE = rep;
    NOOP = noop;
  }

  /**
   * Apply the given patch string <tt>diff</tt> to the given string <tt>
   * dest</tt>, editing it in place. Commands are applied starting from the
   * last character of the buffer. Patches that run past either end of the
   * buffer are silently abandoned, leaving dest partially edited.
   *
   * @param dest Destination string, modified in place
   * @param diff Patch string: pairs of (command, parameter) characters
   */
  public static void apply(StringBuilder dest, CharSequence diff) {
    try {
      if (diff == null) {
        return;
      }
      int pos = dest.length() - 1;
      if (pos < 0) {
        return;
      }
      // orig == ""
      for (int i = 0; i < diff.length() / 2; i++) {
        char cmd = diff.charAt(2 * i);
        char param = diff.charAt(2 * i + 1);
        // Run lengths are encoded as letters: 'a' == 1, 'b' == 2, ...
        int par_num = (param - 'a' + 1);
        switch (cmd) {
          case '-':
            // Skip par_num characters unchanged.
            pos = pos - par_num + 1;
            break;
          case 'R':
            // Replace the current character with the parameter character.
            dest.setCharAt(pos, param);
            break;
          case 'D':
            // Delete par_num characters ending at the current position.
            int o = pos;
            pos -= par_num - 1;
            dest.delete(pos, o + 1);
            break;
          case 'I':
            // Insert the parameter character after the current position.
            dest.insert(pos += 1, param);
            break;
        }
        pos--;
      }
    } catch (StringIndexOutOfBoundsException x) {
      // Intentionally ignored: a corrupt patch leaves dest partially edited.
    } catch (ArrayIndexOutOfBoundsException x) {
      // Intentionally ignored: see above.
    }
  }

  /**
   * Construct a patch string that transforms a to b; that is,
   * {@code apply(new StringBuilder(a), exec(a, b))} turns the buffer into b.
   * Synchronized because the cost/backtracking matrices are shared instance state.
   *
   * @param a String 1st string (the source)
   * @param b String 2nd string (the target)
   * @return the patch string, or {@code null} if either argument is null
   */
  public synchronized String exec(String a, String b) {
    if (a == null || b == null) {
      return null;
    }
    int x;
    int y;
    int maxx;
    int maxy;
    int[] go = new int[4];
    final int X = 1; // move along x: delete a character of a
    final int Y = 2; // move along y: insert a character of b
    final int R = 3; // diagonal move: replace
    final int D = 0; // diagonal move: keep the character (no-op)
    /*
     * setup memory if needed => processing speed up
     */
    maxx = a.length() + 1;
    maxy = b.length() + 1;
    if ((maxx >= sizex) || (maxy >= sizey)) {
      sizex = maxx + 8;
      sizey = maxy + 8;
      net = new int[sizex][sizey];
      way = new int[sizex][sizey];
    }
    /*
     * clear the network
     */
    for (x = 0; x < maxx; x++) {
      for (y = 0; y < maxy; y++) {
        net[x][y] = 0;
      }
    }
    /*
     * set known persistent values: reaching row/column 0 costs one
     * delete/insert per character
     */
    for (x = 1; x < maxx; x++) {
      net[x][0] = x;
      way[x][0] = X;
    }
    for (y = 1; y < maxy; y++) {
      net[0][y] = y;
      way[0][y] = Y;
    }
    for (x = 1; x < maxx; x++) {
      for (y = 1; y < maxy; y++) {
        go[X] = net[x - 1][y] + DELETE;
        // way on x costs DELETE units
        go[Y] = net[x][y - 1] + INSERT;
        // way on y costs INSERT units
        go[R] = net[x - 1][y - 1] + REPLACE;
        // diagonal costs NOOP when the characters match, otherwise a
        // prohibitive 100 so it is never preferred over a real edit
        go[D] = net[x - 1][y - 1]
            + ((a.charAt(x - 1) == b.charAt(y - 1)) ? NOOP : 100);
        int min = D;
        if (go[min] >= go[X]) {
          min = X;
        }
        if (go[min] > go[Y]) {
          min = Y;
        }
        if (go[min] > go[R]) {
          min = R;
        }
        way[x][y] = min;
        // Store the cost directly: the former (short) cast could silently
        // truncate large costs even though net is an int matrix.
        net[x][y] = go[min];
      }
    }
    // Walk the backtracking matrix from the bottom-right corner and emit the
    // patch string; runs of deletes ('D') and skips ('-') are length-encoded
    // as letters relative to base ('a' == run of 1).
    StringBuilder result = new StringBuilder();
    final char base = 'a' - 1;
    char deletes = base;
    char equals = base;
    for (x = maxx - 1, y = maxy - 1; x + y != 0;) {
      switch (way[x][y]) {
        case X:
          // delete
          if (equals != base) {
            result.append("-" + (equals));
            equals = base;
          }
          deletes++;
          x--;
          break;
        case Y:
          // insert
          if (deletes != base) {
            result.append("D" + (deletes));
            deletes = base;
          }
          if (equals != base) {
            result.append("-" + (equals));
            equals = base;
          }
          result.append('I');
          result.append(b.charAt(--y));
          break;
        case R:
          // replace
          if (deletes != base) {
            result.append("D" + (deletes));
            deletes = base;
          }
          if (equals != base) {
            result.append("-" + (equals));
            equals = base;
          }
          result.append('R');
          result.append(b.charAt(--y));
          x--;
          break;
        case D:
          // no change
          if (deletes != base) {
            result.append("D" + (deletes));
            deletes = base;
          }
          equals++;
          x--;
          y--;
          break;
      }
    }
    if (deletes != base) {
      result.append("D" + (deletes));
      deletes = base;
    }
    return result.toString();
  }
}
| |
package net.sf.jabref.logic.openoffice;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import net.sf.jabref.logic.journals.JournalAbbreviationLoader;
import net.sf.jabref.logic.layout.LayoutFormatterPreferences;
import net.sf.jabref.preferences.JabRefPreferences;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
/**
 * Tests for {@code StyleLoader}: constructor null-checks, adding and removing
 * external styles, and resolution of the default/stored citation style.
 */
public class StyleLoaderTest {

    // StyleLoader always bundles exactly two internal styles (author-year and numerical).
    private static final int numberOfInternalStyles = 2;
    private StyleLoader loader;

    private OpenOfficePreferences preferences;
    private LayoutFormatterPreferences layoutPreferences;
    private Charset encoding;

    @Before
    public void setUp() {
        preferences = new OpenOfficePreferences(JabRefPreferences.getInstance());
        layoutPreferences = JabRefPreferences.getInstance()
                .getLayoutFormatterPreferences(mock(JournalAbbreviationLoader.class));
        encoding = JabRefPreferences.getInstance().getDefaultEncoding();
    }

    @Test(expected = NullPointerException.class)
    public void throwNPEWithNullPreferences() {
        loader = new StyleLoader(null, layoutPreferences, mock(Charset.class));
        fail();
    }

    @Test(expected = NullPointerException.class)
    public void throwNPEWithNullRepository() {
        loader = new StyleLoader(mock(OpenOfficePreferences.class),
                JabRefPreferences.getInstance().getLayoutFormatterPreferences(null), mock(Charset.class));
        fail();
    }

    @Test(expected = NullPointerException.class)
    public void throwNPEWithNullCharset() {
        loader = new StyleLoader(mock(OpenOfficePreferences.class), layoutPreferences, null);
        fail();
    }

    @Test
    public void testGetStylesWithEmptyExternal() {
        preferences.setExternalStyles(Collections.emptyList());
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        // Use the named constant rather than a magic literal, consistent with the other tests.
        assertEquals(numberOfInternalStyles, loader.getStyles().size());
    }

    @Test
    public void testAddStyleLeadsToOneMoreStyle() throws URISyntaxException {
        preferences.setExternalStyles(Collections.emptyList());
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        String filename = Paths.get(StyleLoader.class.getResource(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH).toURI())
                .toFile().getPath();
        loader.addStyleIfValid(filename);
        assertEquals(numberOfInternalStyles + 1, loader.getStyles().size());
    }

    @Test
    public void testAddInvalidStyleLeadsToNoMoreStyle() {
        preferences.setExternalStyles(Collections.emptyList());
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        int beforeAdding = loader.getStyles().size();
        loader.addStyleIfValid("DefinitelyNotAValidFileNameOrWeAreExtremelyUnlucky");
        assertEquals(beforeAdding, loader.getStyles().size());
    }

    @Test
    public void testInitalizeWithOneExternalFile() throws URISyntaxException {
        String filename = Paths.get(StyleLoader.class.getResource(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH).toURI())
                .toFile().getPath();
        preferences.setExternalStyles(Collections.singletonList(filename));
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        assertEquals(numberOfInternalStyles + 1, loader.getStyles().size());
    }

    @Test
    public void testInitalizeWithIncorrectExternalFile() {
        preferences.setExternalStyles(Collections.singletonList("DefinitelyNotAValidFileNameOrWeAreExtremelyUnlucky"));
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        assertEquals(numberOfInternalStyles, loader.getStyles().size());
    }

    @Test
    public void testInitalizeWithOneExternalFileRemoveStyle() throws URISyntaxException {
        String filename = Paths.get(StyleLoader.class.getResource(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH).toURI())
                .toFile().getPath();
        preferences.setExternalStyles(Collections.singletonList(filename));
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        List<OOBibStyle> toremove = new ArrayList<>();
        int beforeRemoving = loader.getStyles().size();
        // Only externally-loaded styles are removable; collect them first to
        // avoid mutating the list while iterating over it.
        for (OOBibStyle style : loader.getStyles()) {
            if (!style.isFromResource()) {
                toremove.add(style);
            }
        }
        for (OOBibStyle style : toremove) {
            assertTrue(loader.removeStyle(style));
        }
        assertEquals(beforeRemoving - 1, loader.getStyles().size());
    }

    @Test
    public void testInitalizeWithOneExternalFileRemoveStyleUpdatesPreferences() throws URISyntaxException {
        String filename = Paths.get(StyleLoader.class.getResource(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH).toURI())
                .toFile().getPath();
        preferences.setExternalStyles(Collections.singletonList(filename));
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        List<OOBibStyle> toremove = new ArrayList<>();
        for (OOBibStyle style : loader.getStyles()) {
            if (!style.isFromResource()) {
                toremove.add(style);
            }
        }
        for (OOBibStyle style : toremove) {
            assertTrue(loader.removeStyle(style));
        }
        // Removing the only external style must leave the stored preference list empty.
        assertTrue(preferences.getExternalStyles().isEmpty());
    }

    @Test
    public void testAddSameStyleTwiceLeadsToOneMoreStyle() throws URISyntaxException {
        preferences.setExternalStyles(Collections.emptyList());
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        int beforeAdding = loader.getStyles().size();
        String filename = Paths.get(StyleLoader.class.getResource(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH).toURI())
                .toFile().getPath();
        loader.addStyleIfValid(filename);
        loader.addStyleIfValid(filename);
        // Adding the same file twice must not create a duplicate entry.
        assertEquals(beforeAdding + 1, loader.getStyles().size());
    }

    @Test(expected = NullPointerException.class)
    public void testAddNullStyleThrowsNPE() {
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        loader.addStyleIfValid(null);
        fail();
    }

    @Test
    public void testGetDefaultUsedStyleWhenEmpty() {
        preferences.clearCurrentStyle();
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        OOBibStyle style = loader.getUsedStyle();
        assertTrue(style.isValid());
        assertEquals(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH, style.getPath());
        assertEquals(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH, preferences.getCurrentStyle());
    }

    @Test
    public void testGetStoredUsedStyle() {
        preferences.setCurrentStyle(StyleLoader.DEFAULT_NUMERICAL_STYLE_PATH);
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        OOBibStyle style = loader.getUsedStyle();
        assertTrue(style.isValid());
        assertEquals(StyleLoader.DEFAULT_NUMERICAL_STYLE_PATH, style.getPath());
        assertEquals(StyleLoader.DEFAULT_NUMERICAL_STYLE_PATH, preferences.getCurrentStyle());
    }

    @Test
    public void testGtDefaultUsedStyleWhenIncorrect() {
        preferences.setCurrentStyle("ljlkjlkjnljnvdlsjniuhwelfhuewfhlkuewhfuwhelu");
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        OOBibStyle style = loader.getUsedStyle();
        assertTrue(style.isValid());
        // An unresolvable stored style falls back to the default author-year style.
        assertEquals(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH, style.getPath());
        assertEquals(StyleLoader.DEFAULT_AUTHORYEAR_STYLE_PATH, preferences.getCurrentStyle());
    }

    @Test
    public void testRemoveInternalStyleReturnsFalseAndDoNotRemove() {
        preferences.setExternalStyles(Collections.emptyList());
        loader = new StyleLoader(preferences, layoutPreferences, encoding);
        List<OOBibStyle> toremove = new ArrayList<>();
        for (OOBibStyle style : loader.getStyles()) {
            if (style.isFromResource()) {
                toremove.add(style);
            }
        }
        assertFalse(loader.removeStyle(toremove.get(0)));
        assertEquals(numberOfInternalStyles, loader.getStyles().size());
    }
}
| |
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
/*
Verify we can read the pre-2.1 file format, do searches
against it, and add documents to it.
*/
public class TestDeletionPolicy extends LuceneTestCase {
/**
 * Asserts that the given commits are ordered by strictly increasing generation and
 * that each commit's generation matches the one parsed from its segments file name.
 */
private void verifyCommitOrder(List<? extends IndexCommit> commits) {
    if (commits.isEmpty()) {
        return;
    }
    long previousGen = SegmentInfos.generationFromSegmentsFileName(commits.get(0).getSegmentsFileName());
    assertEquals(previousGen, commits.get(0).getGeneration());
    for (int i = 1; i < commits.size(); i++) {
        final IndexCommit current = commits.get(i);
        final long currentGen = SegmentInfos.generationFromSegmentsFileName(current.getSegmentsFileName());
        assertTrue("SegmentInfos commits are out-of-order", currentGen > previousGen);
        assertEquals(currentGen, current.getGeneration());
        previousGen = currentGen;
    }
}
/** Deletion policy that never deletes any commit point; counts its callbacks for assertions. */
class KeepAllDeletionPolicy extends IndexDeletionPolicy {
    int numOnInit;   // number of times onInit was invoked
    int numOnCommit; // number of times onCommit was invoked
    Directory dir;
    KeepAllDeletionPolicy(Directory dir) {
        this.dir = dir;
    }
    @Override
    public void onInit(List<? extends IndexCommit> commits) throws IOException {
        verifyCommitOrder(commits);
        numOnInit++;
    }
    @Override
    public void onCommit(List<? extends IndexCommit> commits) throws IOException {
        // The newest commit's segment count must agree with what a fresh reader sees.
        IndexCommit lastCommit = commits.get(commits.size()-1);
        DirectoryReader r = DirectoryReader.open(dir);
        assertEquals("lastCommit.segmentCount()=" + lastCommit.getSegmentCount() + " vs IndexReader.segmentCount=" + r.leaves().size(), r.leaves().size(), lastCommit.getSegmentCount());
        r.close();
        verifyCommitOrder(commits);
        numOnCommit++;
    }
}
/**
 * Deletion policy that keeps as little as possible: all commit points are deleted
 * on init, and on each commit only the newest commit point survives. This is
 * useful when adding to a big index and you know no readers are using older commits.
 */
class KeepNoneOnInitDeletionPolicy extends IndexDeletionPolicy {
    int numOnInit;   // number of times onInit was invoked
    int numOnCommit; // number of times onCommit was invoked
    @Override
    public void onInit(List<? extends IndexCommit> commits) throws IOException {
        verifyCommitOrder(commits);
        numOnInit++;
        // On init, delete all commit points:
        for (final IndexCommit commit : commits) {
            commit.delete();
            assertTrue(commit.isDeleted());
        }
    }
    @Override
    public void onCommit(List<? extends IndexCommit> commits) throws IOException {
        verifyCommitOrder(commits);
        int size = commits.size();
        // Delete all but the last one:
        for (int i = 0; i < size - 1; i++) {
            // Elements of List<? extends IndexCommit> are already IndexCommit; no cast needed.
            commits.get(i).delete();
        }
        numOnCommit++;
    }
}
/**
 * Deletion policy that keeps only the {@code numToKeep} most recent commit points,
 * and verifies that onCommit is invoked exactly once per new commit point.
 */
class KeepLastNDeletionPolicy extends IndexDeletionPolicy {
    int numOnInit;   // number of times onInit was invoked
    int numOnCommit; // number of times onCommit was invoked
    int numToKeep;   // how many of the most recent commits to keep
    int numDelete;   // total number of commits this policy has deleted
    // Segments file names already seen by onCommit; used to detect duplicate callbacks.
    Set<String> seen = new HashSet<>();
    public KeepLastNDeletionPolicy(int numToKeep) {
        this.numToKeep = numToKeep;
    }
    @Override
    public void onInit(List<? extends IndexCommit> commits) throws IOException {
        if (VERBOSE) {
            System.out.println("TEST: onInit");
        }
        verifyCommitOrder(commits);
        numOnInit++;
        // do no deletions on init
        doDeletes(commits, false);
    }
    @Override
    public void onCommit(List<? extends IndexCommit> commits) throws IOException {
        if (VERBOSE) {
            System.out.println("TEST: onCommit");
        }
        verifyCommitOrder(commits);
        doDeletes(commits, true);
    }
    private void doDeletes(List<? extends IndexCommit> commits, boolean isCommit) {
        // Assert that we really are only called for each new
        // commit:
        if (isCommit) {
            // Elements of List<? extends IndexCommit> are already IndexCommit; no cast needed.
            String fileName = commits.get(commits.size() - 1).getSegmentsFileName();
            if (seen.contains(fileName)) {
                throw new RuntimeException("onCommit was called twice on the same commit point: " + fileName);
            }
            seen.add(fileName);
            numOnCommit++;
        }
        // Delete everything except the numToKeep most recent commits:
        int size = commits.size();
        for (int i = 0; i < size - numToKeep; i++) {
            commits.get(i).delete();
            numDelete++;
        }
    }
}
/**
 * Extracts the "commitTime" user-data value (milliseconds since epoch) that
 * these tests store in each commit's user data.
 */
static long getCommitTime(IndexCommit commit) throws IOException {
    return Long.parseLong(commit.getUserData().get("commitTime"));
}
/*
 * Delete a commit only when it has been obsoleted by N
 * seconds.
 */
class ExpirationTimeDeletionPolicy extends IndexDeletionPolicy {
    Directory dir;
    // Age in seconds (relative to the newest commit's recorded time) after which older commits expire.
    double expirationTimeSeconds;
    // Total number of commits this policy has deleted.
    int numDelete;
    public ExpirationTimeDeletionPolicy(Directory dir, double seconds) {
        this.dir = dir;
        this.expirationTimeSeconds = seconds;
    }
    @Override
    public void onInit(List<? extends IndexCommit> commits) throws IOException {
        if (commits.isEmpty()) {
            return;
        }
        verifyCommitOrder(commits);
        // Expiration applies at startup exactly as it does after each commit.
        onCommit(commits);
    }
    @Override
    public void onCommit(List<? extends IndexCommit> commits) throws IOException {
        verifyCommitOrder(commits);
        IndexCommit lastCommit = commits.get(commits.size()-1);
        // Any commit older than expireTime should be deleted:
        double expireTime = getCommitTime(lastCommit)/1000.0 - expirationTimeSeconds;
        for (final IndexCommit commit : commits) {
            double modTime = getCommitTime(commit)/1000.0;
            if (commit != lastCommit && modTime < expireTime) {
                commit.delete();
                numDelete += 1;
            }
        }
    }
}
/*
 * Test "by time expiration" deletion policy: commits are repeatedly created
 * until the policy has expired a random target number of old commits, then the
 * surviving segments_N files are verified to be within the expiration window.
 */
public void testExpirationTimeDeletionPolicy() throws IOException, InterruptedException {
    final double SECONDS = 2.0;
    Directory dir = newDirectory();
    if (dir instanceof MockDirectoryWrapper) {
        // test manually deletes files
        ((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
    }
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
        .setIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(dir, SECONDS));
    MergePolicy mp = conf.getMergePolicy();
    mp.setNoCFSRatio(1.0);
    IndexWriter writer = new IndexWriter(dir, conf);
    ExpirationTimeDeletionPolicy policy = (ExpirationTimeDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
    // Record the commit wall-clock time in the commit user data; the policy reads it back.
    Map<String,String> commitData = new HashMap<>();
    commitData.put("commitTime", String.valueOf(System.currentTimeMillis()));
    writer.setCommitData(commitData);
    writer.commit();
    writer.close();
    long lastDeleteTime = 0;
    final int targetNumDelete = TestUtil.nextInt(random(), 1, 5);
    // Keep committing until the policy has expired (deleted) enough old commits.
    while (policy.numDelete < targetNumDelete) {
        // Record last time when writer performed deletes of
        // past commits
        lastDeleteTime = System.currentTimeMillis();
        conf = newIndexWriterConfig(new MockAnalyzer(random()))
            .setOpenMode(OpenMode.APPEND)
            .setIndexDeletionPolicy(policy);
        mp = conf.getMergePolicy();
        mp.setNoCFSRatio(1.0);
        writer = new IndexWriter(dir, conf);
        policy = (ExpirationTimeDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
        for(int j=0;j<17;j++) {
            addDoc(writer);
        }
        commitData = new HashMap<>();
        commitData.put("commitTime", String.valueOf(System.currentTimeMillis()));
        writer.setCommitData(commitData);
        writer.commit();
        writer.close();
        Thread.sleep((int) (1000.0*(SECONDS/5.0)));
    }
    // Then simplistic check: just verify that the
    // segments_N's that still exist are in fact within SECONDS
    // seconds of the last one's mod time, and, that I can
    // open a reader on each:
    long gen = SegmentInfos.getLastCommitGeneration(dir);
    String fileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
                                                            "",
                                                            gen);
    boolean oneSecondResolution = true;
    while(gen > 0) {
        try {
            IndexReader reader = DirectoryReader.open(dir);
            reader.close();
            fileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
                                                             "",
                                                             gen);
            // if we are on a filesystem that seems to have only
            // 1 second resolution, allow +1 second in commit
            // age tolerance:
            SegmentInfos sis = SegmentInfos.readCommit(dir, fileName);
            long modTime = Long.parseLong(sis.getUserData().get("commitTime"));
            oneSecondResolution &= (modTime % 1000) == 0;
            final long leeway = (long) ((SECONDS + (oneSecondResolution ? 1.0:0.0))*1000);
            assertTrue("commit point was older than " + SECONDS + " seconds (" + (lastDeleteTime - modTime) + " msec) but did not get deleted ", lastDeleteTime - modTime <= leeway);
        } catch (IOException e) {
            // OK: this commit was expired and its segments file is gone.
            break;
        }
        dir.deleteFile(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
        gen--;
    }
    dir.close();
}
/*
 * Test a silly deletion policy that keeps all commits around.
 * Runs two passes (with and without compound files), verifies the callback
 * counts and that a reader can be opened on every surviving commit.
 */
public void testKeepAllDeletionPolicy() throws IOException {
    for(int pass=0;pass<2;pass++) {
        if (VERBOSE) {
            System.out.println("TEST: cycle pass=" + pass);
        }
        boolean useCompoundFile = (pass % 2) != 0;
        Directory dir = newDirectory();
        if (dir instanceof MockDirectoryWrapper) {
            // test manually deletes files
            ((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
        }
        IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
            .setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir))
            .setMaxBufferedDocs(10)
            .setMergeScheduler(new SerialMergeScheduler());
        MergePolicy mp = conf.getMergePolicy();
        mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
        IndexWriter writer = new IndexWriter(dir, conf);
        KeepAllDeletionPolicy policy = (KeepAllDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
        for(int i=0;i<107;i++) {
            addDoc(writer);
        }
        writer.close();
        // A second writer/commit is only expected when the first pass left multiple segments.
        final boolean needsMerging;
        {
            DirectoryReader r = DirectoryReader.open(dir);
            needsMerging = r.leaves().size() != 1;
            r.close();
        }
        if (needsMerging) {
            conf = newIndexWriterConfig(new MockAnalyzer(random()))
                .setOpenMode(OpenMode.APPEND)
                .setIndexDeletionPolicy(policy);
            mp = conf.getMergePolicy();
            mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
            if (VERBOSE) {
                System.out.println("TEST: open writer for forceMerge");
            }
            writer = new IndexWriter(dir, conf);
            policy = (KeepAllDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
            writer.forceMerge(1);
            writer.close();
        }
        assertEquals(needsMerging ? 2:1, policy.numOnInit);
        // If we are not auto committing then there should
        // be exactly 2 commits (one per close above):
        assertEquals(1 + (needsMerging ? 1:0), policy.numOnCommit);
        // Test listCommits
        Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
        // 2 from closing writer
        assertEquals(1 + (needsMerging ? 1:0), commits.size());
        // Make sure we can open a reader on each commit:
        for (final IndexCommit commit : commits) {
            IndexReader r = DirectoryReader.open(commit);
            r.close();
        }
        // Simplistic check: just verify all segments_N's still
        // exist, and, I can open a reader on each:
        long gen = SegmentInfos.getLastCommitGeneration(dir);
        while(gen > 0) {
            IndexReader reader = DirectoryReader.open(dir);
            reader.close();
            dir.deleteFile(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
            gen--;
            if (gen > 0) {
                // Now that we've removed a commit point, which
                // should have orphan'd at least one index file.
                // Open & close a writer and assert that it
                // actually removed something:
                int preCount = dir.listAll().length;
                writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
                    .setOpenMode(OpenMode.APPEND)
                    .setIndexDeletionPolicy(policy));
                writer.close();
                int postCount = dir.listAll().length;
                assertTrue(postCount < preCount);
            }
        }
        dir.close();
    }
}
/* Uses KeepAllDeletionPolicy to keep all commits around,
 * then, opens a new IndexWriter on a previous commit
 * point, and verifies that rolling back vs. committing that
 * writer session selects between the old and new commit. */
public void testOpenPriorSnapshot() throws IOException {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(
      dir,
      newIndexWriterConfig(new MockAnalyzer(random())).
          setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir)).
          setMaxBufferedDocs(2).
          setMergePolicy(newLogMergePolicy(10))
  );
  KeepAllDeletionPolicy policy = (KeepAllDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
  // 10 docs, committing after every 2nd -> 5 commits.
  for(int i=0;i<10;i++) {
    addDoc(writer);
    if ((1+i)%2 == 0)
      writer.commit();
  }
  writer.close();
  Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
  assertEquals(5, commits.size());
  // Find the most recent commit (highest generation):
  IndexCommit lastCommit = null;
  for (final IndexCommit commit : commits) {
    if (lastCommit == null || commit.getGeneration() > lastCommit.getGeneration())
      lastCommit = commit;
  }
  assertTrue(lastCommit != null);
  // Now add 1 doc and merge
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
      .setIndexDeletionPolicy(policy));
  addDoc(writer);
  assertEquals(11, writer.numDocs());
  writer.forceMerge(1);
  writer.close();
  assertEquals(6, DirectoryReader.listCommits(dir).size());
  // Now open writer on the commit just before merge:
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
      .setIndexDeletionPolicy(policy)
      .setIndexCommit(lastCommit));
  assertEquals(10, writer.numDocs());
  // Rolling back discards this prior-commit session, so the index still
  // reflects the last (merged, 11-doc) commit:
  writer.rollback();
  DirectoryReader r = DirectoryReader.open(dir);
  // Still merged, still 11 docs
  assertEquals(1, r.leaves().size());
  assertEquals(11, r.numDocs());
  r.close();
  // Open on the prior commit again, but this time close normally so the
  // "rewind" is committed:
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
      .setIndexDeletionPolicy(policy)
      .setIndexCommit(lastCommit));
  assertEquals(10, writer.numDocs());
  // Commits the rollback:
  writer.close();
  // Now 7 because the close above made another commit
  assertEquals(7, DirectoryReader.listCommits(dir).size());
  r = DirectoryReader.open(dir);
  // Not fully merged because we rolled it back, and now only
  // 10 docs
  assertTrue(r.leaves().size() > 1);
  assertEquals(10, r.numDocs());
  r.close();
  // Re-merge
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
      .setIndexDeletionPolicy(policy));
  writer.forceMerge(1);
  writer.close();
  r = DirectoryReader.open(dir);
  assertEquals(1, r.leaves().size());
  assertEquals(10, r.numDocs());
  r.close();
  // Now open writer on the commit just before merging,
  // but this time keeping only the last commit (default deletion policy):
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
      .setIndexCommit(lastCommit));
  assertEquals(10, writer.numDocs());
  // Reader still sees fully merged index, because writer
  // opened on the prior commit has not yet committed:
  r = DirectoryReader.open(dir);
  assertEquals(1, r.leaves().size());
  assertEquals(10, r.numDocs());
  r.close();
  writer.close();
  // Now reader sees not-fully-merged index:
  r = DirectoryReader.open(dir);
  assertTrue(r.leaves().size() > 1);
  assertEquals(10, r.numDocs());
  r.close();
  dir.close();
}
/* Test keeping NO commit points. This is a viable and
 * useful case eg where you want to build a big index and
 * you know there are no readers.
 */
public void testKeepNoneOnInitDeletionPolicy() throws IOException {
  // Pass 0 exercises non-compound files, pass 1 compound files.
  for(int pass=0;pass<2;pass++) {
    boolean useCompoundFile = (pass % 2) != 0;
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
        .setOpenMode(OpenMode.CREATE)
        .setIndexDeletionPolicy(new KeepNoneOnInitDeletionPolicy())
        .setMaxBufferedDocs(10);
    MergePolicy mp = conf.getMergePolicy();
    mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
    IndexWriter writer = new IndexWriter(dir, conf);
    // Assert against the writer's own copy of the policy.
    KeepNoneOnInitDeletionPolicy policy = (KeepNoneOnInitDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
    for(int i=0;i<107;i++) {
      addDoc(writer);
    }
    writer.close();
    // Second session: force-merge down to a single segment.
    conf = newIndexWriterConfig(new MockAnalyzer(random()))
        .setOpenMode(OpenMode.APPEND)
        .setIndexDeletionPolicy(policy);
    mp = conf.getMergePolicy();
    // Fix: honor the pass's compound-file setting, as every other test in
    // this class does, rather than unconditionally forcing compound files.
    mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
    writer = new IndexWriter(dir, conf);
    policy = (KeepNoneOnInitDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
    writer.forceMerge(1);
    writer.close();
    // onInit runs once per writer session.
    assertEquals(2, policy.numOnInit);
    // There should be exactly 2 commits (one per close above):
    assertEquals(2, policy.numOnCommit);
    // Simplistic check: just verify the index is in fact
    // readable:
    IndexReader reader = DirectoryReader.open(dir);
    reader.close();
    dir.close();
  }
}
/*
 * Test a deletion policy that keeps last N commits: after N+1 CREATE
 * sessions, only the newest N+1 segments_N generations may remain
 * readable; older ones must have been deleted by the policy.
 */
public void testKeepLastNDeletionPolicy() throws IOException {
  final int N = 5;
  // Pass 0 exercises non-compound files, pass 1 compound files.
  for(int pass=0;pass<2;pass++) {
    boolean useCompoundFile = (pass % 2) != 0;
    Directory dir = newDirectory();
    if (dir instanceof MockDirectoryWrapper) {
      // test manually deletes files
      ((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
    }
    KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
    // N+1 independent CREATE sessions, each committing once on close.
    for(int j=0;j<N+1;j++) {
      IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
          .setOpenMode(OpenMode.CREATE)
          .setIndexDeletionPolicy(policy)
          .setMaxBufferedDocs(10);
      MergePolicy mp = conf.getMergePolicy();
      mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
      IndexWriter writer = new IndexWriter(dir, conf);
      // Track the writer's own copy of the policy across sessions.
      policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
      for(int i=0;i<17;i++) {
        addDoc(writer);
      }
      writer.forceMerge(1);
      writer.close();
    }
    assertTrue(policy.numDelete > 0);
    assertEquals(N+1, policy.numOnInit);
    assertEquals(N+1, policy.numOnCommit);
    // Simplistic check: just verify only the past N segments_N's still
    // exist, and, I can open a reader on each:
    long gen = SegmentInfos.getLastCommitGeneration(dir);
    for(int i=0;i<N+1;i++) {
      try {
        IndexReader reader = DirectoryReader.open(dir);
        reader.close();
        if (i == N) {
          // We deleted the newest N generations below; the (N+1)th open
          // must hit a commit the policy already removed.
          fail("should have failed on commits prior to last " + N);
        }
      } catch (IOException e) {
        if (i != N) {
          throw e;
        }
      }
      if (i < N) {
        dir.deleteFile(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
      }
      gen--;
    }
    dir.close();
  }
}
/*
 * Test a deletion policy that keeps last N commits
 * around, through creates. Each iteration commits three times
 * (append 17 docs, delete 1 doc, re-create empty), and the per-commit
 * hit counts are verified while walking the kept commits backwards.
 */
public void testKeepLastNDeletionPolicyWithCreates() throws IOException {
  final int N = 10;
  // Pass 0 exercises non-compound files, pass 1 compound files.
  for(int pass=0;pass<2;pass++) {
    boolean useCompoundFile = (pass % 2) != 0;
    Directory dir = newDirectory();
    if (dir instanceof MockDirectoryWrapper) {
      // test manually deletes files
      ((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
    }
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
        .setOpenMode(OpenMode.CREATE)
        .setIndexDeletionPolicy(new KeepLastNDeletionPolicy(N))
        .setMaxBufferedDocs(10);
    MergePolicy mp = conf.getMergePolicy();
    mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
    IndexWriter writer = new IndexWriter(dir, conf);
    // Track the writer's own copy of the policy across sessions.
    KeepLastNDeletionPolicy policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
    writer.close();
    Term searchTerm = new Term("content", "aaa");
    Query query = new TermQuery(searchTerm);
    for(int i=0;i<N+1;i++) {
      // Session 1: append 17 docs.
      conf = newIndexWriterConfig(new MockAnalyzer(random()))
          .setOpenMode(OpenMode.APPEND)
          .setIndexDeletionPolicy(policy)
          .setMaxBufferedDocs(10);
      mp = conf.getMergePolicy();
      mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
      writer = new IndexWriter(dir, conf);
      policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
      for(int j=0;j<17;j++) {
        addDocWithID(writer, i*(N+1)+j);
      }
      // this is a commit
      writer.close();
      // Session 2: delete one of the docs just added (no merging, so the
      // delete stays visible as 17 -> 16 hits).
      conf = new IndexWriterConfig(new MockAnalyzer(random()))
          .setIndexDeletionPolicy(policy)
          .setMergePolicy(NoMergePolicy.INSTANCE);
      writer = new IndexWriter(dir, conf);
      policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
      writer.deleteDocuments(new Term("id", "" + (i*(N+1)+3)));
      // this is a commit
      writer.close();
      IndexReader reader = DirectoryReader.open(dir);
      IndexSearcher searcher = newSearcher(reader);
      ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
      assertEquals(16, hits.length);
      reader.close();
      // Session 3: re-create an empty index.
      writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
          .setOpenMode(OpenMode.CREATE)
          .setIndexDeletionPolicy(policy));
      policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
      // This will not commit: there are no changes
      // pending because we opened for "create":
      writer.close();
    }
    // 3 sessions per iteration plus the initial writer above.
    assertEquals(3*(N+1)+1, policy.numOnInit);
    assertEquals(3*(N+1)+1, policy.numOnCommit);
    IndexReader rwReader = DirectoryReader.open(dir);
    IndexSearcher searcher = newSearcher(rwReader);
    ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
    assertEquals(0, hits.length);
    // Simplistic check: just verify only the past N segments_N's still
    // exist, and, I can open a reader on each:
    long gen = SegmentInfos.getLastCommitGeneration(dir);
    int expectedCount = 0;
    rwReader.close();
    for(int i=0;i<N+1;i++) {
      try {
        IndexReader reader = DirectoryReader.open(dir);
        // Work backwards in commits on what the expected
        // count should be: 0 (re-create), 16 (after delete), 17 (append).
        searcher = newSearcher(reader);
        hits = searcher.search(query, null, 1000).scoreDocs;
        assertEquals(expectedCount, hits.length);
        if (expectedCount == 0) {
          expectedCount = 16;
        } else if (expectedCount == 16) {
          expectedCount = 17;
        } else if (expectedCount == 17) {
          expectedCount = 0;
        }
        reader.close();
        if (i == N) {
          fail("should have failed on commits before last " + N);
        }
      } catch (IOException e) {
        if (i != N) {
          throw e;
        }
      }
      if (i < N) {
        dir.deleteFile(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
      }
      gen--;
    }
    dir.close();
  }
}
/**
 * Adds one document carrying the standard "content: aaa" text field plus a
 * string "id" field holding the given id.
 *
 * @param writer destination writer
 * @param id     value stored (as a string) in the "id" field
 * @throws IOException if the writer fails to add the document
 */
private void addDocWithID(IndexWriter writer, int id) throws IOException {
  Document document = new Document();
  document.add(newTextField("content", "aaa", Field.Store.NO));
  document.add(newStringField("id", "" + id, Field.Store.NO));
  writer.addDocument(document);
}
/**
 * Adds one minimal document (a single "content: aaa" text field) to the
 * given writer.
 *
 * @param writer destination writer
 * @throws IOException if the writer fails to add the document
 */
private void addDoc(IndexWriter writer) throws IOException {
  Document document = new Document();
  document.add(newTextField("content", "aaa", Field.Store.NO));
  writer.addDocument(document);
}
}
| |
package org.sagebionetworks.table.worker;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.common.util.progress.ProgressCallback;
import org.sagebionetworks.repo.manager.EntityManager;
import org.sagebionetworks.repo.manager.SemaphoreManager;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.asynch.AsynchJobStatusManager;
import org.sagebionetworks.repo.manager.table.ColumnModelManager;
import org.sagebionetworks.repo.manager.table.TableEntityManager;
import org.sagebionetworks.repo.manager.table.TableQueryManager;
import org.sagebionetworks.repo.manager.table.TableViewManager;
import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.asynch.AsynchJobState;
import org.sagebionetworks.repo.model.asynch.AsynchronousJobStatus;
import org.sagebionetworks.repo.model.dao.FileHandleDao;
import org.sagebionetworks.repo.model.dbo.dao.table.CSVToRowIterator;
import org.sagebionetworks.repo.model.dbo.dao.table.TableModelTestUtils;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.repo.model.table.DownloadFromTableRequest;
import org.sagebionetworks.repo.model.table.DownloadFromTableResult;
import org.sagebionetworks.repo.model.table.EntityField;
import org.sagebionetworks.repo.model.table.EntityView;
import org.sagebionetworks.repo.model.table.Query;
import org.sagebionetworks.repo.model.table.RowReferenceSet;
import org.sagebionetworks.repo.model.table.RowSet;
import org.sagebionetworks.repo.model.table.SortDirection;
import org.sagebionetworks.repo.model.table.SortItem;
import static org.sagebionetworks.repo.model.table.TableConstants.*;
import org.sagebionetworks.repo.model.table.TableEntity;
import org.sagebionetworks.repo.model.table.TableUnavailableException;
import org.sagebionetworks.repo.model.table.ViewType;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.table.cluster.utils.TableModelUtils;
import org.sagebionetworks.workers.util.semaphore.LockUnavilableException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import au.com.bytecode.opencsv.CSVReader;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.google.common.collect.Lists;
/**
 * Integration test for the asynchronous table CSV download worker: builds a
 * TableEntity (or an EntityView), submits a DownloadFromTableRequest job,
 * waits for the job to complete, downloads the resulting CSV from S3, and
 * verifies its contents.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public class TableCSVDownloadWorkerIntegrationTest {

  // This test can be slow when run from outside of Amazon.
  public static final int MAX_WAIT_MS = 1000 * 60;

  @Autowired
  AsynchJobStatusManager asynchJobStatusManager;
  @Autowired
  StackConfiguration config;
  @Autowired
  FileHandleDao fileHandleDao;
  @Autowired
  EntityManager entityManager;
  @Autowired
  TableEntityManager tableEntityManager;
  @Autowired
  TableQueryManager tableQueryManger;
  @Autowired
  ColumnModelManager columnManager;
  @Autowired
  UserManager userManager;
  @Autowired
  AmazonS3Client s3Client;
  @Autowired
  SemaphoreManager semphoreManager;
  @Autowired
  TableViewManager tableViewManager;

  private UserInfo adminUserInfo;
  RowReferenceSet referenceSet;
  // Schema of the table/view created by the current test.
  List<ColumnModel> schema;
  List<String> headers;
  // Id of the table/view created by the current test.
  private String tableId;
  // Entities created by a test; deleted in after().
  private List<String> toDelete;
  // File handle produced by the download job; deleted in after().
  S3FileHandle fileHandle;
  ProgressCallback mockProgressCallback;

  /** Resets queues/locks and looks up the admin user before each test. */
  @Before
  public void before() throws NotFoundException{
    // Only run this test if the table feature is enabled.
    Assume.assumeTrue(config.getTableEnabled());
    semphoreManager.releaseAllLocksAsAdmin(new UserInfo(true));
    mockProgressCallback = Mockito.mock(ProgressCallback.class);
    // Start with an empty queue.
    asynchJobStatusManager.emptyAllQueues();
    // Get the admin user
    adminUserInfo = userManager.getUserInfo(BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId());
    toDelete = new LinkedList<String>();
  }

  /** Best-effort cleanup of entities and the downloaded file handle. */
  @After
  public void after(){
    if(config.getTableEnabled()){
      if(adminUserInfo != null){
        for(String id: toDelete){
          try {
            entityManager.deleteEntity(adminUserInfo, id);
          } catch (Exception e) {}
        }
      }
      if(fileHandle != null){
        s3Client.deleteObject(fileHandle.getBucketName(), fileHandle.getKey());
        fileHandleDao.delete(fileHandle.getId());
      }
    }
  }

  /** Upload rows, download them back as CSV, and compare row-for-row. */
  @Test
  public void testRoundTrip() throws Exception{
    List<String[]> input = createTable();
    String sql = "select * from "+tableId;
    // Wait for the table to be ready
    RowSet result = waitForConsistentQuery(adminUserInfo, sql);
    assertNotNull(result);
    // Now download the data from this table as a csv
    DownloadFromTableRequest request = new DownloadFromTableRequest();
    request.setSql(sql);
    request.setWriteHeader(true);
    request.setIncludeRowIdAndRowVersion(true);
    List<String[]> results = downloadCSV(request);
    checkResults(results, input, true);
  }

  /** Round trip with a DESC sort on column "c"; rows must come back reordered. */
  @Test
  public void testRoundTripSorted() throws Exception{
    List<String[]> input = createTable();
    String sql = "select * from "+tableId;
    // Wait for the table to be ready
    RowSet result = waitForConsistentQuery(adminUserInfo, sql);
    assertNotNull(result);
    // Now download the data from this table as a csv
    DownloadFromTableRequest request = new DownloadFromTableRequest();
    request.setSql(sql);
    request.setWriteHeader(true);
    request.setIncludeRowIdAndRowVersion(true);
    SortItem sortItem = new SortItem();
    sortItem.setColumn("c");
    sortItem.setDirection(SortDirection.DESC);
    request.setSort(Lists.newArrayList(sortItem));
    List<String[]> results = downloadCSV(request);
    // Expected order by "c" descending: 1.3, 1.2, 1.1, null (header first).
    input = Lists.newArrayList(input.get(0), input.get(4), input.get(2), input.get(1), input.get(3));
    checkResults(results, input, true);
  }

  /** A query matching nothing must still produce a header-only CSV. */
  @Test
  public void testRoundTripWithZeroResults() throws Exception {
    createTable();
    String sql = "select * from " + tableId + " where a = 'xxxxxx'";
    // Wait for the table to be ready
    RowSet result = waitForConsistentQuery(adminUserInfo, sql);
    assertNotNull(result);
    // Now download the data from this table as a csv
    DownloadFromTableRequest request = new DownloadFromTableRequest();
    request.setSql(sql);
    request.setWriteHeader(true);
    request.setIncludeRowIdAndRowVersion(true);
    List<String[]> results = downloadCSV(request);
    checkResults(results, Lists.<String[]> newArrayList(new String[] { "a", "b", "c" }), true);
  }

  /** Download must also work without first waiting for the table to build. */
  @Test
  public void testDownloadWitoutWaitForSql() throws Exception {
    List<String[]> input = createTable();
    String sql = "select * from " + tableId;
    // download the data from this table as a csv
    DownloadFromTableRequest request = new DownloadFromTableRequest();
    request.setSql(sql);
    request.setWriteHeader(true);
    request.setIncludeRowIdAndRowVersion(false);
    List<String[]> results = downloadCSV(request);
    checkResults(results, input, false);
  }

  /** With includeEntityEtag null (defaults to false) no etag column appears. */
  @Test
  public void testDownloadViewWithoutEtag() throws Exception{
    // Create a project view to query
    EntityView projectView = createProjectView();
    // CSV download from the view
    DownloadFromTableRequest request = new DownloadFromTableRequest();
    request.setSql("select * from "+projectView.getId());
    request.setWriteHeader(true);
    request.setIncludeRowIdAndRowVersion(true);
    // null should default to false
    request.setIncludeEntityEtag(null);
    List<String[]> results = downloadCSV(request);
    // header + 3 project rows
    assertEquals(4, results.size());
    String[] headers = results.get(0);
    String headerString = Arrays.toString(headers);
    String[] expected = new String[]{ROW_ID, ROW_VERSION, EntityField.name.name()};
    String expectedString = Arrays.toString(expected);
    assertEquals(expectedString, headerString);
  }

  /** With includeEntityEtag true the etag column is added to the header. */
  @Test
  public void testDownloadViewWithEtag() throws Exception{
    // Create a project view to query
    EntityView projectView = createProjectView();
    // CSV download from the view
    DownloadFromTableRequest request = new DownloadFromTableRequest();
    request.setSql("select * from "+projectView.getId());
    request.setWriteHeader(true);
    request.setIncludeRowIdAndRowVersion(true);
    request.setIncludeEntityEtag(true);
    List<String[]> results = downloadCSV(request);
    // header + 3 project rows
    assertEquals(4, results.size());
    String[] headers = results.get(0);
    String headerString = Arrays.toString(headers);
    String[] expected = new String[]{ROW_ID, ROW_VERSION, ROW_ETAG, EntityField.name.name()};
    String expectedString = Arrays.toString(expected);
    assertEquals(expectedString, headerString);
  }

  /**
   * Compares downloaded CSV rows with the expected input rows, skipping the
   * leading row-id/row-version columns when includeRowAndVersion is true.
   */
  private void checkResults(List<String[]> results, List<String[]> input, boolean includeRowAndVersion) throws IOException,
      FileNotFoundException {
    assertNotNull(results);
    assertEquals(input.size(), results.size());
    for (int i = 0; i < input.size(); i++) {
      assertArrayEquals(input.get(i), Arrays.copyOfRange(results.get(i), includeRowAndVersion ? 2 : 0, results.get(i).length));
    }
  }

  /**
   * Create a project view with three rows.
   * Creates three projects and a one-column (name) EntityView scoped to
   * them, then waits until all three rows are visible in the view.
   * @return the created view
   * @throws Exception
   */
  private EntityView createProjectView() throws Exception{
    String uuid = UUID.randomUUID().toString();
    List<String> projectIds = new LinkedList<String>();
    // Create three projects
    for(int i=0; i<3; i++){
      Project project = new Project();
      project.setName(uuid+"-"+i);
      String projectId = entityManager.createEntity(adminUserInfo, project, null);
      projectIds.add(projectId);
    }
    toDelete.addAll(projectIds);
    // Create a projectView
    ColumnModel nameColumn = columnManager.createColumnModel(adminUserInfo, EntityField.name.getColumnModel());
    schema = Lists.newArrayList(nameColumn);
    headers = TableModelUtils.getIds(schema);
    EntityView view = new EntityView();
    view.setName(uuid+"-view");
    view.setScopeIds(projectIds);
    view.setColumnIds(Lists.newArrayList(nameColumn.getId()));
    view.setType(ViewType.project);
    tableId = entityManager.createEntity(adminUserInfo, view, null);
    toDelete.add(tableId);
    tableViewManager.setViewSchemaAndScope(adminUserInfo, headers, projectIds, ViewType.project, tableId);
    // Wait for the three rows to appear in the view
    int expectedRowCount = 3;
    waitForConsistentQuery(adminUserInfo, "SELECT * FROM "+tableId, expectedRowCount);
    return entityManager.getEntity(adminUserInfo, tableId, EntityView.class);
  }

  /**
   * Creates a three-column (string/integer/double) table and appends four
   * data rows (with some nulls) from an in-memory CSV.
   *
   * @return the CSV input, header row first, used as the expected output
   */
  private List<String[]> createTable() throws NotFoundException, IOException {
    // Create one column of each type
    List<ColumnModel> temp = new LinkedList<ColumnModel>();
    temp.add(TableModelTestUtils.createColumn(0L, "a", ColumnType.STRING));
    temp.add(TableModelTestUtils.createColumn(0L, "b", ColumnType.INTEGER));
    temp.add(TableModelTestUtils.createColumn(0L, "c", ColumnType.DOUBLE));
    schema = new LinkedList<ColumnModel>();
    for (ColumnModel cm : temp) {
      cm = columnManager.createColumnModel(adminUserInfo, cm);
      schema.add(cm);
    }
    List<String> headers = TableModelUtils.getIds(schema);
    // Create the table.
    TableEntity table = new TableEntity();
    table.setName(UUID.randomUUID().toString());
    table.setColumnIds(headers);
    tableId = entityManager.createEntity(adminUserInfo, table, null);
    // Bind the columns. This is normally done at the service layer but the workers cannot depend on that layer.
    tableEntityManager.setTableSchema(adminUserInfo, headers, tableId);
    // Create some CSV data
    List<String[]> input = new ArrayList<String[]>(3);
    input.add(new String[] { "a", "b", "c" });
    input.add(new String[] { "AAA", "2", "1.1" });
    input.add(new String[] { null, "3", "1.2" });
    input.add(new String[] { "FFF", "4", null });
    input.add(new String[] { "ZZZ", null, "1.3" });
    // This is the starting input stream
    CSVReader reader = TableModelTestUtils.createReader(input);
    // Write the CSV to the table
    CSVToRowIterator iterator = new CSVToRowIterator(schema, reader, true, null);
    tableEntityManager.appendRowsAsStream(adminUserInfo, tableId, schema, iterator,
        null, null, null);
    return input;
  }

  /**
   * Download a CSV for the given request.
   * Submits the async job, waits for completion, fetches the resulting
   * file handle from S3 (remembered in {@link #fileHandle} for cleanup),
   * and parses the file.
   * @param request
   * @return all CSV rows
   * @throws InterruptedException
   * @throws NotFoundException
   * @throws Exception
   */
  List<String[]> downloadCSV(DownloadFromTableRequest request) throws Exception {
    // submit the job
    AsynchronousJobStatus status = asynchJobStatusManager.startJob(adminUserInfo, request);
    // Wait for the job to complete.
    status = waitForStatus(status);
    assertNotNull(status);
    assertNotNull(status.getResponseBody());
    assertTrue(status.getResponseBody() instanceof DownloadFromTableResult);
    DownloadFromTableResult response = (DownloadFromTableResult) status.getResponseBody();
    assertNotNull(response.getEtag());
    assertNotNull(response.getResultsFileHandleId());
    // Get the filehandle
    fileHandle = (S3FileHandle) fileHandleDao.get(response.getResultsFileHandleId());
    // Read the CSV
    CSVReader csvReader;
    assertEquals("text/csv", fileHandle.getContentType());
    assertNotNull(fileHandle.getFileName());
    assertNotNull(fileHandle.getContentMd5());
    // Download the file
    File temp = File.createTempFile("DownloadCSV", ".csv");
    try{
      s3Client.getObject(new GetObjectRequest(fileHandle.getBucketName(), fileHandle.getKey()), temp);
      // Load the CSV data
      csvReader = new CSVReader(new FileReader(temp));
      try {
        return csvReader.readAll();
      } finally {
        csvReader.close();
      }
    }finally{
      // always remove the local temp copy
      temp.delete();
    }
  }

  /** Convenience overload: wait without a minimum expected row count. */
  private RowSet waitForConsistentQuery(UserInfo user, String sql) throws Exception {
    Integer expectedRowCount = null;
    return waitForConsistentQuery(user, sql,expectedRowCount);
  }

  /**
   * Wait for the query results for a given query.
   * Retries once per second (while the table is locked, unavailable, or has
   * fewer rows than expected) until MAX_WAIT_MS elapses.
   * @param user
   * @param sql
   * @param expectedRowCount If not null, then will continue to wait while the row count is less than the expected count.
   * @return
   * @throws Exception
   */
  private RowSet waitForConsistentQuery(UserInfo user, String sql, Integer expectedRowCount) throws Exception {
    long start = System.currentTimeMillis();
    boolean runQuery = true;
    boolean runCount = false;
    boolean returnFacets = false;
    Query query = new Query();
    query.setSql(sql);
    while(true){
      try {
        RowSet results = tableQueryManger.querySinglePage(mockProgressCallback, adminUserInfo, query, runQuery, runCount, returnFacets).getQueryResult().getQueryResults();
        if(expectedRowCount != null) {
          if(results.getRows() == null || results.getRows().size() < expectedRowCount) {
            System.out.println("Waiting for row count: "+expectedRowCount);
            Thread.sleep(1000);
            continue;
          }
        }
        return results;
      } catch (LockUnavilableException e) {
        System.out.println("Waiting for table lock: "+e.getLocalizedMessage());
      } catch (TableUnavailableException e) {
        System.out.println("Waiting for table index worker to build table. Status: "+e.getStatus());
      }
      assertTrue("Timed out waiting for table index worker to make the table available.", (System.currentTimeMillis()-start) < MAX_WAIT_MS);
      Thread.sleep(1000);
    }
  }

  /**
   * Polls the given async job once per second until it completes, fails
   * (which fails the test), or MAX_WAIT_MS elapses.
   */
  private AsynchronousJobStatus waitForStatus(AsynchronousJobStatus status) throws InterruptedException, DatastoreException, NotFoundException{
    long start = System.currentTimeMillis();
    while(!AsynchJobState.COMPLETE.equals(status.getJobState())){
      assertFalse("Job Failed: "+status.getErrorDetails(), AsynchJobState.FAILED.equals(status.getJobState()));
      System.out.println("Waiting for job to complete: Message: "+status.getProgressMessage()+" progress: "+status.getProgressCurrent()+"/"+status.getProgressTotal());
      assertTrue("Timed out waiting for table status",(System.currentTimeMillis()-start) < MAX_WAIT_MS);
      Thread.sleep(1000);
      // Get the status again
      status = this.asynchJobStatusManager.getJobStatus(adminUserInfo, status.getJobId());
    }
    return status;
  }
}
| |
package com.vivifram.second.hitalk.ui.page.layout;
import android.graphics.Rect;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewTreeObserver;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import com.vivifram.second.hitalk.R;
import com.vivifram.second.hitalk.bean.blackboard.BnItem;
import com.vivifram.second.hitalk.bean.blackboard.CommentItem;
import com.vivifram.second.hitalk.bean.blackboard.FavortItem;
import com.vivifram.second.hitalk.ui.recycleview.blackboard.BNAdapter;
import com.vivifram.second.hitalk.ui.recycleview.blackboard.CommentConfig;
import com.vivifram.second.hitalk.ui.recycleview.blackboard.DivItemDecoration;
import com.vivifram.second.hitalk.ui.springview.container.BlackboardRotationFooter;
import com.vivifram.second.hitalk.ui.springview.container.BlackboardRotationHeader;
import com.vivifram.second.hitalk.ui.springview.widget.SpringView;
import com.vivifram.second.hitalk.ui.view.BGATitlebar;
import com.vivifram.second.hitalk.ui.view.CommentListView;
import com.zuowei.utils.common.NLog;
import com.zuowei.utils.common.NToast;
import com.zuowei.utils.common.TagUtil;
import com.zuowei.utils.common.Utils;
import java.util.List;
/**
* Created by zuowei on 16-8-11.
*/
public class BlackboardFragmentLayout extends BaseFragmentLayout
implements SpringView.OnFreshListener{
    // Refresh type flags (pull-down vs. load-more); usage sites are not
    // visible in this file -- presumably passed with refresh callbacks.
    public final static int TYPE_PULLREFRESH = 1;
    public final static int TYPE_LOADREFRESH = 2;
public interface OnTitleClickListener{
void onLeftClick();
void onCenterClick();
void onRightClick();
}
public interface IBlackboardItemsRefreshListener{
void onRefresh();
void onLoadMore();
}
    /** Callbacks for user actions on a blackboard item (delete, like, comment). */
    public interface BnItemOnClickListener {
        void deleteBn(final String bnId);
        void addFavort(final int circlePosition);
        void deleteFavort(final int circlePosition, final String favortId);
        void deleteComment(final int circlePosition, final String commentId);
        void addComment(final String content, final CommentConfig config);
    }
    private SpringView mSv;                         // pull-to-refresh / load-more container
    private RecyclerView mBlackboardItemRv;         // list of blackboard items
    private LinearLayoutManager mLayoutManager;
    private LinearLayout mCommentLayout;            // comment input bar (edit text + send)
    private EditText mCet;                          // comment edit text
    private CommentConfig mCommentConfig;           // target of the comment being written
    private int mSelectBnItemH;
    private int mSelectCommentItemOffset;
    private int mCurrentKeyboardH;                  // last observed soft-keyboard height (px)
    private int mScreenHeight;
    private int mEditTextBodyHeight;
    private RelativeLayout mBodyLayout;             // content root; watched for layout changes
    private BGATitlebar mTitleBar;
    private BNAdapter mBnAdapter;
    private ImageView mSendIv;                      // "send comment" button
    private IBlackboardItemsRefreshListener mBcItemsRefreshListener;
    private BnItemOnClickListener mBnItemOnClickListener;
    private OnTitleClickListener mOnTitleClickListener;
    /**
     * @param root inflated root view of the blackboard fragment
     */
    public BlackboardFragmentLayout(View root) {
        super(root);
    }
    /** Called once the root view exists; wires up all child views. */
    @Override
    public void onViewCreate(View root) {
        super.onViewCreate(root);
        init();
    }
    /**
     * Finds and configures all child views: the title bar (delegating clicks
     * to {@link OnTitleClickListener}), the SpringView refresh container,
     * the item RecyclerView with its adapter/decoration, and the comment
     * input bar (tap outside hides it; send button forwards the comment).
     */
    private void init() {
        mTitleBar = (BGATitlebar) findViewById(R.id.titleBar);
        // Forward title-bar clicks to the registered listener, if any.
        mTitleBar.setDelegate(new BGATitlebar.BGATitlebarDelegate(){
            @Override
            public void onClickLeftCtv() {
                if (mOnTitleClickListener != null) {
                    mOnTitleClickListener.onLeftClick();
                }
            }
            @Override
            public void onClickTitleCtv() {
                if (mOnTitleClickListener != null) {
                    mOnTitleClickListener.onCenterClick();
                }
            }
            @Override
            public void onClickRightCtv() {
                if (mOnTitleClickListener != null) {
                    mOnTitleClickListener.onRightClick();
                }
            }
        });
        // Refresh container: header for pull-to-refresh, footer for load-more.
        mSv = (SpringView) findViewById(R.id.bbSv);
        mSv.setListener(this);
        mSv.setHeader(new BlackboardRotationHeader(mCtx,false));
        mSv.setFooter(new BlackboardRotationFooter(mCtx,false));
        mSv.setGive(SpringView.Give.BOTH);
        mSv.setType(SpringView.Type.FOLLOW);
        mBlackboardItemRv = (RecyclerView) findViewById(R.id.blackBoardRv);
        mLayoutManager = new LinearLayoutManager(mCtx);
        mBlackboardItemRv.setLayoutManager(mLayoutManager);
        mBlackboardItemRv.addItemDecoration(new DivItemDecoration(2, true));
        mBlackboardItemRv.setHasFixedSize(true);
        mBnAdapter = new BNAdapter(mAct,this);
        mBlackboardItemRv.setAdapter(mBnAdapter);
        mCommentLayout = (LinearLayout) findViewById(R.id.editCommentLayout);
        mCet = (EditText) mCommentLayout.findViewById(R.id.bcEt);
        mSendIv = (ImageView) mCommentLayout.findViewById(R.id.sendIv);
        // A touch on the list while the comment bar is visible dismisses the
        // bar and consumes the touch.
        mBlackboardItemRv.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if (mCommentLayout.getVisibility() == View.VISIBLE) {
                    updateEditTextBodyVisible(View.GONE, null);
                    return true;
                }
                return false;
            }
        });
        // (A commented-out Glide pause/resume-on-scroll listener was removed
        // here; recover it from version control if it is ever needed.)
        // Send button: validate non-empty text, forward to the listener with
        // the current CommentConfig, then hide the input bar.
        mSendIv.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mBnItemOnClickListener != null) {
                    String content = mCet.getText().toString().trim();
                    if(TextUtils.isEmpty(content)){
                        NToast.shortToast(mAppCtx,mAppCtx.getString(R.string.commentNull));
                        return;
                    }
                    mBnItemOnClickListener.addComment(content, mCommentConfig);
                }
                updateEditTextBodyVisible(View.GONE, null);
            }
        });
        setViewTreeObserver();
    }
// Watches global layout to detect soft-keyboard show/hide by comparing the
// visible frame against the full screen height, then scrolls the commented
// item just above the keyboard.
// NOTE(review): the OnGlobalLayoutListener is never removed — verify this
// layout's lifecycle guarantees it does not leak.
private void setViewTreeObserver() {
mBodyLayout = (RelativeLayout) findViewById(R.id.contentLayout);
final ViewTreeObserver swipeRefreshLayoutVTO = mBodyLayout.getViewTreeObserver();
swipeRefreshLayoutVTO.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
Rect r = new Rect();
mBodyLayout.getWindowVisibleDisplayFrame(r);
int statusBarH = getStatusBarHeight();
int screenH = mBodyLayout.getRootView().getHeight();
// Normalize the top of the visible frame to the status bar height.
if(r.top != statusBarH ){
r.top = statusBarH;
}
// Keyboard height = screen height minus the visible frame height.
int keyboardH = screenH - (r.bottom - r.top);
if(keyboardH == mCurrentKeyboardH){
// No change since last layout pass; nothing to do.
return;
}
mCurrentKeyboardH = keyboardH;
mScreenHeight = screenH;
mEditTextBodyHeight = mCet.getHeight();
// Heuristic: anything under 150px is treated as "keyboard hidden".
if(keyboardH<150){
updateEditTextBodyVisible(View.GONE, null);
return;
}
// Keyboard is up: scroll the item being commented on into view above it.
if(mLayoutManager!=null && mCommentConfig != null){
mLayoutManager.scrollToPositionWithOffset(mCommentConfig.mBnPosition + BNAdapter.HEADVIEW_SIZE, getRecycleViewOffset(mCommentConfig));
}
}
});
}
/**
 * Looks up the platform's status bar height in pixels.
 *
 * @return the dimension from the "status_bar_height" system resource, or 0
 *         when that resource cannot be resolved.
 */
private int getStatusBarHeight() {
    final int resId = mAppCtx.getResources().getIdentifier("status_bar_height", "dimen", "android");
    return resId > 0 ? mAppCtx.getResources().getDimensionPixelSize(resId) : 0;
}
/**
 * Computes the RecyclerView scroll offset that places the item being
 * commented on just above the soft keyboard and comment bar.
 *
 * @param commentConfig which item/comment is being replied to; 0 when null
 * @return pixel offset for scrollToPositionWithOffset
 */
private int getRecycleViewOffset(CommentConfig commentConfig) {
    if (commentConfig == null) {
        return 0;
    }
    // Remaining space above the keyboard once the selected item, the edit
    // box, and the title bar are accounted for.
    int offset = mScreenHeight - mSelectBnItemH - mCurrentKeyboardH - mEditTextBodyHeight - mTitleBar.getHeight();
    if (commentConfig.mCommentType == CommentConfig.Type.REPLY) {
        // Replies additionally shift by the tapped comment row's offset.
        offset += mSelectCommentItemOffset;
    }
    return offset;
}
/**
 * Shows or hides the comment input bar and toggles the soft keyboard to
 * match.
 *
 * @param visibility    View.VISIBLE to open the comment bar, View.GONE to close it
 * @param commentConfig context of the comment being written; may be null when hiding
 */
public void updateEditTextBodyVisible(int visibility, CommentConfig commentConfig) {
    mCommentConfig = commentConfig;
    mCommentLayout.setVisibility(visibility);
    // Re-measure the selected item so the keyboard-driven scroll lands correctly.
    measureBNItemHighAndCommentItemOffset(commentConfig);
    if (visibility == View.VISIBLE) {
        mCet.requestFocus();
        Utils.showSoftInput(mCet.getContext(), mCet);
    } else if (visibility == View.GONE) {
        Utils.hideSoftInput(mCet.getContext(), mCet);
    }
}
/**
 * Measures the height of the blackboard item being commented on and, for
 * replies, the vertical offset of the tapped comment row within that item.
 * Results are stored in mSelectBnItemH / mSelectCommentItemOffset for
 * getRecycleViewOffset().
 *
 * Fix: the original only null-checked selectCircleItem around the height
 * read, then dereferenced it unconditionally in the REPLY branch — an NPE
 * whenever the item had been recycled off-screen. We now bail out early.
 *
 * @param commentConfig which item/comment is targeted; no-op when null
 */
private void measureBNItemHighAndCommentItemOffset(CommentConfig commentConfig){
    if (commentConfig == null) {
        return;
    }
    int firstPosition = mLayoutManager.findFirstVisibleItemPosition();
    // Child index is relative to the first visible adapter position.
    View selectCircleItem = mLayoutManager.getChildAt(commentConfig.mBnPosition + BNAdapter.HEADVIEW_SIZE - firstPosition);
    if (selectCircleItem == null) {
        // Item not currently laid out (scrolled away / recycled): nothing to measure.
        return;
    }
    mSelectBnItemH = selectCircleItem.getHeight();
    if (commentConfig.mCommentType == CommentConfig.Type.REPLY) {
        CommentListView commentLv = (CommentListView) selectCircleItem.findViewById(R.id.commentList);
        if (commentLv != null) {
            View selectCommentItem = commentLv.getChildAt(commentConfig.mCommentPosition);
            if (selectCommentItem != null) {
                // Walk up from the tapped comment row to the item view,
                // accumulating the distance from each child's bottom edge to
                // its parent's bottom edge.
                mSelectCommentItemOffset = 0;
                View parentView = selectCommentItem;
                do {
                    int subItemBottom = parentView.getBottom();
                    parentView = (View) parentView.getParent();
                    if (parentView != null) {
                        mSelectCommentItemOffset += (parentView.getHeight() - subItemBottom);
                    }
                } while (parentView != null && parentView != selectCircleItem);
            }
        }
    }
}
// Registers the item click/comment listener and propagates it to the adapter
// (the adapter may not exist yet if called before onViewCreate()).
public void setBnItemOnClickListener(BnItemOnClickListener bnItemOnClickListener){
mBnItemOnClickListener = bnItemOnClickListener;
if (mBnAdapter != null) {
mBnAdapter.setBnItemOnClickListener(bnItemOnClickListener);
}
}
/**
 * Registers the pull-to-refresh / load-more callback used by onRefresh()
 * and onLoadmore().
 *
 * Fix: the parameter carried the member-field "m" prefix
 * (mBcItemsRefreshListener), shadowing the field and inviting a silent
 * self-assignment if the "this." qualifier were ever dropped. Renamed to a
 * plain local name; behavior is unchanged.
 *
 * @param bcItemsRefreshListener listener to notify; may be null to detach
 */
public void setBcItemsRefreshListener(IBlackboardItemsRefreshListener bcItemsRefreshListener) {
    this.mBcItemsRefreshListener = bcItemsRefreshListener;
}
// Registers the title-bar click callback.
// NOTE(review): the "setm" prefix in the method name is a generated-setter
// artifact; renaming would break callers, so it is kept as-is.
public void setmOnTitleClickListener(OnTitleClickListener onTitleClickListener){
mOnTitleClickListener = onTitleClickListener;
}
/**
 * Removes the blackboard item with the given id from the adapter's data and
 * notifies the RecyclerView (adapter positions are shifted by one for the
 * header view).
 *
 * @param bnId id of the item to delete; ignored when null or not found
 */
public void update2DeleteBn(String bnId) {
    if (bnId == null) {
        return;
    }
    List<BnItem> items = mBnAdapter.getDatas();
    for (int index = 0; index < items.size(); index++) {
        if (!bnId.equals(items.get(index).getId())) {
            continue;
        }
        items.remove(index);
        int adapterPos = index + 1; // +1 skips the adapter's header view
        mBnAdapter.notifyItemRemoved(adapterPos);
        mBnAdapter.notifyItemRangeChanged(adapterPos, mBnAdapter.getItemCount() - index - 1);
        return;
    }
}
/**
 * Appends a favorite ("like") to the item at the given data position and
 * refreshes its row.
 *
 * @param bnPosition position in the adapter's data list (header excluded)
 * @param addItem    favorite to add; no-op when null
 */
public void update2AddFavorite(int bnPosition, FavortItem addItem) {
    if (addItem == null) {
        return;
    }
    BnItem target = mBnAdapter.getDatas().get(bnPosition);
    target.getFavorters().add(addItem);
    target.setHasFavort(true);
    mBnAdapter.notifyItemChanged(bnPosition + 1); // +1 for the header view
}
/**
 * Removes the favorite with the given id from the item at bnPosition and
 * refreshes its row.
 *
 * Fix: the original checked {@code items.size() == 0} only after an early
 * {@code return} inside the loop, so {@code setHasFavort(false)} could never
 * run for the favorite that was just removed — the "has favorites" flag
 * stayed stale after the last like was withdrawn. The emptiness check now
 * happens right after the removal (and is kept as a fallback when the id is
 * not found).
 *
 * @param bnPosition position in the adapter's data list (header excluded)
 * @param favortId   id of the favorite to remove
 */
public void update2DeleteFavort(int bnPosition, String favortId) {
    BnItem item = mBnAdapter.getDatas().get(bnPosition);
    List<FavortItem> items = item.getFavorters();
    for (int i = 0; i < items.size(); i++) {
        if (favortId.equals(items.get(i).getId())) {
            items.remove(i);
            if (items.isEmpty()) {
                item.setHasFavort(false);
            }
            mBnAdapter.notifyItemChanged(bnPosition + 1); // +1 for the header view
            return;
        }
    }
    // Defensive: keep the flag consistent even when the id was not found.
    if (items.isEmpty()) {
        item.setHasFavort(false);
    }
}
/**
 * Appends a comment to the item at the given data position, refreshes its
 * row, and always clears the comment input box.
 *
 * @param bnPosition position in the adapter's data list (header excluded)
 * @param addItem    comment to add; when null only the input box is cleared
 */
public void update2AddComment(int bnPosition, CommentItem addItem) {
    if (addItem == null) {
        mCet.setText("");
        return;
    }
    BnItem target = mBnAdapter.getDatas().get(bnPosition);
    target.getComments().add(addItem);
    target.setHasComment(true);
    NLog.i(TagUtil.makeTag(getClass()),"update2AddComment bnPosition = "+bnPosition+",addItem = "+addItem);
    mBnAdapter.notifyItemChanged(bnPosition + 1); // +1 for the header view
    mCet.setText("");
}
/**
 * Removes the comment with the given id from the item at bnPosition and
 * refreshes its row.
 *
 * Fix: as with update2DeleteFavort, the original's {@code items.size() == 0}
 * check sat after an early {@code return}, so {@code setHasComment(false)}
 * never ran when the last comment was deleted. The emptiness check now
 * happens immediately after the removal (with a fallback when the id is not
 * found).
 *
 * @param bnPosition position in the adapter's data list (header excluded)
 * @param commentId  id of the comment to remove
 */
public void update2DeleteComment(int bnPosition, String commentId) {
    BnItem item = mBnAdapter.getDatas().get(bnPosition);
    List<CommentItem> items = item.getComments();
    for (int i = 0; i < items.size(); i++) {
        if (commentId.equals(items.get(i).getId())) {
            items.remove(i);
            if (items.isEmpty()) {
                item.setHasComment(false);
            }
            mBnAdapter.notifyItemChanged(bnPosition + 1); // +1 for the header view
            return;
        }
    }
    // Defensive: keep the flag consistent even when the id was not found.
    if (items.isEmpty()) {
        item.setHasComment(false);
    }
}
/**
 * Applies a freshly loaded page of items: a pull-refresh replaces the data
 * set, a load-more appends to it. Always redraws and closes the
 * refresh/load indicators.
 *
 * @param loadType TYPE_PULLREFRESH or TYPE_LOADREFRESH
 * @param datas    newly loaded items
 */
public void update2loadData(int loadType, List<BnItem> datas) {
    if (loadType == TYPE_LOADREFRESH) {
        mBnAdapter.getDatas().addAll(datas);
    } else if (loadType == TYPE_PULLREFRESH) {
        mBnAdapter.setDatas(datas);
    }
    mBnAdapter.notifyDataSetChanged();
    NLog.i(TagUtil.makeTag(getClass()),"update2loadData finish");
    finishFresh();
}
/**
 * Inserts a newly published item at the top of the list and redraws.
 *
 * @param bnItem item to prepend; no-op when null
 */
public void publish(BnItem bnItem) {
    NLog.i(TagUtil.makeTag(BlackboardFragmentLayout.class),"publish bnItem = "+bnItem);
    if (bnItem == null) {
        return;
    }
    mBnAdapter.getDatas().add(0, bnItem);
    mBnAdapter.notifyDataSetChanged();
}
// Collapses the SpringView's refresh/load-more indicators, if the view exists.
public void finishFresh(){
if (mSv != null) {
mSv.onFinishFreshAndLoad();
}
}
/**
 * SpringView pull-to-refresh callback: delegates to the registered
 * listener, or immediately collapses the indicator when none is set.
 */
@Override
public void onRefresh() {
    if (mBcItemsRefreshListener == null) {
        mSv.onFinishFreshAndLoad();
        return;
    }
    mBcItemsRefreshListener.onRefresh();
}
/**
 * SpringView load-more callback: delegates to the registered listener, or
 * immediately collapses the indicator when none is set.
 */
@Override
public void onLoadmore() {
    if (mBcItemsRefreshListener == null) {
        mSv.onFinishFreshAndLoad();
        return;
    }
    mBcItemsRefreshListener.onLoadMore();
}
/**
 * Returns the last item held by the adapter (by adapter position), or null
 * when no adapter has been created yet.
 */
public BnItem getLastBt(){
    if (mBnAdapter == null) {
        return null;
    }
    return mBnAdapter.getItem(mBnAdapter.getItemCount() - 1);
}
/**
 * Returns the item at the given adapter position, or null when no adapter
 * has been created yet.
 *
 * @param position raw adapter position (header included)
 */
public BnItem getItem(int position){
    return mBnAdapter == null ? null : mBnAdapter.getItem(position);
}
/**
 * Returns the item at the given data position, translating to an adapter
 * position by skipping the header view; null when no adapter exists yet.
 *
 * @param bnPosition position in the data list (header excluded)
 */
public BnItem getItemRelative(int bnPosition){
    return mBnAdapter == null ? null : mBnAdapter.getItem(bnPosition + 1);
}
}
| |
package com.squeed.chromecast.hipstacaster;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.MediaRouteActionProvider;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MenuItem.OnMenuItemClickListener;
import android.view.View;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.GridView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.google.cast.ApplicationChannel;
import com.google.cast.ApplicationMetadata;
import com.google.cast.ApplicationSession;
import com.google.cast.CastContext;
import com.google.cast.CastDevice;
import com.google.cast.MediaRouteAdapter;
import com.google.cast.MediaRouteHelper;
import com.google.cast.MediaRouteStateChangeListener;
import com.google.cast.SessionError;
import com.squeed.chromecast.hipstacaster.dto.Photo;
import com.squeed.chromecast.hipstacaster.grid.BitmapImageItem;
import com.squeed.chromecast.hipstacaster.grid.GridViewAdapter;
import com.squeed.chromecast.hipstacaster.grid.ImageItem;
import com.squeed.chromecast.hipstacaster.img.Callback;
import com.squeed.chromecast.hipstacaster.img.DrawableManager;
import com.squeed.chromecast.hipstacaster.rest.LoadImageListTask;
/**
* Demonstrative Activity for the HipstaCaster Android client. Somewhat based on the Android demo tic-tac-toe game from
* https://github.com/googlecast/cast-android-tictactoe
*
* @author Erik
*
*/
public class HipstaActivity extends ActionBarActivity implements MediaRouteAdapter {
// Flickr tag searched when the user has not configured one in preferences.
private static final String DEFAULT_SEARCH_TAG = "sarek";
private static final String TAG = HipstaActivity.class.getSimpleName();
private static final com.google.cast.Logger sLog = new com.google.cast.Logger(TAG, true);
// Receiver application id registered with the Cast SDK.
private static final String APP_NAME = "fc91668a-cf4b-4a18-9611-f2c120d0bf07_1";
// NOTE(review): PROTOCOL is declared but not referenced in this class —
// presumably consumed by the message stream; confirm before removing.
private static final String PROTOCOL = "com.squeed.chromecast.hipstacaster";
// Number of photo definitions requested from Flickr per page.
static final int IMAGES_PER_FETCH = 20;
private ApplicationSession mSession;
private SessionListener mSessionListener;
private CustomHipstaCasterStream mMessageStream;
private GridView gridView;
private GridViewAdapter customGridAdapter;
private TextView mInfoView; // status line at the top of the screen
private ProgressBar spinner; // shown while a load-more fetch is running
private SharedPreferences preferences;
private CastContext mCastContext;
private CastDevice mSelectedDevice;
private MediaRouter mMediaRouter;
private MediaRouteSelector mMediaRouteSelector;
private MediaRouter.Callback mMediaRouterCallback;
private ArrayList<ImageItem> photoSet; // backing data of the grid
private DrawableManager drawableManager;
// Gate for the scroll-driven pager: only true once the current batch of
// thumbnails has finished loading.
private boolean allowLoading = false;
private boolean isLoadingMore = false;
// Running grid index assigned to thumbnails as they are fetched.
private int index = 0;
/** Set when an image is clicked, can be use to supply start index of slideshow */
private int offset = 0;
private int pageOffset = 0; // current Flickr page
private int loadedInCurrentBatch = 0; // thumbnails loaded from the current page
@Override
public void onCreate(Bundle bundle) {
super.onCreate(bundle);
setContentView(R.layout.main);
initUIComponents();
loadInitialBatch();
initMediaRouter();
}
// Kicks off the first page load using the tag stored in preferences.
private void loadInitialBatch() {
preferences = PreferenceManager.getDefaultSharedPreferences(HipstaActivity.this);
// This flipped check is just for the first load, to make sure the scrollistener and this load doesn't interfere with each other at startup.
if(!allowLoading) {
allowLoading = false;
isLoadingMore = true;
new LoadImageListTask(this, pageOffset, IMAGES_PER_FETCH).execute(preferences.getString("tagsPref", DEFAULT_SEARCH_TAG));
}
}
// Binds the status text, grid (with its paging scroll listener), spinner,
// and the grid adapter over an initially empty photo set.
private void initUIComponents() {
drawableManager = new DrawableManager();
mInfoView = (TextView) findViewById(R.id.status);
mInfoView.setText("Loading...");
gridView = (GridView) findViewById(R.id.gridView);
gridView.setOnScrollListener(new GridViewOnScrollListener());
spinner = (ProgressBar) findViewById(R.id.myspinner);
spinner.setVisibility(View.GONE);
photoSet = new ArrayList<ImageItem>();
customGridAdapter = new GridViewAdapter(this, R.layout.row_grid, photoSet);
gridView.setAdapter(customGridAdapter);
}
// Sets up the Cast media-route plumbing: session listener, message stream,
// CastContext, and a route selector filtered to our receiver app.
private void initMediaRouter() {
mSessionListener = new SessionListener();
mMessageStream = new CustomHipstaCasterStream();
mCastContext = new CastContext(getApplicationContext());
MediaRouteHelper.registerMinimalMediaRouteProvider(mCastContext, this);
mMediaRouter = MediaRouter.getInstance(getApplicationContext());
mMediaRouteSelector = MediaRouteHelper.buildMediaRouteSelector(
MediaRouteHelper.CATEGORY_CAST, APP_NAME, null);
mMediaRouterCallback = new MediaRouterCallback();
}
/**
 * Called by LoadImageListTask when a page of photo definitions has been
 * fetched: adds placeholder grid entries for each photo, then loads the
 * square thumbnails asynchronously, swapping them into the grid as they
 * arrive. Once the whole batch has loaded, scroll-driven paging is
 * re-enabled via allowLoading.
 *
 * NOTE(review): the field `index` and `loadedInCurrentBatch` are mutated
 * from async callbacks; verify LoadImageListTask/DrawableManager deliver on
 * a single thread as assumed here.
 *
 * @param list photo definitions for the current page
 */
public void onPhotoListLoaded(List<Photo> list) {
loadedInCurrentBatch = 0;
mInfoView.setText("Loaded " + list.size() + " images definitions from Flickr");
for(Photo p : list) {
photoSet.add(new BitmapImageItem(drawableManager.drawableToBitmap(getResources().getDrawable(R.drawable.user_placeholder)), p.getOwnerName(), p.getFullsizeUrl(), p.getTitle(), p.getDescription()));
}
index = pageOffset * IMAGES_PER_FETCH;
for(Photo p : list) {
drawableManager.fetchDrawableOnThread(p.getSquareUrl(), index, new Callback() {
@Override
public void updateGridView(final Bitmap bitmap, final int position) {
runOnUiThread(new Runnable() {
@Override
public void run() {
BitmapImageItem itemAtPosition = (BitmapImageItem) gridView.getItemAtPosition(position);
itemAtPosition.setImage(bitmap);
gridView.invalidateViews();
loadedInCurrentBatch++;
if(loadedInCurrentBatch >= IMAGES_PER_FETCH) {
// Whole page of thumbnails is in; allow the next page.
allowLoading = true;
loadedInCurrentBatch = 0;
}
}
});
}
});
index++;
}
gridView.setAlpha(1.0f);
}
/**
 * Called when the options menu is first created.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
getMenuInflater().inflate(R.menu.menu, menu);
initChromeCastMenuItem(menu);
initSettingsMenuItem(menu);
initRefreshMenuItem(menu);
initSlideShowMenuItem(menu);
return true;
}
// Hooks the Cast button up to our route selector.
private void initChromeCastMenuItem(Menu menu) {
MenuItem mediaRouteMenuItem = menu.findItem(R.id.media_route_menu_item);
MediaRouteActionProvider mediaRouteActionProvider =
(MediaRouteActionProvider) MenuItemCompat.getActionProvider(mediaRouteMenuItem);
mediaRouteActionProvider.setRouteSelector(mMediaRouteSelector);
}
// "Slideshow" menu item: pushes the current photo set to the receiver when
// a session is active, otherwise tells the user to connect first.
private void initSlideShowMenuItem(Menu menu) {
MenuItem slideShowMenuItem = menu.findItem(R.id.action_slideshow);
slideShowMenuItem.setOnMenuItemClickListener(new OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
if(mSession != null && mSession.hasStarted() && photoSet != null && photoSet.size() > 0) {
sendPhotoSet();
Toast.makeText(HipstaActivity.this, "Starting slideshow on ChromeCast!", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(HipstaActivity.this, "Please connect to cast device first.", Toast.LENGTH_SHORT).show();
}
return false;
}
});
}
// "Refresh" menu item: clears the grid, resets paging, and reloads page 0.
private void initRefreshMenuItem(Menu menu) {
MenuItem refreshMenuItem = menu.findItem(R.id.action_refresh);
refreshMenuItem.setOnMenuItemClickListener(new OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
if(gridView != null) {
((GridViewAdapter) gridView.getAdapter()).clear();
gridView.setAlpha(0.2f);
pageOffset = 0;
new LoadImageListTask(HipstaActivity.this, pageOffset, IMAGES_PER_FETCH).execute(preferences.getString("tagsPref", DEFAULT_SEARCH_TAG));
}
return false;
}
});
}
// "Settings" menu item: opens the Preferences activity.
private void initSettingsMenuItem(Menu menu) {
MenuItem settingsMenuItem = menu.findItem(R.id.action_settings);
settingsMenuItem.setOnMenuItemClickListener(new OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
Intent settingsActivity = new Intent(getBaseContext(),
Preferences.class);
startActivity(settingsActivity);
return false;
}
});
}
/**
 * Called on application start. Starts actively scanning for Cast routes.
 */
@Override
protected void onStart() {
super.onStart();
mMediaRouter.addCallback(mMediaRouteSelector, mMediaRouterCallback,
MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
}
// Currently no custom behaviour in onPause and onStop. So no overriding.
/**
 * Ends any existing application session with a Chromecast device.
 */
private void endSession() {
if ((mSession != null) && (mSession.hasStarted())) {
try {
if (mSession.hasChannel()) {
// TODO perhaps notify receiver app that we're disconnecting?
}
mSession.endSession();
} catch (IOException e) {
Log.e(TAG, "Failed to end the session.", e);
} catch (IllegalStateException e) {
Log.e(TAG, "Unable to end session.", e);
} finally {
// Drop the reference even when ending the session failed.
mSession = null;
}
}
}
/**
 * Unregisters the media route provider and disposes the CastContext.
 */
@Override
public void onDestroy() {
Log.i(TAG, "ENTER - onDestroy");
// These two has been moved from onStop. We don't want slideshow or view photo on Chromecast to stop unless Hipstacaster app closes for real.
endSession();
mMediaRouter.removeCallback(mMediaRouterCallback);
MediaRouteHelper.unregisterMediaRouteProvider(mCastContext);
mCastContext.dispose();
mCastContext = null;
super.onDestroy();
}
/**
 * Returns the screen configuration to portrait mode whenever changed.
 */
@Override
public void onConfigurationChanged(Configuration newConfig) {
Log.i(TAG, "ENTER - onConfigurationChanged");
super.onConfigurationChanged(newConfig);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
// Starts a new application session on the given device, or ends the current
// session when device is null (route deselected).
private void setSelectedDevice(CastDevice device) {
mSelectedDevice = device;
if (mSelectedDevice != null) {
Log.i(TAG, "ENTER - setSelectedDevice: " + device.toString());
mSession = new ApplicationSession(mCastContext, mSelectedDevice);
mSession.setListener(mSessionListener);
try {
mSession.startSession(APP_NAME);
mInfoView.setText("Session started!");
} catch (IOException e) {
Log.e(TAG, "Failed to open a session", e);
mInfoView.setText("Failed to open a session");
}
} else {
endSession();
}
}
/**
 * Called when a user selects a route.
 */
private void onRouteSelected(android.support.v7.media.MediaRouter.RouteInfo route) {
sLog.d("onRouteSelected: %s", route.getName());
// Resolution continues asynchronously in onDeviceAvailable().
MediaRouteHelper.requestCastDeviceForRoute(route);
}
/**
 * Called when a user unselects a route.
 */
private void onRouteUnselected(android.support.v7.media.MediaRouter.RouteInfo route) {
sLog.d("onRouteUnselected: %s", route.getName());
setSelectedDevice(null);
}
/**
 * A class which listens to session start events. On detection, it attaches the message
 * stream.
 */
private class SessionListener implements ApplicationSession.Listener {
@Override
public void onSessionStarted(ApplicationMetadata appMetadata) {
Log.i(TAG, "SessionListener.onStarted");
ApplicationChannel channel = mSession.getChannel();
if (channel == null) {
Log.w(TAG, "onStarted: channel is null");
return;
}
channel.attachMessageStream(mMessageStream);
mInfoView.setText("Session started");
}
@Override
public void onSessionStartFailed(SessionError error) {
sLog.d("SessionListener.onStartFailed: %s", error);
}
@Override
public void onSessionEnded(SessionError error) {
sLog.d("SessionListener.onEnded: %s", error);
}
}
/**
 * An extension of the MessageStream with some local details.
 */
private class CustomHipstaCasterStream extends HipstaCasterMessageStream {
/**
 * Displays an error dialog.
 */
@Override
protected void onError(String errorMessage) {
buildAlertDialog("Error", errorMessage);
}
/**
 * Displays a message that the slideshow has ended.
 */
@Override
protected void onSlideShowEnded() {
buildAlertDialog("Message from ChromeCast", "Slideshow has ended");
}
/**
 * Updates the textView with the currently viewed photo from the slideshow (n of m)
 */
@Override
protected void onCurrentSlideShowImageMessage(String message) {
mInfoView.setText(message);
}
}
/**
 * An extension of the MediaRoute.Callback so we can invoke our own onRoute selected/unselected
 */
private class MediaRouterCallback extends MediaRouter.Callback {
@Override
public void onRouteSelected(MediaRouter router, android.support.v7.media.MediaRouter.RouteInfo route) {
Log.i(TAG, "onRouteSelected: " + route);
HipstaActivity.this.onRouteSelected(route);
}
@Override
public void onRouteUnselected(MediaRouter router, android.support.v7.media.MediaRouter.RouteInfo route) {
Log.i(TAG, "onRouteUnselected: " + route);
HipstaActivity.this.onRouteUnselected(route);
}
}
// MediaRouteAdapter: a Cast device was resolved for the selected route.
@Override
public void onDeviceAvailable(CastDevice device, String routeId,
MediaRouteStateChangeListener listener) {
sLog.d("onDeviceAvailable: %s (route %s)", device, routeId);
setSelectedDevice(device);
}
/**
 * Sends image data for a single image to the receiver app.
 * @param title
 * @param url
 * @param ownerName
 * @param description
 */
public void openPhoto(String title, String url, String ownerName, String description) {
if(isCastSessionActive()) {
mMessageStream.openPhotoOnChromecast(title, url, ownerName, description);
}
}
/**
 * Sends the current set of photos to the chromecast, including an offset index which can be used to start the slideshow
 * at any position of the set.
 */
public void sendPhotoSet() {
if(isCastSessionActive() && photoSet != null && photoSet.size() > 0) {
mMessageStream.sendPhotoSetToChromecast(photoSet, offset);
}
}
// True when a session exists, has a channel, and has started.
private boolean isCastSessionActive() {
return mSession != null && mSession.hasChannel() && mSession.hasStarted();
}
// Shows the load-more spinner; safe to call from a background thread.
public void showSpinner() {
isLoadingMore = true;
this.runOnUiThread(new Runnable() {
@Override
public void run() {
spinner.setVisibility(View.VISIBLE);
}
});
}
// Hides the load-more spinner; safe to call from a background thread.
public void hideSpinner() {
isLoadingMore = false;
this.runOnUiThread(new Runnable() {
@Override
public void run() {
spinner.setVisibility(View.GONE);
}
});
}
/**
 * Sets the current offset (e.g. selected image index)
 *
 * @param offset
 */
public void setOffset(int offset) {
this.offset = offset;
}
// MediaRouteAdapter volume callbacks — intentionally no-ops for this demo.
@Override
public void onSetVolume(double volume) {
}
@Override
public void onUpdateVolume(double delta) {
}
// Shows a simple modal dialog with an OK button that just dismisses it.
private void buildAlertDialog(String title, String msg) {
new AlertDialog.Builder(HipstaActivity.this)
.setTitle(title)
.setMessage(msg)
.setCancelable(false)
.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
})
.create()
.show();
}
// Infinite-scroll pager: when the last visible row reaches the end of the
// grid and no load is in flight, fetch the next page.
private class GridViewOnScrollListener implements OnScrollListener {
@Override
public void onScroll(AbsListView view, int firstVisibleItem,
int visibleItemCount, int totalItemCount) {
int lastInScreen = firstVisibleItem + visibleItemCount;
if (totalItemCount > 0 && (lastInScreen == totalItemCount) && !isLoadingMore && !(pageOffset*IMAGES_PER_FETCH > gridView.getCount()) && allowLoading) {
isLoadingMore = true;
new LoadImageListTask(HipstaActivity.this, ++pageOffset, IMAGES_PER_FETCH).execute(preferences.getString("tagsPref", DEFAULT_SEARCH_TAG));
}
}
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer;
import java.util.ArrayList;
import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
/**
* Expression processor factory for pruning. Each processor tries to
* convert the expression subtree into a pruning expression.
*
* It can be used for partition prunner and list bucketing pruner.
*/
public abstract class PrunerExpressionOperatorFactory {

  /**
   * If all children are candidates and refer only to one table alias then this
   * expr is a candidate else it is not a candidate but its children could be
   * final candidates.
   */
  public static class GenericFuncExprProcessor implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      ExprNodeGenericFuncDesc fd = (ExprNodeGenericFuncDesc) nd;
      boolean unknown = false;
      if (FunctionRegistry.isOpAndOrNot(fd)) {
        // AND/OR/NOT support null (UNKNOWN) value evaluation themselves.
        // NOTE: In the future all UDFs that treat null value as UNKNOWN (both
        // in parameters and return values) should derive from a common base
        // class UDFNullAsUnknown, so instead of listing the classes here we
        // would test whether a class is derived from that base class.
        // Only when every child is a null constant is the result unknown.
        boolean isAllNull = true;
        for (Object child : nodeOutputs) {
          if (!isNullConstant((ExprNodeDesc) child)) {
            isAllNull = false;
          }
        }
        unknown = isAllNull;
      } else if (!FunctionRegistry.isConsistentWithinQuery(fd.getGenericUDF())) {
        // Non-deterministic UDFs cannot be evaluated at prune time.
        unknown = true;
      } else {
        // Deterministic UDF: any null child makes the result unknown.
        for (Object child : nodeOutputs) {
          if (isNullConstant((ExprNodeDesc) child)) {
            unknown = true;
          }
        }
      }
      if (unknown) {
        // Collapse the whole call to a typed null constant.
        return new ExprNodeConstantDesc(fd.getTypeInfo(), null);
      }
      // Rebuild the function descriptor over the (possibly rewritten) children.
      ArrayList<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
      for (Object child : nodeOutputs) {
        children.add((ExprNodeDesc) child);
      }
      return new ExprNodeGenericFuncDesc(fd.getTypeInfo(), fd.getGenericUDF(), children);
    }
  }

  /**
   * FieldExprProcessor: rewrites a field access over its (single) rewritten
   * child, collapsing to a null constant when the child is a null constant.
   */
  public static class FieldExprProcessor implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      ExprNodeFieldDesc fnd = (ExprNodeFieldDesc) nd;
      boolean unknown = false;
      ExprNodeDesc left_nd = null;
      // A field access has exactly one child; the loop leaves it in left_nd.
      // (A dead `idx` counter and a vacuous `assert (idx == 0)` were removed
      // from the original — idx was never incremented.)
      for (Object child : nodeOutputs) {
        ExprNodeDesc child_nd = (ExprNodeDesc) child;
        if (isNullConstant(child_nd)) {
          unknown = true;
        }
        left_nd = child_nd;
      }
      if (unknown) {
        return new ExprNodeConstantDesc(fnd.getTypeInfo(), null);
      }
      return new ExprNodeFieldDesc(fnd.getTypeInfo(), left_nd, fnd.getFieldName(),
          fnd.getIsList());
    }
  }

  /**
   * Processor for column expressions. Delegates the actual rewrite to the
   * subclass via {@link #processColumnDesc}.
   */
  public static abstract class ColumnExprProcessor implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      return processColumnDesc(procCtx, (ExprNodeColumnDesc) nd);
    }

    /**
     * Process column desc. It should be done by subclass.
     *
     * @param procCtx processor context
     * @param cd      the column expression to rewrite
     * @return the rewritten expression
     */
    protected abstract ExprNodeDesc processColumnDesc(NodeProcessorCtx procCtx,
        ExprNodeColumnDesc cd);
  }

  /**
   * Processor for constants and null expressions. Constants are cloned;
   * anything else is replaced with a typed null constant.
   */
  public static class DefaultExprProcessor implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      if (nd instanceof ExprNodeConstantDesc) {
        return ((ExprNodeConstantDesc) nd).clone();
      }
      return new ExprNodeConstantDesc(((ExprNodeDesc) nd).getTypeInfo(), null);
    }
  }

  /** Returns true when desc is a constant expression whose value is null. */
  private static boolean isNullConstant(ExprNodeDesc desc) {
    return desc instanceof ExprNodeConstantDesc
        && ((ExprNodeConstantDesc) desc).getValue() == null;
  }

  /**
   * Instantiate default expression processor.
   * @return a new DefaultExprProcessor
   */
  public static final NodeProcessor getDefaultExprProcessor() {
    return new DefaultExprProcessor();
  }

  /**
   * Instantiate generic function processor.
   *
   * @return a new GenericFuncExprProcessor
   */
  public static final NodeProcessor getGenericFuncProcessor() {
    return new GenericFuncExprProcessor();
  }

  /**
   * Instantiate field processor.
   *
   * @return a new FieldExprProcessor
   */
  public static final NodeProcessor getFieldProcessor() {
    return new FieldExprProcessor();
  }
}
| |
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.LinkedList;
import java.util.Objects;
import org.apache.lucene.analysis.NumericTokenStream; // for javadocs
import org.apache.lucene.document.DoubleField; // for javadocs
import org.apache.lucene.document.FloatField; // for javadocs
import org.apache.lucene.document.IntField; // for javadocs
import org.apache.lucene.document.LongField; // for javadocs
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.ToStringUtils;
import org.apache.lucene.index.Term; // for javadocs
/**
* <p>A {@link Query} that matches numeric values within a
* specified range. To use this, you must first index the
* numeric values using {@link IntField}, {@link
* FloatField}, {@link LongField} or {@link DoubleField} (expert: {@link
* NumericTokenStream}). If your terms are instead textual,
* you should use {@link TermRangeQuery}.</p>
*
* <p>You create a new NumericRangeQuery with the static
* factory methods, eg:
*
* <pre class="prettyprint">
* Query q = NumericRangeQuery.newFloatRange("weight", 0.03f, 0.10f, true, true);
* </pre>
*
* matches all documents whose float valued "weight" field
* ranges from 0.03 to 0.10, inclusive.
*
* <p>The performance of NumericRangeQuery is much better
* than the corresponding {@link TermRangeQuery} because the
* number of terms that must be searched is usually far
* fewer, thanks to trie indexing, described below.</p>
*
* <p>You can optionally specify a <a
* href="#precisionStepDesc"><code>precisionStep</code></a>
* when creating this query. This is necessary if you've
* changed this configuration from its default (4) during
* indexing. Lower values consume more disk space but speed
* up searching. Suitable values are between <b>1</b> and
* <b>8</b>. A good starting point to test is <b>4</b>,
* which is the default value for all <code>Numeric*</code>
* classes. See <a href="#precisionStepDesc">below</a> for
* details.
*
* <p>This query defaults to {@linkplain
* MultiTermQuery#CONSTANT_SCORE_REWRITE}.
* With precision steps of ≤4, this query can be run with
* one of the BooleanQuery rewrite methods without changing
* BooleanQuery's default max clause count.
*
* <br><h3>How it works</h3>
*
* <p>See the publication about <a target="_blank" href="http://www.panfmp.org">panFMP</a>,
* where this algorithm was described (referred to as <code>TrieRangeQuery</code>):
*
* <blockquote><strong>Schindler, U, Diepenbroek, M</strong>, 2008.
* <em>Generic XML-based Framework for Metadata Portals.</em>
 * Computers &amp; Geosciences 34 (12), 1947-1955.
* <a href="http://dx.doi.org/10.1016/j.cageo.2008.02.023"
* target="_blank">doi:10.1016/j.cageo.2008.02.023</a></blockquote>
*
* <p><em>A quote from this paper:</em> Because Apache Lucene is a full-text
* search engine and not a conventional database, it cannot handle numerical ranges
* (e.g., field value is inside user defined bounds, even dates are numerical values).
* We have developed an extension to Apache Lucene that stores
* the numerical values in a special string-encoded format with variable precision
* (all numerical values like doubles, longs, floats, and ints are converted to
* lexicographic sortable string representations and stored with different precisions
* (for a more detailed description of how the values are stored,
* see {@link NumericUtils}). A range is then divided recursively into multiple intervals for searching:
* The center of the range is searched only with the lowest possible precision in the <em>trie</em>,
* while the boundaries are matched more exactly. This reduces the number of terms dramatically.</p>
*
* <p>For the variant that stores long values in 8 different precisions (each reduced by 8 bits) that
* uses a lowest precision of 1 byte, the index contains only a maximum of 256 distinct values in the
* lowest precision. Overall, a range could consist of a theoretical maximum of
* <code>7*255*2 + 255 = 3825</code> distinct terms (when there is a term for every distinct value of an
* 8-byte-number in the index and the range covers almost all of them; a maximum of 255 distinct values is used
* because it would always be possible to reduce the full 256 values to one term with degraded precision).
* In practice, we have seen up to 300 terms in most cases (index with 500,000 metadata records
* and a uniform value distribution).</p>
*
* <h3><a name="precisionStepDesc">Precision Step</a></h3>
* <p>You can choose any <code>precisionStep</code> when encoding values.
* Lower step values mean more precisions and so more terms in index (and index gets larger). The number
* of indexed terms per value is (those are generated by {@link NumericTokenStream}):
* <p style="font-family:serif">
* indexedTermsPerValue = <b>ceil</b><big>(</big>bitsPerValue / precisionStep<big>)</big>
* </p>
* As the lower precision terms are shared by many values, the additional terms only
* slightly grow the term dictionary (approx. 7% for <code>precisionStep=4</code>), but have a larger
* impact on the postings (the postings file will have more entries, as every document is linked to
* <code>indexedTermsPerValue</code> terms instead of one). The formula to estimate the growth
* of the term dictionary in comparison to one term per value:
* <p>
* <!-- the formula in the alt attribute was transformed from latex to PNG with http://1.618034.com/latex.php (with 110 dpi): -->
* <img src="doc-files/nrq-formula-1.png" alt="\mathrm{termDictOverhead} = \sum\limits_{i=0}^{\mathrm{indexedTermsPerValue}-1} \frac{1}{2^{\mathrm{precisionStep}\cdot i}}">
* </p>
* <p>On the other hand, if the <code>precisionStep</code> is smaller, the maximum number of terms to match reduces,
* which optimizes query speed. The formula to calculate the maximum number of terms that will be visited while
* executing the query is:
* <p>
* <!-- the formula in the alt attribute was transformed from latex to PNG with http://1.618034.com/latex.php (with 110 dpi): -->
* <img src="doc-files/nrq-formula-2.png" alt="\mathrm{maxQueryTerms} = \left[ \left( \mathrm{indexedTermsPerValue} - 1 \right) \cdot \left(2^\mathrm{precisionStep} - 1 \right) \cdot 2 \right] + \left( 2^\mathrm{precisionStep} - 1 \right)">
* </p>
* <p>For longs stored using a precision step of 4, <code>maxQueryTerms = 15*15*2 + 15 = 465</code>, and for a precision
* step of 2, <code>maxQueryTerms = 31*3*2 + 3 = 189</code>. But the faster search speed is reduced by more seeking
* in the term enum of the index. Because of this, the ideal <code>precisionStep</code> value can only
* be found out by testing. <b>Important:</b> You can index with a lower precision step value and test search speed
* using a multiple of the original step value.</p>
*
* <p>Good values for <code>precisionStep</code> are depending on usage and data type:
* <ul>
* <li>The default for all data types is <b>4</b>, which is used, when no <code>precisionStep</code> is given.
* <li>Ideal value in most cases for <em>64 bit</em> data types <em>(long, double)</em> is <b>6</b> or <b>8</b>.
* <li>Ideal value in most cases for <em>32 bit</em> data types <em>(int, float)</em> is <b>4</b>.
 * <li>For low cardinality fields larger precision steps are good. If the cardinality is &lt; 100, it is
* fair to use {@link Integer#MAX_VALUE} (see below).
* <li>Steps <b>≥64</b> for <em>long/double</em> and <b>≥32</b> for <em>int/float</em> produces one token
* per value in the index and querying is as slow as a conventional {@link TermRangeQuery}. But it can be used
* to produce fields, that are solely used for sorting (in this case simply use {@link Integer#MAX_VALUE} as
* <code>precisionStep</code>). Using {@link IntField},
* {@link LongField}, {@link FloatField} or {@link DoubleField} for sorting
* is ideal, because building the field cache is much faster than with text-only numbers.
* These fields have one term per value and therefore also work with term enumeration for building distinct lists
* (e.g. facets / preselected values to search for).
* Sorting is also possible with range query optimized fields using one of the above <code>precisionSteps</code>.
* </ul>
*
* <p>Comparisons of the different types of RangeQueries on an index with about 500,000 docs showed
* that {@link TermRangeQuery} in boolean rewrite mode (with raised {@link BooleanQuery} clause count)
* took about 30-40 secs to complete, {@link TermRangeQuery} in constant score filter rewrite mode took 5 secs
 * and executing this class took &lt;100ms to complete (on an Opteron64 machine, Java 1.5, 8 bit
* precision step). This query type was developed for a geographic portal, where the performance for
* e.g. bounding boxes or exact date/time stamps is important.</p>
*
* @since 2.9
**/
public final class NumericRangeQuery<T extends Number> extends MultiTermQuery {

  // Private: instances are created only through the typed static factories below,
  // which pin T and NumericType together.
  private NumericRangeQuery(final String field, final int precisionStep, final NumericType dataType,
      T min, T max, final boolean minInclusive, final boolean maxInclusive
  ) {
    super(field);
    if (precisionStep < 1)
      throw new IllegalArgumentException("precisionStep must be >=1");
    this.precisionStep = precisionStep;
    this.dataType = Objects.requireNonNull(dataType, "NumericType must not be null");
    this.min = min;
    this.max = max;
    this.minInclusive = minInclusive;
    this.maxInclusive = maxInclusive;
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>long</code>
   * range using the given <a href="#precisionStepDesc"><code>precisionStep</code></a>.
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Long> newLongRange(final String field, final int precisionStep,
      Long min, Long max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, precisionStep, NumericType.LONG, min, max, minInclusive, maxInclusive);
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>long</code>
   * range using the default <code>precisionStep</code> {@link NumericUtils#PRECISION_STEP_DEFAULT} (16).
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Long> newLongRange(final String field,
      Long min, Long max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, NumericUtils.PRECISION_STEP_DEFAULT, NumericType.LONG, min, max, minInclusive, maxInclusive);
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>int</code>
   * range using the given <a href="#precisionStepDesc"><code>precisionStep</code></a>.
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Integer> newIntRange(final String field, final int precisionStep,
      Integer min, Integer max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, precisionStep, NumericType.INT, min, max, minInclusive, maxInclusive);
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>int</code>
   * range using the default <code>precisionStep</code> {@link NumericUtils#PRECISION_STEP_DEFAULT_32} (8).
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Integer> newIntRange(final String field,
      Integer min, Integer max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, NumericUtils.PRECISION_STEP_DEFAULT_32, NumericType.INT, min, max, minInclusive, maxInclusive);
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>double</code>
   * range using the given <a href="#precisionStepDesc"><code>precisionStep</code></a>.
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>.
   * {@link Double#NaN} will never match a half-open range, to hit {@code NaN} use a query
   * with {@code min == max == Double.NaN}. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Double> newDoubleRange(final String field, final int precisionStep,
      Double min, Double max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, precisionStep, NumericType.DOUBLE, min, max, minInclusive, maxInclusive);
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>double</code>
   * range using the default <code>precisionStep</code> {@link NumericUtils#PRECISION_STEP_DEFAULT} (16).
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>.
   * {@link Double#NaN} will never match a half-open range, to hit {@code NaN} use a query
   * with {@code min == max == Double.NaN}. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Double> newDoubleRange(final String field,
      Double min, Double max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, NumericUtils.PRECISION_STEP_DEFAULT, NumericType.DOUBLE, min, max, minInclusive, maxInclusive);
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>float</code>
   * range using the given <a href="#precisionStepDesc"><code>precisionStep</code></a>.
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>.
   * {@link Float#NaN} will never match a half-open range, to hit {@code NaN} use a query
   * with {@code min == max == Float.NaN}. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Float> newFloatRange(final String field, final int precisionStep,
      Float min, Float max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, precisionStep, NumericType.FLOAT, min, max, minInclusive, maxInclusive);
  }

  /**
   * Factory that creates a <code>NumericRangeQuery</code>, that queries a <code>float</code>
   * range using the default <code>precisionStep</code> {@link NumericUtils#PRECISION_STEP_DEFAULT_32} (8).
   * You can have half-open ranges (which are in fact &lt;/≤ or &gt;/≥ queries)
   * by setting the min or max value to <code>null</code>.
   * {@link Float#NaN} will never match a half-open range, to hit {@code NaN} use a query
   * with {@code min == max == Float.NaN}. By setting inclusive to false, it will
   * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
   */
  public static NumericRangeQuery<Float> newFloatRange(final String field,
      Float min, Float max, final boolean minInclusive, final boolean maxInclusive
  ) {
    return new NumericRangeQuery<>(field, NumericUtils.PRECISION_STEP_DEFAULT_32, NumericType.FLOAT, min, max, minInclusive, maxInclusive);
  }

  @Override @SuppressWarnings("unchecked")
  protected TermsEnum getTermsEnum(final Terms terms, AttributeSource atts) throws IOException {
    // very strange: java.lang.Number itself is not Comparable, but all subclasses used here are
    // An inverted range (min > max) can match nothing, so short-circuit with the empty enum.
    if (min != null && max != null && ((Comparable<T>) min).compareTo(max) > 0) {
      return TermsEnum.EMPTY;
    }
    return new NumericRangeTermsEnum(terms.iterator());
  }

  /** Returns <code>true</code> if the lower endpoint is inclusive */
  public boolean includesMin() { return minInclusive; }

  /** Returns <code>true</code> if the upper endpoint is inclusive */
  public boolean includesMax() { return maxInclusive; }

  /** Returns the lower value of this range query */
  public T getMin() { return min; }

  /** Returns the upper value of this range query */
  public T getMax() { return max; }

  /** Returns the precision step. */
  public int getPrecisionStep() { return precisionStep; }

  @Override
  public String toString(final String field) {
    // Classic Lucene range syntax: field:[min TO max] with { } for exclusive bounds
    // and "*" for an open (null) endpoint, followed by the boost suffix.
    final StringBuilder sb = new StringBuilder();
    if (!getField().equals(field)) sb.append(getField()).append(':');
    return sb.append(minInclusive ? '[' : '{')
      .append((min == null) ? "*" : min.toString())
      .append(" TO ")
      .append((max == null) ? "*" : max.toString())
      .append(maxInclusive ? ']' : '}')
      .append(ToStringUtils.boost(getBoost()))
      .toString();
  }

  @Override
  @SuppressWarnings({"unchecked","rawtypes"})
  public final boolean equals(final Object o) {
    if (o==this) return true;
    if (!super.equals(o))
      return false;
    if (o instanceof NumericRangeQuery) {
      final NumericRangeQuery q=(NumericRangeQuery)o;
      return (
        (q.min == null ? min == null : q.min.equals(min)) &&
        (q.max == null ? max == null : q.max.equals(max)) &&
        minInclusive == q.minInclusive &&
        maxInclusive == q.maxInclusive &&
        precisionStep == q.precisionStep
      );
    }
    return false;
  }

  @Override
  public final int hashCode() {
    // Mixes each component with a distinct constant so that e.g. swapping
    // min/max or the two inclusive flags yields different hashes.
    int hash = super.hashCode();
    hash += precisionStep^0x64365465;
    if (min != null) hash += min.hashCode()^0x14fa55fb;
    if (max != null) hash += max.hashCode()^0x733fa5fe;
    return hash +
      (Boolean.valueOf(minInclusive).hashCode()^0x14fa55fb)+
      (Boolean.valueOf(maxInclusive).hashCode()^0x733fa5fe);
  }

  // members (package private, to be also fast accessible by NumericRangeTermsEnum)
  final int precisionStep;
  final NumericType dataType;
  final T min, max;
  final boolean minInclusive,maxInclusive;

  // used to handle float/double infinity correctly
  static final long LONG_NEGATIVE_INFINITY =
    NumericUtils.doubleToSortableLong(Double.NEGATIVE_INFINITY);
  static final long LONG_POSITIVE_INFINITY =
    NumericUtils.doubleToSortableLong(Double.POSITIVE_INFINITY);
  static final int INT_NEGATIVE_INFINITY =
    NumericUtils.floatToSortableInt(Float.NEGATIVE_INFINITY);
  static final int INT_POSITIVE_INFINITY =
    NumericUtils.floatToSortableInt(Float.POSITIVE_INFINITY);

  /**
   * Subclass of FilteredTermsEnum for enumerating all terms that match the
   * sub-ranges for trie range queries, using flex API.
   * <p>
   * WARNING: This term enumeration is not guaranteed to be always ordered by
   * {@link Term#compareTo}.
   * The ordering depends on how {@link NumericUtils#splitLongRange} and
   * {@link NumericUtils#splitIntRange} generates the sub-ranges. For
   * {@link MultiTermQuery} ordering is not relevant.
   */
  private final class NumericRangeTermsEnum extends FilteredTermsEnum {

    // Bounds of the sub-range currently being enumerated; null until nextRange() runs.
    private BytesRef currentLowerBound, currentUpperBound;

    // Flat queue of (lower, upper) prefix-coded bound pairs produced by the range split.
    private final LinkedList<BytesRef> rangeBounds = new LinkedList<>();

    NumericRangeTermsEnum(final TermsEnum tenum) {
      super(tenum);
      switch (dataType) {
        case LONG:
        case DOUBLE: {
          // lower
          long minBound;
          if (dataType == NumericType.LONG) {
            minBound = (min == null) ? Long.MIN_VALUE : min.longValue();
          } else {
            assert dataType == NumericType.DOUBLE;
            minBound = (min == null) ? LONG_NEGATIVE_INFINITY
              : NumericUtils.doubleToSortableLong(min.doubleValue());
          }
          if (!minInclusive && min != null) {
            // Exclusive bound at the extreme value would overflow; break leaves
            // rangeBounds empty, so the enum matches nothing.
            if (minBound == Long.MAX_VALUE) break;
            minBound++;
          }
          // upper
          long maxBound;
          if (dataType == NumericType.LONG) {
            maxBound = (max == null) ? Long.MAX_VALUE : max.longValue();
          } else {
            assert dataType == NumericType.DOUBLE;
            maxBound = (max == null) ? LONG_POSITIVE_INFINITY
              : NumericUtils.doubleToSortableLong(max.doubleValue());
          }
          if (!maxInclusive && max != null) {
            if (maxBound == Long.MIN_VALUE) break;
            maxBound--;
          }
          // Recursively split [minBound, maxBound] into trie sub-ranges and queue
          // each pair of prefix-coded bounds for enumeration.
          NumericUtils.splitLongRange(new NumericUtils.LongRangeBuilder() {
            @Override
            public final void addRange(BytesRef minPrefixCoded, BytesRef maxPrefixCoded) {
              rangeBounds.add(minPrefixCoded);
              rangeBounds.add(maxPrefixCoded);
            }
          }, precisionStep, minBound, maxBound);
          break;
        }
        case INT:
        case FLOAT: {
          // lower
          int minBound;
          if (dataType == NumericType.INT) {
            minBound = (min == null) ? Integer.MIN_VALUE : min.intValue();
          } else {
            assert dataType == NumericType.FLOAT;
            minBound = (min == null) ? INT_NEGATIVE_INFINITY
              : NumericUtils.floatToSortableInt(min.floatValue());
          }
          if (!minInclusive && min != null) {
            // Same overflow guard as the 64-bit case above.
            if (minBound == Integer.MAX_VALUE) break;
            minBound++;
          }
          // upper
          int maxBound;
          if (dataType == NumericType.INT) {
            maxBound = (max == null) ? Integer.MAX_VALUE : max.intValue();
          } else {
            assert dataType == NumericType.FLOAT;
            maxBound = (max == null) ? INT_POSITIVE_INFINITY
              : NumericUtils.floatToSortableInt(max.floatValue());
          }
          if (!maxInclusive && max != null) {
            if (maxBound == Integer.MIN_VALUE) break;
            maxBound--;
          }
          NumericUtils.splitIntRange(new NumericUtils.IntRangeBuilder() {
            @Override
            public final void addRange(BytesRef minPrefixCoded, BytesRef maxPrefixCoded) {
              rangeBounds.add(minPrefixCoded);
              rangeBounds.add(maxPrefixCoded);
            }
          }, precisionStep, minBound, maxBound);
          break;
        }
        default:
          // should never happen
          throw new IllegalArgumentException("Invalid NumericType");
      }
    }

    // Pops the next (lower, upper) pair off the queue into the current bounds.
    private void nextRange() {
      assert rangeBounds.size() % 2 == 0;
      currentLowerBound = rangeBounds.removeFirst();
      assert currentUpperBound == null || currentUpperBound.compareTo(currentLowerBound) <= 0 :
        "The current upper bound must be <= the new lower bound";
      currentUpperBound = rangeBounds.removeFirst();
    }

    @Override
    protected final BytesRef nextSeekTerm(BytesRef term) {
      while (rangeBounds.size() >= 2) {
        nextRange();
        // if the new upper bound is before the term parameter, the sub-range is never a hit
        if (term != null && term.compareTo(currentUpperBound) > 0)
          continue;
        // never seek backwards, so use current term if lower bound is smaller
        return (term != null && term.compareTo(currentLowerBound) > 0) ?
          term : currentLowerBound;
      }
      // no more sub-range enums available
      assert rangeBounds.isEmpty();
      currentLowerBound = currentUpperBound = null;
      return null;
    }

    @Override
    protected final AcceptStatus accept(BytesRef term) {
      while (currentUpperBound == null || term.compareTo(currentUpperBound) > 0) {
        if (rangeBounds.isEmpty())
          return AcceptStatus.END;
        // peek next sub-range, only seek if the current term is smaller than next lower bound
        if (term.compareTo(rangeBounds.getFirst()) < 0)
          return AcceptStatus.NO_AND_SEEK;
        // step forward to next range without seeking, as next lower range bound is less or equal current term
        nextRange();
      }
      return AcceptStatus.YES;
    }
  }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.interactions;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeFalse;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Ignore.Driver.SAFARI;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.Platform;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.Color;
import org.openqa.selenium.support.Colors;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.JavascriptEnabled;
import org.openqa.selenium.testing.NotYetImplemented;
import org.openqa.selenium.testing.TestUtilities;
/**
* Tests interaction through the advanced gestures API of keyboard handling.
*/
@Ignore(value = {SAFARI, MARIONETTE},
reason = "Safari: not implemented (issue 4136)",
issues = {4136})
public class BasicKeyboardInterfaceTest extends JUnit4TestBase {

  // Convenience factory for a fresh Actions builder bound to the given driver.
  private Actions getBuilder(WebDriver driver) {
    return new Actions(driver);
  }

  // Plain text typed into an element should appear verbatim in its value.
  @JavascriptEnabled
  @Test
  public void testBasicKeyboardInput() {
    driver.get(pages.javascriptPage);
    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    Action sendLowercase = getBuilder(driver).sendKeys(keyReporter, "abc def").build();
    sendLowercase.perform();
    assertThat(keyReporter.getAttribute("value"), is("abc def"));
  }

  // keyDown(SHIFT) alone must fire only a keydown event (no keypress/keyup yet).
  @JavascriptEnabled
  @Ignore({IE})
  @Test
  public void testSendingKeyDownOnly() {
    driver.get(pages.javascriptPage);
    WebElement keysEventInput = driver.findElement(By.id("theworks"));
    Action pressShift = getBuilder(driver).keyDown(keysEventInput, Keys.SHIFT).build();
    pressShift.perform();
    WebElement keyLoggingElement = driver.findElement(By.id("result"));
    String logText = keyLoggingElement.getText();
    // Release the modifier so later tests are not affected by a stuck SHIFT key.
    Action releaseShift = getBuilder(driver).keyUp(keysEventInput, Keys.SHIFT).build();
    releaseShift.perform();
    assertTrue("Key down event not isolated, got: " + logText,
        logText.endsWith("keydown"));
  }

  // keyUp(SHIFT) after a keyDown must end the event log with a keyup event.
  @JavascriptEnabled
  @Ignore({IE})
  @Test
  public void testSendingKeyUp() {
    driver.get(pages.javascriptPage);
    WebElement keysEventInput = driver.findElement(By.id("theworks"));
    Action pressShift = getBuilder(driver).keyDown(keysEventInput, Keys.SHIFT).build();
    pressShift.perform();
    WebElement keyLoggingElement = driver.findElement(By.id("result"));
    String eventsText = keyLoggingElement.getText();
    // Precondition: only the keydown has fired so far, otherwise the keyup
    // assertion below would be meaningless.
    assertTrue("Key down should be isolated for this test to be meaningful. " +
        "Got events: " + eventsText, eventsText.endsWith("keydown"));
    Action releaseShift = getBuilder(driver).keyUp(keysEventInput, Keys.SHIFT).build();
    releaseShift.perform();
    eventsText = keyLoggingElement.getText();
    assertTrue("Key up event not isolated. Got events: " + eventsText,
        eventsText.endsWith("keyup"));
  }

  // Holding SHIFT while sending "ab" must produce uppercase input and the
  // exact modifier-wrapped event sequence.
  @JavascriptEnabled
  @Ignore({IE, HTMLUNIT})
  @Test
  public void testSendingKeysWithShiftPressed() {
    driver.get(pages.javascriptPage);
    WebElement keysEventInput = driver.findElement(By.id("theworks"));
    keysEventInput.click();
    String existingResult = getFormEvents();
    Action pressShift = getBuilder(driver).keyDown(keysEventInput, Keys.SHIFT).build();
    pressShift.perform();
    Action sendLowercase = getBuilder(driver).sendKeys(keysEventInput, "ab").build();
    sendLowercase.perform();
    Action releaseShift = getBuilder(driver).keyUp(keysEventInput, Keys.SHIFT).build();
    releaseShift.perform();
    // SHIFT down, two (keydown keypress keyup) triples for 'a' and 'b', SHIFT up.
    String expectedEvents = " keydown keydown keypress keyup keydown keypress keyup keyup";
    assertThatFormEventsFiredAreExactly("Shift key not held",
        existingResult + expectedEvents);
    assertThat(keysEventInput.getAttribute("value"), is("AB"));
  }

  // sendKeys without a target element goes to the active element (the body here),
  // so only body events fire and the form log stays empty.
  @JavascriptEnabled
  @Test
  public void testSendingKeysToActiveElement() {
    driver.get(pages.bodyTypingPage);
    Action someKeys = getBuilder(driver).sendKeys("ab").build();
    someKeys.perform();
    assertThatBodyEventsFiredAreExactly("keypress keypress");
    assertThatFormEventsFiredAreExactly("");
  }

  // Clicking an input first makes it the active element; targetless sendKeys
  // should then type into it.
  @Test
  public void testBasicKeyboardInputOnActiveElement() {
    driver.get(pages.javascriptPage);
    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.click();
    Action sendLowercase = getBuilder(driver).sendKeys("abc def").build();
    sendLowercase.perform();
    assertThat(keyReporter.getAttribute("value"), is("abc def"));
  }

  // Modifier+key shortcuts change the page background; each combination maps
  // to a distinct color defined by keyboard_shortcut.html.
  @Ignore(value = {IE, SAFARI}, reason = "untested")
  @NotYetImplemented(HTMLUNIT)
  @JavascriptEnabled
  @Test
  public void canGenerateKeyboardShortcuts() {
    driver.get(appServer.whereIs("keyboard_shortcut.html"));
    WebElement body = driver.findElement(By.xpath("//body"));
    assertBackgroundColor(body, Colors.WHITE);
    new Actions(driver).keyDown(Keys.SHIFT).sendKeys("1").keyUp(Keys.SHIFT).perform();
    assertBackgroundColor(body, Colors.GREEN);
    new Actions(driver).keyDown(Keys.ALT).sendKeys("1").keyUp(Keys.ALT).perform();
    assertBackgroundColor(body, Colors.LIGHTBLUE);
    new Actions(driver)
        .keyDown(Keys.SHIFT).keyDown(Keys.ALT)
        .sendKeys("1")
        .keyUp(Keys.SHIFT).keyUp(Keys.ALT)
        .perform();
    assertBackgroundColor(body, Colors.SILVER);
  }

  // SHIFT+LEFT twice selects the last two characters; DELETE removes them.
  @Test
  @NotYetImplemented(HTMLUNIT)
  public void testSelectionSelectBySymbol() {
    driver.get(pages.javascriptPage);
    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    getBuilder(driver).click(keyReporter).sendKeys("abc def").perform();
    assertThat(keyReporter.getAttribute("value"), is("abc def"));
    getBuilder(driver).click(keyReporter)
        .keyDown(Keys.SHIFT)
        .sendKeys(Keys.LEFT)
        .sendKeys(Keys.LEFT)
        .keyUp(Keys.SHIFT)
        .sendKeys(Keys.DELETE)
        .perform();
    assertThat(keyReporter.getAttribute("value"), is("abc d"));
  }

  // SHIFT+CTRL+LEFT selects the previous word; DELETE removes it.
  // Skipped on macOS, where word selection uses different modifiers.
  @Test
  @Ignore(IE)
  @NotYetImplemented(HTMLUNIT)
  public void testSelectionSelectByWord() {
    assumeFalse(
        "MacOS has alternative keyboard",
        TestUtilities.getEffectivePlatform().is(Platform.MAC));
    driver.get(pages.javascriptPage);
    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    getBuilder(driver).click(keyReporter).sendKeys("abc def").perform();
    assertThat(keyReporter.getAttribute("value"), is("abc def"));
    getBuilder(driver).click(keyReporter)
        .keyDown(Keys.SHIFT)
        .keyDown(Keys.CONTROL)
        .sendKeys(Keys.LEFT)
        .keyUp(Keys.CONTROL)
        .keyUp(Keys.SHIFT)
        .sendKeys(Keys.DELETE)
        .perform();
    assertThat(keyReporter.getAttribute("value"), is("abc "));
  }

  // CTRL+A selects all text; DELETE empties the field.
  // Skipped on macOS, where select-all uses the Command key.
  @Test
  @Ignore(IE)
  @NotYetImplemented(HTMLUNIT)
  public void testSelectionSelectAll() {
    assumeFalse(
        "MacOS has alternative keyboard",
        TestUtilities.getEffectivePlatform().is(Platform.MAC));
    driver.get(pages.javascriptPage);
    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    getBuilder(driver).click(keyReporter).sendKeys("abc def").perform();
    assertThat(keyReporter.getAttribute("value"), is("abc def"));
    getBuilder(driver).click(keyReporter)
        .keyDown(Keys.CONTROL)
        .sendKeys("a")
        .keyUp(Keys.CONTROL)
        .sendKeys(Keys.DELETE)
        .perform();
    assertThat(keyReporter.getAttribute("value"), is(""));
  }

  // Asserts the element's computed background-color equals the expected color.
  private void assertBackgroundColor(WebElement el, Colors expected) {
    Color actual = Color.fromString(el.getCssValue("background-color"));
    assertThat(actual, is(expected.getColorValue()));
  }

  // Asserts the form event log (element id "result") equals the expected text.
  private void assertThatFormEventsFiredAreExactly(String message, String expected) {
    assertThat(message, getFormEvents(), is(expected.trim()));
  }

  // Returns the trimmed text of the form event log.
  private String getFormEvents() {
    return driver.findElement(By.id("result")).getText().trim();
  }

  private void assertThatFormEventsFiredAreExactly(String expected) {
    assertThatFormEventsFiredAreExactly("", expected);
  }

  // Asserts the body event log (element id "body_result") equals the expected text.
  private void assertThatBodyEventsFiredAreExactly(String expected) {
    assertThat(driver.findElement(By.id("body_result")).getText().trim(), is(expected.trim()));
  }
}
| |
/*
* @(#)ConnectorHandle.java
*
* Copyright (c) 1996-2010 by the original authors of JHotDraw
* and all its contributors.
* All rights reserved.
*
* The copyright of this software is owned by the authors and
* contributors of the JHotDraw project ("the copyright holders").
* You may not use, copy or modify this software, except in
* accordance with the license agreement you entered into with
* the copyright holders. For details see accompanying license terms.
*/
package org.jhotdraw.draw.handle;
import org.jhotdraw.draw.handle.HandleAttributeKeys;
import org.jhotdraw.draw.handle.Handle;
import org.jhotdraw.draw.handle.AbstractHandle;
import org.jhotdraw.draw.*;
import org.jhotdraw.draw.connector.Connector;
import org.jhotdraw.draw.ConnectionFigure;
import java.util.*;
import javax.swing.undo.*;
import org.jhotdraw.util.*;
import java.awt.*;
import java.awt.geom.*;
/**
* A {@link Handle} associated to a {@link Connector} which allows to create a
* new {@link ConnectionFigure} by dragging the handle to another connector.
*
* @author Werner Randelshofer.
* @version $Id: ConnectorHandle.java -1 $
*/
public class ConnectorHandle extends AbstractHandle {
/**
 * Holds the ConnectionFigure which is currently being created.
 * Null while no drag is in progress.
 */
private ConnectionFigure createdConnection;
/**
 * The prototype for the ConnectionFigure to be created.
 */
private ConnectionFigure prototype;
/**
 * The Connector this handle is attached to (the start of new connections).
 */
private Connector connector;
/**
 * The current connectable Figure.
 */
private Figure connectableFigure;
/**
 * The current connectable Connector.
 */
private Connector connectableConnector;
/**
 * All connectors of the connectable Figure.
 */
protected Collection<Connector> connectors = Collections.emptyList();
/** Creates a new instance. */
public ConnectorHandle(Connector connector, ConnectionFigure prototype) {
super(connector.getOwner());
this.connector = connector;
this.prototype = prototype;
}
public Point2D.Double getLocationOnDrawing() {
return connector.getAnchor();
}
public Point getLocation() {
return view.drawingToView(connector.getAnchor());
}
@Override
public void draw(Graphics2D g) {
Graphics2D gg = (Graphics2D) g.create();
gg.transform(view.getDrawingToViewTransform());
for (Connector c : connectors) {
c.draw(gg);
}
if (createdConnection == null) {
drawCircle(g,
getEditor().getHandleAttribute(HandleAttributeKeys.DISCONNECTED_CONNECTOR_HANDLE_FILL_COLOR),
getEditor().getHandleAttribute(HandleAttributeKeys.DISCONNECTED_CONNECTOR_HANDLE_STROKE_COLOR));
} else {
drawCircle(g,
getEditor().getHandleAttribute(HandleAttributeKeys.CONNECTED_CONNECTOR_HANDLE_FILL_COLOR),
getEditor().getHandleAttribute(HandleAttributeKeys.CONNECTED_CONNECTOR_HANDLE_STROKE_COLOR));
Point p = view.drawingToView(createdConnection.getEndPoint());
g.setColor((Color) getEditor().getHandleAttribute(HandleAttributeKeys.CONNECTED_CONNECTOR_HANDLE_FILL_COLOR));
int width = getHandlesize();
g.fillOval(p.x - width / 2, p.y - width / 2, width, width);
g.setColor((Color) getEditor().getHandleAttribute(HandleAttributeKeys.CONNECTED_CONNECTOR_HANDLE_STROKE_COLOR));
g.drawOval(p.x - width / 2, p.y - width / 2, width, width);
}
}
public void trackStart(Point anchor, int modifiersEx) {
setConnection(createConnection());
ResourceBundleUtil labels = ResourceBundleUtil.getBundle("org.jhotdraw.draw.Labels");
Point2D.Double p = getLocationOnDrawing();
getConnection().setStartPoint(p);
getConnection().setEndPoint(p);
view.getDrawing().add(getConnection());
}
public void trackStep(Point anchor, Point lead, int modifiersEx) {
//updateConnectors(lead);
Point2D.Double p = view.viewToDrawing(lead);
fireAreaInvalidated(getDrawingArea());
Figure figure = findConnectableFigure(p, view.getDrawing());
if (figure != connectableFigure) {
connectableFigure = figure;
repaintConnectors();
}
connectableConnector = findConnectableConnector(figure, p);
if (connectableConnector != null) {
p = connectableConnector.getAnchor();
}
getConnection().willChange();
getConnection().setEndPoint(p);
getConnection().changed();
fireAreaInvalidated(getDrawingArea());
}
@Override
public Rectangle getDrawingArea() {
if (getConnection() != null) {
Rectangle r = new Rectangle(
view.drawingToView(getConnection().getEndPoint()));
r.grow(getHandlesize(), getHandlesize());
return r;
} else {
return new Rectangle(); // empty rectangle
}
}
public void trackEnd(Point anchor, Point lead, int modifiersEx) {
Point2D.Double p = view.viewToDrawing(lead);
view.getConstrainer().constrainPoint(p);
Figure f = findConnectableFigure(p, view.getDrawing());
connectableConnector = findConnectableConnector(f, p);
if (connectableConnector != null) {
final Drawing drawing = view.getDrawing();
final ConnectionFigure c = getConnection();
getConnection().setStartConnector(connector);
getConnection().setEndConnector(connectableConnector);
getConnection().updateConnection();
view.clearSelection();
view.addToSelection(c);
view.getDrawing().fireUndoableEditHappened(new AbstractUndoableEdit() {
@Override
public String getPresentationName() {
ResourceBundleUtil labels = ResourceBundleUtil.getBundle("org.jhotdraw.draw.Labels");
return labels.getString("edit.createConnectionFigure.text");
}
@Override
public void undo() throws CannotUndoException {
super.undo();
drawing.remove(c);
}
@Override
public void redo() throws CannotRedoException {
super.redo();
drawing.add(c);
view.clearSelection();
view.addToSelection(c);
}
});
} else {
view.getDrawing().remove(getConnection());
fireAreaInvalidated(getDrawingArea());
}
connectableConnector = null;
connectors = Collections.emptyList();
setConnection(null);
setTargetFigure(null);
}
/**
* Creates the ConnectionFigure. By default the figure prototype is
* cloned.
*/
protected ConnectionFigure createConnection() {
return (ConnectionFigure) prototype.clone();
}
protected void setConnection(ConnectionFigure newConnection) {
createdConnection = newConnection;
}
protected ConnectionFigure getConnection() {
return createdConnection;
}
protected Figure getTargetFigure() {
return connectableFigure;
}
protected void setTargetFigure(Figure newTargetFigure) {
connectableFigure = newTargetFigure;
}
private Figure findConnectableFigure(Point2D.Double p, Drawing drawing) {
for (Figure figure : drawing.getFiguresFrontToBack()) {
if (!figure.includes(getConnection()) &&
figure.isConnectable() &&
figure.contains(p)) {
return figure;
}
}
return null;
}
/**
* Finds a connection end figure.
*/
protected Connector findConnectableConnector(Figure connectableFigure, Point2D.Double p) {
Connector target = (connectableFigure == null) ? null : connectableFigure.findConnector(p, getConnection());
if ((connectableFigure != null) && connectableFigure.isConnectable() && !connectableFigure.includes(getOwner()) && getConnection().canConnect(connector, target)) {
return target;
}
return null;
}
protected Rectangle basicGetBounds() {
Rectangle r = new Rectangle(getLocation());
int h = getHandlesize();
r.x -= h / 2;
r.y -= h / 2;
r.width = r.height = h;
return r;
}
@Override
public boolean isCombinableWith(Handle handle) {
return false;
}
/**
* Updates the list of connectors that we draw when the user
* moves or drags the mouse over a figure to which can connect.
*/
public void repaintConnectors() {
Rectangle2D.Double invalidArea = null;
for (Connector c : connectors) {
if (invalidArea == null) {
invalidArea = c.getDrawingArea();
} else {
invalidArea.add(c.getDrawingArea());
}
}
connectors = (connectableFigure == null) ? new java.util.LinkedList<Connector>() : connectableFigure.getConnectors(prototype);
for (Connector c : connectors) {
if (invalidArea == null) {
invalidArea = c.getDrawingArea();
} else {
invalidArea.add(c.getDrawingArea());
}
}
if (invalidArea != null) {
view.getComponent().repaint(
view.drawingToView(invalidArea));
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util.logging;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import org.apache.harmony.logging.internal.nls.Messages;
import org.apache.harmony.kernel.vm.VM;
/**
* <code>Level</code> objects are used to indicate the level of logging. There
* are a set of predefined logging levels, each associated with an integer
* value. Enabling a certain logging level also enables all logging levels with
* larger values.
* <p>
* The predefined levels in ascending order are FINEST, FINER, FINE, CONFIG,
* INFO, WARNING, SEVERE. There are two additional predefined levels, which are
* ALL and OFF. ALL indicates logging all messages, and OFF indicates logging no
* messages.
* </p>
*
*/
/**
 * <code>Level</code> objects are used to indicate the level of logging. There
 * are a set of predefined logging levels, each associated with an integer
 * value. Enabling a certain logging level also enables all logging levels with
 * larger values.
 * <p>
 * The predefined levels in ascending order are FINEST, FINER, FINE, CONFIG,
 * INFO, WARNING, SEVERE. There are two additional predefined levels, which are
 * ALL and OFF. ALL indicates logging all messages, and OFF indicates logging no
 * messages.
 * </p>
 */
public class Level implements Serializable {

    private static final long serialVersionUID = -8176160795706313070L;

    // Registry of every Level constructed so far, in construction order.
    // parse() and readResolve() search it so that known levels resolve to
    // their canonical instances. All access is synchronized on this list.
    private static final List<Level> levels = new ArrayList<Level>(9);

    /**
     * The OFF level provides no logging messages.
     */
    public static final Level OFF = new Level("OFF", Integer.MAX_VALUE); //$NON-NLS-1$

    /**
     * The SEVERE level indicates a severe failure.
     */
    public static final Level SEVERE = new Level("SEVERE", 1000); //$NON-NLS-1$

    /**
     * The WARNING level indicates a warning.
     */
    public static final Level WARNING = new Level("WARNING", 900); //$NON-NLS-1$

    /**
     * The INFO level indicates an informative message.
     */
    public static final Level INFO = new Level("INFO", 800); //$NON-NLS-1$

    /**
     * The CONFIG level indicates a static configuration message.
     */
    public static final Level CONFIG = new Level("CONFIG", 700); //$NON-NLS-1$

    /**
     * The FINE level provides tracing messages.
     */
    public static final Level FINE = new Level("FINE", 500); //$NON-NLS-1$

    /**
     * The FINER level provides more detailed tracing messages.
     */
    public static final Level FINER = new Level("FINER", 400); //$NON-NLS-1$

    /**
     * The FINEST level provides highly detailed tracing messages.
     */
    public static final Level FINEST = new Level("FINEST", 300); //$NON-NLS-1$

    /**
     * The ALL level provides all logging messages.
     */
    public static final Level ALL = new Level("ALL", Integer.MIN_VALUE); //$NON-NLS-1$

    /**
     * Parses a level name into a <code>Level</code> object.
     *
     * @param name
     *            the name of the desired level, which cannot be null
     * @return a <code>Level</code> object with the specified name
     * @throws NullPointerException
     *             if <code>name</code> is <code>null</code>.
     * @throws IllegalArgumentException
     *             if <code>name</code> is not valid.
     */
    public static final Level parse(String name) {
        if (name == null) {
            // logging.1C=The 'name' parameter is null.
            throw new NullPointerException(Messages.getString("logging.1C")); //$NON-NLS-1$
        }
        boolean isNameAnInt;
        int nameAsInt;
        try {
            nameAsInt = Integer.parseInt(name);
            isNameAnInt = true;
        } catch (NumberFormatException e) {
            nameAsInt = 0;
            isNameAnInt = false;
        }
        synchronized (levels) {
            // First pass: match by name.
            for (Level level : levels) {
                if (name.equals(level.getName())) {
                    return level;
                }
            }
            if (isNameAnInt) {
                /*
                 * Loop through levels a second time, so that the returned
                 * instance is chosen based on the order of construction.
                 */
                for (Level level : levels) {
                    if (nameAsInt == level.intValue()) {
                        return level;
                    }
                }
            }
        }
        if (!isNameAnInt) {
            // logging.1D=Cannot parse this name: {0}
            throw new IllegalArgumentException(Messages.getString("logging.1D", name)); //$NON-NLS-1$
        }
        // A purely numeric, previously unseen name: create a new level for it.
        return new Level(name, nameAsInt);
    }

    /**
     * The name of this Level.
     *
     * @serial
     */
    private final String name;

    /**
     * The integer value indicating the level.
     *
     * @serial
     */
    private final int value;

    /**
     * The name of the resource bundle used to localize the level name.
     *
     * @serial
     */
    private final String resourceBundleName;

    /**
     * The resource bundle associated with this level, used to localize the
     * level name.
     */
    private transient ResourceBundle rb;

    /**
     * Constructs an instance of <code>Level</code> taking the supplied name
     * and level value.
     *
     * @param name name of the level
     * @param level an integer value indicating the level
     * @throws NullPointerException if <code>name</code> is <code>null</code>.
     */
    protected Level(String name, int level) {
        this(name, level, null);
    }

    /**
     * Constructs an instance of <code>Level</code> taking the supplied name
     * and level value.
     *
     * @param name name of the level
     * @param level an integer value indicating the level
     * @param resourceBundleName the name of the resource bundle to use
     * @throws NullPointerException if <code>name</code> is <code>null</code>.
     */
    protected Level(String name, int level, String resourceBundleName) {
        if (name == null) {
            // logging.1C=The 'name' parameter is null.
            throw new NullPointerException(Messages.getString("logging.1C")); //$NON-NLS-1$
        }
        this.name = name;
        this.value = level;
        this.resourceBundleName = resourceBundleName;
        if (resourceBundleName != null) {
            try {
                rb = ResourceBundle.getBundle(resourceBundleName,
                        Locale.getDefault(), VM.callerClassLoader());
            } catch (MissingResourceException e) {
                // Localization is best-effort; fall back to the raw name.
                rb = null;
            }
        }
        // Register this instance so parse()/readResolve() can find it.
        synchronized (levels) {
            levels.add(this);
        }
    }

    /**
     * Gets the name of this <code>Level</code>.
     *
     * @return the name of this <code>Level</code>
     */
    public String getName() {
        return this.name;
    }

    /**
     * Gets the name of the resource bundle associated with this
     * <code>Level</code>.
     *
     * @return the name of the resource bundle associated with this
     *         <code>Level</code>
     */
    public String getResourceBundleName() {
        return this.resourceBundleName;
    }

    /**
     * Gets the integer value indicating this <code>Level</code>.
     *
     * @return the integer value indicating this <code>Level</code>
     */
    public final int intValue() {
        return this.value;
    }

    /**
     * <p>
     * Serialization helper method to maintain singletons and add any new
     * levels.
     * </p>
     *
     * @return The resolved instance.
     */
    private Object readResolve() {
        synchronized (levels) {
            for (Level level : levels) {
                if (value != level.value) {
                    continue;
                }
                // Fix: compare against the candidate's name. The original code
                // compared this.name with itself (always equal), so a
                // deserialized level could resolve to a registered level with
                // the same value but a different name.
                if (!name.equals(level.name)) {
                    continue;
                }
                if (resourceBundleName == level.resourceBundleName) {
                    return level;
                } else if (resourceBundleName != null
                        && resourceBundleName.equals(level.resourceBundleName)) {
                    return level;
                }
            }
            // This is a new value, so add it.
            levels.add(this);
            return this;
        }
    }

    /**
     * <p>Serialization helper to setup transient resource bundle instance.</p>
     * @param in The input stream to read the instance data from.
     * @throws IOException if an IO error occurs.
     * @throws ClassNotFoundException if a class is not found.
     */
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        if (resourceBundleName != null) {
            try {
                rb = ResourceBundle.getBundle(resourceBundleName);
            } catch (MissingResourceException e) {
                rb = null;
            }
        }
    }

    /**
     * Gets the localized name of this level. The default locale is used. If no
     * resource bundle is associated with this <code>Level</code>, the
     * original level name is returned.
     *
     * @return the localized name of this level
     */
    public String getLocalizedName() {
        if (rb == null) {
            return name;
        }
        try {
            return rb.getString(name);
        } catch (MissingResourceException e) {
            return name;
        }
    }

    /**
     * Compares two <code>Level</code> objects for equality. They are
     * considered to be equal if they have the same value.
     *
     * @param o the other object to be compared with
     * @return <code>true</code> if this object equals to the supplied object,
     *         otherwise <code>false</code>
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Level)) {
            return false;
        }
        return ((Level) o).intValue() == this.value;
    }

    /**
     * Returns the hash code of this <code>Level</code> object.
     *
     * @return the hash code of this <code>Level</code> object
     */
    @Override
    public int hashCode() {
        return this.value;
    }

    /**
     * Returns the string representation of this <code>Level</code> object.
     * Usually this will include its name.
     *
     * @return the string representation of this <code>Level</code> object
     */
    @Override
    public final String toString() {
        return this.name;
    }
}
| |
/**
* Copyright (c) 2003-2016 The Apereo Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/ecl2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.content.util;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import javax.activation.MimetypesFileTypeMap;
import javax.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.content.api.ContentCollection;
import org.sakaiproject.content.api.ContentCollectionEdit;
import org.sakaiproject.content.cover.ContentHostingService;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.content.api.ContentResourceEdit;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourcePropertiesEdit;
import org.sakaiproject.event.api.NotificationService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.IdUsedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.exception.ServerOverloadException;
import org.sakaiproject.exception.TypeException;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.util.Resource;
import org.sakaiproject.util.ResourceLoader;
@SuppressWarnings({ "deprecation", "restriction" })
@Slf4j
public class ZipContentUtil {
private static final String ZIP_EXTENSION = ".zip";
private static final int BUFFER_SIZE = 32000;
private static final MimetypesFileTypeMap mime = new MimetypesFileTypeMap();
public static final String PREFIX = "resources.";
public static final String REQUEST = "request.";
private static final String STATE_HOME_COLLECTION_ID = PREFIX + REQUEST + "collection_home";
private static final String STATE_HOME_COLLECTION_DISPLAY_NAME = PREFIX + REQUEST + "collection_home_display_name";
public static final String STATE_MESSAGE = "message";
/**
* Maximum number of files to extract from a zip archive (1000)
*/
public static final int MAX_ZIP_EXTRACT_FILES_DEFAULT = 1000;
private static Integer MAX_ZIP_EXTRACT_FILES;
private static final String DEFAULT_RESOURCECLASS = "org.sakaiproject.localization.util.ContentProperties";
private static final String DEFAULT_RESOURCEBUNDLE = "org.sakaiproject.localization.bundle.content.content";
private static final String RESOURCECLASS = "resource.class.content";
private static final String RESOURCEBUNDLE = "resource.bundle.content";
private static ResourceLoader rb = Resource.getResourceLoader(ServerConfigurationService.getString(RESOURCECLASS, DEFAULT_RESOURCECLASS), ServerConfigurationService.getString(RESOURCEBUNDLE, DEFAULT_RESOURCEBUNDLE));
/**
 * Returns the maximum number of files that may be extracted from a zip,
 * reading the server configuration lazily on first use and falling back to
 * {@link #MAX_ZIP_EXTRACT_FILES_DEFAULT} for non-positive values.
 */
public static int getMaxZipExtractFiles() {
    if (MAX_ZIP_EXTRACT_FILES == null) {
        MAX_ZIP_EXTRACT_FILES = ServerConfigurationService.getInt(
                org.sakaiproject.content.api.ContentHostingService.RESOURCES_ZIP_EXPAND_MAX,
                MAX_ZIP_EXTRACT_FILES_DEFAULT);
    }
    if (MAX_ZIP_EXTRACT_FILES <= 0) {
        // any less than this is useless so probably a mistake
        MAX_ZIP_EXTRACT_FILES = MAX_ZIP_EXTRACT_FILES_DEFAULT;
        log.warn("content.zip.expand.maxfiles is set to a value less than or equal to 0, defaulting to "+MAX_ZIP_EXTRACT_FILES_DEFAULT);
    }
    return MAX_ZIP_EXTRACT_FILES;
}
/**
 * Streams a zip archive of the selected folders and files directly to the
 * HTTP response.
 *
 * @param siteId the id of the site the resources belong to
 * @param siteTitle the site title, used as the default zip filename
 * @param selectedFolderIds collection ids whose entire contents are zipped
 * @param selectedFiles individual resource ids to include in the zip
 * @param response the servlet response the zip is written to
 */
public void compressSelectedResources(String siteId, String siteTitle, List<String> selectedFolderIds, List<String> selectedFiles, HttpServletResponse response) {
    // Keyed by resource id so a file selected both directly and via its
    // folder is only added once.
    Map<String, ContentResource> resourcesToZip = new HashMap<>();
    try {
        // Add any files in the selected folders to the files to be in the zip.
        if (selectedFolderIds.size() > 0) {
            for (String selectedFolder : selectedFolderIds) {
                List<ContentResource> folderContents = ContentHostingService.getAllResources(selectedFolder);
                for (ContentResource folderFile : folderContents) {
                    resourcesToZip.put(folderFile.getId(), folderFile);
                }
            }
        }
        // Add any selected files to the list of resources to be in the zip.
        for (String selectedFile : selectedFiles) {
            ContentResource contentFile = ContentHostingService.getResource(selectedFile);
            resourcesToZip.put(contentFile.getId(), contentFile);
        }
    } catch (IdUnusedException | PermissionException | TypeException e) {
        // shouldn't happen by this stage.
        log.error(e.getMessage(), e);
    }
    try (OutputStream zipOut = response.getOutputStream(); ZipOutputStream out = new ZipOutputStream(zipOut)) {
        // If in dropbox we need to add the word "DropBox" to the zip filename;
        // use the first entry in the resourcesToZip map to find whether we are
        // in the dropbox of the user.
        if (!resourcesToZip.isEmpty()) {
            String firstContentResourceId = resourcesToZip.entrySet().iterator().next().getKey();
            if (ContentHostingService.isInDropbox(firstContentResourceId) && ServerConfigurationService.getBoolean("dropbox.zip.haveDisplayname", true)) {
                response.setHeader("Content-disposition", "inline; filename=" + siteId + "DropBox.zip");
            } else {
                response.setHeader("Content-disposition", "inline; filename=" + siteTitle + ".zip");
            }
        } else {
            // Return an empty zip.
            response.setHeader("Content-disposition", "inline; filename=" + siteTitle + ".zip");
        }
        response.setContentType("application/zip");
        for (ContentResource contentResource : resourcesToZip.values()) {
            // Find the file path: everything up to and including the site id
            // becomes the zip root, so entry names are site-relative.
            int siteIdPosition = contentResource.getId().indexOf(siteId);
            String rootId = contentResource.getId().substring(0, siteIdPosition) + siteId + "/";
            storeContentResource(rootId, contentResource, out);
        }
    } catch (IOException ioe) {
        log.error(ioe.getMessage(), ioe);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
}
/**
 * Compresses a ContentCollection to a new zip archive with the same folder
 * name. The archive is first built in a temporary file on the local
 * filesystem and then stored back into the content repository, retrying
 * with "_2", "_3", ... suffixes until an unused resource id is found.
 *
 * @param reference sakai entity reference of the collection to compress
 */
public void compressFolder(Reference reference) {
    File temp = null;
    FileInputStream fis = null;
    ToolSession toolSession = SessionManager.getCurrentToolSession();
    try {
        // Create the compressed archive in the filesystem
        ZipOutputStream out = null;
        try {
            temp = File.createTempFile("sakai_content-", ".tmp");
            ContentCollection collection = ContentHostingService.getCollection(reference.getId());
            // UTF-8 entry names so non-ASCII filenames survive the round trip.
            out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(temp),BUFFER_SIZE),java.nio.charset.StandardCharsets.UTF_8);
            storeContentCollection(reference.getId(),collection,out);
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException e) {
                    // best-effort close; the archive was already flushed above
                }
            }
        }
        // Store the compressed archive in the repository
        String resourceId = reference.getId().substring(0,reference.getId().lastIndexOf(Entity.SEPARATOR));
        String resourceName = extractName(resourceId);
        String homeCollectionId = (String) toolSession.getAttribute(STATE_HOME_COLLECTION_ID);
        if(homeCollectionId != null && homeCollectionId.equals(reference.getId())){
            //place the zip file into the home folder of the resource tool
            resourceId = reference.getId() + resourceName;
            String homeName = (String) toolSession.getAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME);
            if(homeName != null){
                resourceName = homeName;
            }
        }
        // Retry loop: append "_<count>" to the id/name until addResource
        // succeeds (IdUsedException means the candidate id already exists).
        int count = 0;
        ContentResourceEdit resourceEdit = null;
        String displayName="";
        while(true){
            try{
                String newResourceId = resourceId;
                String newResourceName = resourceName;
                displayName=newResourceName;
                count++;
                if(count > 1){
                    //previous naming convention failed, try another one
                    newResourceId += "_" + count;
                    newResourceName += "_" + count;
                }
                newResourceId += ZIP_EXTENSION;
                newResourceName += ZIP_EXTENSION;
                ContentCollectionEdit currentEdit;
                // Deep enough in the hierarchy: prefer the collection's display
                // name property for the zip's display name.
                if(reference.getId().split(Entity.SEPARATOR).length>3) {
                    currentEdit = (ContentCollectionEdit) ContentHostingService.getCollection(resourceId + Entity.SEPARATOR);
                    displayName = currentEdit.getProperties().getProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME);
                    if (displayName != null && displayName.length() > 0) {
                        displayName += ZIP_EXTENSION;
                    }
                    else {
                        displayName = newResourceName;
                    }
                }
                resourceEdit = ContentHostingService.addResource(newResourceId);
                //success, so keep track of name/id
                resourceId = newResourceId;
                resourceName = newResourceName;
                break;
            }catch(IdUsedException e){
                //do nothing, just let it loop again
            }catch(Exception e){
                throw new Exception(e);
            }
        }
        // Attach the temp file's bytes and commit the new zip resource.
        fis = new FileInputStream(temp);
        resourceEdit.setContent(fis);
        resourceEdit.setContentType(mime.getContentType(resourceId));
        ResourcePropertiesEdit props = resourceEdit.getPropertiesEdit();
        props.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, displayName);
        ContentHostingService.commitResource(resourceEdit, NotificationService.NOTI_NONE);
    }
    catch (PermissionException pE){
        addAlert(toolSession, rb.getString("permission_error_zip"));
        log.warn(pE.getMessage(), pE);
    }
    catch (Exception e) {
        addAlert(toolSession, rb.getString("generic_error_zip"));
        log.error(e.getMessage(), e);
    }
    finally {
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
                // best-effort close of the temp-file stream
            }
        }
        if (temp != null && temp.exists()) {
            if (!temp.delete()) {
                log.warn("failed to remove temp file");
            }
        }
    }
}
/**
 * Appends an alert message to the tool session's {@link #STATE_MESSAGE}
 * attribute, separating multiple alerts with blank lines.
 */
private void addAlert(ToolSession toolSession, String alert){
    String existing = (String) toolSession.getAttribute(STATE_MESSAGE);
    String merged = (existing == null) ? alert : existing + "\n\n" + alert;
    toolSession.setAttribute(STATE_MESSAGE, merged);
}
/**
 * Extracts a compressed (zip) ContentResource to a new folder with the same name.
 *
 * @param reference the sakai entity reference
 * @throws Exception on failure
 * @deprecated 11 Oct 2011 -AZ, use {@link #extractArchive(String)} instead
 */
public void extractArchive(Reference reference) throws Exception {
    if (reference == null) {
        throw new IllegalArgumentException("reference cannot be null");
    }
    String referenceId = reference.getId();
    extractArchive(referenceId);
}
/**
 * Extracts a compressed (zip) ContentResource to a new folder with the same name.
 *
 * @param referenceId the sakai entity reference id
 * @throws Exception on failure
 */
public void extractArchive(String referenceId) throws Exception {
    ContentResource resource = ContentHostingService.getResource(referenceId);
    String rootCollectionId = extractZipCollectionPrefix(resource);

    // Prepare the target collection that receives the extracted entries.
    ContentCollectionEdit rootCollection = ContentHostingService.addCollection(rootCollectionId);
    ResourcePropertiesEdit prop = rootCollection.getPropertiesEdit();
    prop.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, extractZipCollectionName(resource));
    ContentHostingService.commitCollection(rootCollection);

    // Extract the zip, trying each configured charset in turn until one
    // succeeds (entry names in zips have no declared encoding).
    File temp = null;
    try {
        temp = exportResourceToFile(resource);
        boolean extracted = false;
        for (String charsetName : getZipCharsets()) {
            Charset charset;
            try {
                charset = Charset.forName(charsetName);
            } catch (IllegalCharsetNameException | UnsupportedCharsetException e) {
                log.warn(String.format("%s is not a legal charset.", charsetName));
                continue;
            }
            ZipFile zipFile = null;
            try {
                zipFile = new ZipFile(temp, charset);
                Enumeration<? extends ZipEntry> entries = zipFile.entries();
                while (entries.hasMoreElements()) {
                    ZipEntry nextElement = entries.nextElement();
                    // Skip macOS metadata (__MACOSX folders, .DS_Store files).
                    if (!nextElement.getName().contains("__MACOSX")) {
                        if (nextElement.isDirectory()) {
                            createContentCollection(rootCollectionId, nextElement);
                        } else {
                            if (!nextElement.getName().contains(".DS_Store")) {
                                createContentResource(rootCollectionId, nextElement, zipFile);
                            }
                        }
                    }
                }
                extracted = true;
                break;
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                log.warn(String.format("Cannot extract archive %s with charset %s.", referenceId, charset));
            } finally {
                if (zipFile != null) {
                    zipFile.close();
                }
            }
        }
        if (!extracted) {
            log.warn(String.format("Cannot extract archives %s with any charset %s.", referenceId, getZipCharsets()));
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    } finally {
        // Fix: exportResourceToFile() returns null when the temp file could
        // not be created; the original dereferenced it unconditionally (NPE).
        if (temp != null) {
            temp.delete();
        }
    }
}
/**
 * Get a list of the files in a zip and their size
 * @param reference the sakai entity reference
 * @return a map of file names to file sizes in the zip archive
 * @deprecated 11 Oct 2011 -AZ, use {@link #getZipManifest(String)}
 */
public Map<String, Long> getZipManifest(Reference reference) {
    if (reference == null) {
        throw new IllegalArgumentException("reference cannot be null");
    }
    String referenceId = reference.getId();
    return getZipManifest(referenceId);
}
/**
 * Get a list of the files in a zip and their size.
 *
 * @param referenceId the sakai entity reference id
 * @return a map of file names to file sizes in the zip archive, truncated to
 *         one entry more than {@link #getMaxZipExtractFiles()} so callers can
 *         detect over-limit archives; {@code null} if the resource cannot be
 *         read
 */
public Map<String, Long> getZipManifest(String referenceId) {
    Map<String, Long> ret = new HashMap<String, Long>();
    ContentResource resource;
    try {
        resource = ContentHostingService.getResource(referenceId);
    } catch (PermissionException | IdUnusedException | TypeException e1) {
        return null;
    }
    // Read the zip from a local temp copy, trying each configured charset.
    File temp = null;
    try {
        temp = exportResourceToFile(resource);
        boolean extracted = false;
        for (String charsetName : getZipCharsets()) {
            Charset charset;
            try {
                charset = Charset.forName(charsetName);
            } catch (IllegalCharsetNameException | UnsupportedCharsetException e) {
                log.warn(String.format("%s is not a legal charset.", charsetName));
                continue;
            }
            ZipFile zipFile = null;
            try {
                zipFile = new ZipFile(temp, charset);
                Enumeration<? extends ZipEntry> entries = zipFile.entries();
                int i = 0;
                // use <= getMaxZipExtractFiles() so the returned value will be
                // larger than the max and then rejected
                while (entries.hasMoreElements() && i <= getMaxZipExtractFiles()) {
                    ZipEntry nextElement = entries.nextElement();
                    ret.put(nextElement.getName(), nextElement.getSize());
                    i++;
                }
                extracted = true;
                break;
            } catch (Exception e) {
                // Fix: corrected "menifest" typo in the log message.
                log.warn(String.format("Cannot get manifest of %s with charset %s.", referenceId, charset));
            } finally {
                if (zipFile != null) {
                    zipFile.close();
                }
            }
        }
        if (!extracted) {
            log.warn(String.format("Cannot get manifest of %s with any charset %s.", referenceId, getZipCharsets()));
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    } finally {
        // Fix: guard against a null temp file (exportResourceToFile returns
        // null when temp-file creation failed); original would NPE here.
        if (temp != null && temp.exists()) {
            if (!temp.delete()) {
                log.warn("unable to delete temp file!");
            }
        }
    }
    return ret;
}
/**
 * Creates a new ContentResource extracted from ZipFile. If a resource with
 * the derived id already exists it is updated in place instead of failing.
 *
 * @param rootCollectionId the collection the entry is extracted into
 * @param nextElement the zip entry to extract
 * @param zipFile the open zip archive the entry's bytes are read from
 * @throws Exception on failure
 */
private void createContentResource(String rootCollectionId,
        ZipEntry nextElement, ZipFile zipFile) throws Exception {
    // Resource id mirrors the entry's path below the root collection.
    String resourceId = rootCollectionId + nextElement.getName();
    String resourceName = extractName(nextElement.getName());
    ContentResourceEdit resourceEdit;
    try {
        resourceEdit = ContentHostingService.addResource(resourceId);
    } catch (IdUsedException iue) {
        // resource exists, update instead
        log.debug("Content resource with ID " + resourceId + " exists. Editing instead.");
        resourceEdit = ContentHostingService.editResource(resourceId);
    }
    // Stream the entry's content; content type is guessed from the file name.
    resourceEdit.setContent(zipFile.getInputStream(nextElement));
    resourceEdit.setContentType(mime.getContentType(resourceName));
    ResourcePropertiesEdit props = resourceEdit.getPropertiesEdit();
    props.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, resourceName);
    ContentHostingService.commitResource(resourceEdit, NotificationService.NOTI_NONE);
}
/**
 * Creates a new ContentCollection in the rootCollectionId with the
 * element.getName(). If a collection with the derived id already exists it
 * is updated in place instead of failing.
 *
 * @param rootCollectionId the collection the folder is created under
 * @param element the directory zip entry
 * @throws Exception on failure
 */
private void createContentCollection(String rootCollectionId,
        ZipEntry element) throws Exception {
    // Collection id mirrors the entry's path below the root collection.
    String resourceId = rootCollectionId + element.getName();
    String resourceName = extractName(element.getName());
    ContentCollectionEdit collection;
    try {
        collection = ContentHostingService.addCollection(resourceId);
    } catch (IdUsedException iue) {
        // collection exists, update instead
        log.debug("Content collection with ID " + resourceId + " exists. Editing instead.");
        collection = ContentHostingService.editCollection(resourceId);
    }
    ResourcePropertiesEdit props = collection.getPropertiesEdit();
    props.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, resourceName);
    ContentHostingService.commitCollection(collection);
}
/**
 * Copies the content of a ContentResource into a temporary file on disk.
 *
 * @param resource the resource whose content is exported
 * @return the temporary file holding the content, or {@code null} if the
 *         temp file could not be created; on a copy failure the (possibly
 *         partial) temp file is still returned, matching prior behaviour
 */
private File exportResourceToFile(ContentResource resource) {
    File temp = null;
    try {
        temp = File.createTempFile("sakai_content-", ".tmp");
        temp.deleteOnExit();
        // try-with-resources closes both streams even on failure; the
        // previous version leaked the content stream and swallowed
        // exceptions thrown by close().
        try (FileOutputStream out = new FileOutputStream(temp);
                InputStream in = resource.streamContent()) {
            IOUtils.copy(in, out);
            out.flush();
        }
    } catch (IOException | ServerOverloadException e) {
        log.error(e.getMessage(), e);
    }
    return temp;
}
/**
 * Recursively streams a collection's members into the ZipOutputStream.
 * Empty collections are written as an explicit (empty) zip folder entry.
 *
 * @param rootId     id prefix stripped from member ids to form zip paths
 * @param collection collection whose members are archived
 * @param out        target zip stream
 * @throws Exception propagated from the content hosting service or zip I/O
 */
private void storeContentCollection(String rootId, ContentCollection collection, ZipOutputStream out) throws Exception {
    final List<String> memberIds = collection.getMembers();
    if (memberIds.isEmpty()) {
        storeEmptyFolder(rootId, collection, out);
        return;
    }
    for (final String memberId : memberIds) {
        // Ids ending with the entity separator denote sub-collections.
        if (memberId.endsWith(Entity.SEPARATOR)) {
            storeContentCollection(rootId, ContentHostingService.getCollection(memberId), out);
        } else {
            storeContentResource(rootId, ContentHostingService.getResource(memberId), out);
        }
    }
}
/**
 * Writes an empty folder entry to the zip for a collection with no members.
 *
 * @param rootId   id prefix stripped from the collection id
 * @param resource the empty collection
 * @param out      target zip stream
 * @throws Exception propagated from zip I/O
 */
private void storeEmptyFolder(String rootId, ContentCollection resource, ZipOutputStream out) throws Exception {
    String folderName = resource.getId().substring(rootId.length());
    // Dropbox zips show the owner's display name instead of the user id.
    final boolean useDisplayName = ContentHostingService.isInDropbox(rootId)
            && ServerConfigurationService.getBoolean("dropbox.zip.haveDisplayname", true);
    if (useDisplayName) {
        try {
            folderName = getContainingFolderDisplayName(rootId, folderName);
        } catch (Exception e) {
            log.warn("Unexpected error when trying to create empty folder for Zip archive {} : {}", extractName(rootId), e.getMessage());
            return;
        }
    }
    out.putNextEntry(new ZipEntry(folderName));
    out.closeEntry();
}
/**
 * Streams one content resource into the ZipOutputStream.
 *
 * @param rootId   id prefix stripped from the resource id to form the zip path
 * @param resource resource to archive
 * @param out      target zip stream
 * @throws Exception propagated from zip I/O or the content hosting service
 */
private void storeContentResource(String rootId, ContentResource resource, ZipOutputStream out) throws Exception {
    String filename = resource.getId().substring(rootId.length());
    // In dropbox zips, show the owner's display name rather than the user id.
    if (ContentHostingService.isInDropbox(rootId)
            && ServerConfigurationService.getBoolean("dropbox.zip.haveDisplayname", true)) {
        try {
            filename = getContainingFolderDisplayName(rootId, filename);
        } catch (Exception e) {
            // One handler replaces four byte-identical catch blocks
            // (TypeException / IdUnusedException / PermissionException /
            // Exception). Log the exception itself: the old code logged
            // e.getCause(), which is often null and then hid the stack trace.
            log.warn("Unexpected error occurred when trying to create Zip archive:" + extractName(rootId), e);
            return;
        }
    }
    ZipEntry zipEntry = new ZipEntry(filename);
    zipEntry.setSize(resource.getContentLength());
    out.putNextEntry(zipEntry);
    // try-with-resources guarantees the content stream is closed even if the
    // copy fails (same guarantee as the old explicit finally block).
    try (InputStream contentStream = resource.streamContent()) {
        IOUtils.copy(contentStream, out);
    }
}
/**
 * Builds the collection-id prefix under which a zip file's contents are
 * expanded: the containing collection id, plus the zip's base name, plus a
 * trailing separator.
 */
private String extractZipCollectionPrefix(ContentResource resource) {
    return resource.getContainingCollection().getId()
            + extractZipCollectionName(resource)
            + Entity.SEPARATOR;
}
/**
 * Returns the last path segment of an entity id/name.
 * Note: split() drops trailing empty tokens, so "a/b/" yields "b".
 */
private String extractName(String collectionName) {
    final String[] segments = collectionName.split(Entity.SEPARATOR);
    return segments[segments.length - 1];
}
/**
 * Returns the zip resource's base name with its extension stripped.
 * Assumes the resource id contains a '.' (as zip file ids do).
 */
private String extractZipCollectionName(ContentResource resource) {
    final String baseName = extractName(resource.getId());
    return baseName.substring(0, baseName.lastIndexOf("."));
}
/**
 * Returns the configured charsets to try when expanding zip entries, always
 * ending with UTF-8 as the fallback.
 */
private List<String> getZipCharsets() {
    final List<String> charsets = new ArrayList<>();
    final String[] configured = ServerConfigurationService.getStrings("content.zip.expand.charsets");
    if (configured != null) {
        charsets.addAll(Arrays.asList(configured));
    }
    charsets.add("UTF-8"); // always try UTF-8 last as a fallback
    return charsets;
}
/**
 * Rewrites a dropbox-relative path so the owning user's display name appears
 * in the zip instead of the raw user id.
 *
 * @param rootId   id of the collection the zip is rooted at; a rootId with
 *                 more than 3 "/"-separated segments is treated as a
 *                 sub-folder ("user") level, otherwise as the dropbox root
 *                 ("site") level — TODO confirm against callers
 * @param filename path of the entry relative to rootId
 * @return the display-name-adjusted path
 * @throws IdUnusedException   if the containing collection does not exist
 * @throws TypeException       if the id does not denote a collection
 * @throws PermissionException if the current user may not read the collection
 */
private String getContainingFolderDisplayName(String rootId,String filename) throws IdUnusedException, TypeException, PermissionException {
    // Don't manipulate the filename when zipping a single .zip entry directly
    // at the root folder level.
    if(!(rootId.split("/").length > 3) && (filename.split("/").length<2) && filename.endsWith(".zip")){
        return filename;
    }
    String filenameArr[] = filename.split(Entity.SEPARATOR);
    // Sub-folder level: the collection to look up is rootId itself. Root
    // level: append the first path segment (the user id), giving something
    // like "group-user/site-id/user-id/".
    String contentEditStr = (rootId.split("/").length > 3)?rootId:rootId+filenameArr[0]+Entity.SEPARATOR;
    ContentCollectionEdit collectionEdit = (ContentCollectionEdit) ContentHostingService.getCollection(contentEditStr);
    ResourcePropertiesEdit props = collectionEdit.getPropertiesEdit();
    String displayName = props.getProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME);
    // Sub-folder level: prefix the whole path with the display name.
    if(contentEditStr.equals(rootId)) {
        return displayName +Entity.SEPARATOR+ filename;
    }
    else { // Root level: replace just the user-id segment with the display name.
        return filename.replaceFirst(filenameArr[0],displayName);
    }
}
}
| |
package hygieia.builder;
import com.capitalone.dashboard.model.BuildStatus;
import com.capitalone.dashboard.model.TestCapability;
import com.capitalone.dashboard.model.TestCaseStatus;
import com.capitalone.dashboard.model.TestResult;
import com.capitalone.dashboard.model.TestSuite;
import com.capitalone.dashboard.model.TestSuiteType;
import com.capitalone.dashboard.request.BuildDataCreateRequest;
import com.capitalone.dashboard.request.TestDataCreateRequest;
import com.google.common.collect.Lists;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.model.Run;
import hudson.model.TaskListener;
import hygieia.transformer.CucumberJsonToTestResultTransformer;
import hygieia.utils.HygieiaUtils;
import org.json.simple.JSONArray;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Logger;
/**
 * Builds Hygieia {@link TestDataCreateRequest} payloads from cucumber JSON
 * report files found in a Jenkins build's workspace.
 */
public class CucumberTestBuilder {
    private static final Logger logger = Logger.getLogger(CucumberTestBuilder.class.getName());

    public CucumberTestBuilder() {
    }

    /**
     * Collects the cucumber JSON files matching {@code filePattern} under
     * {@code directory} and converts them into a single {@link TestResult}.
     *
     * @return the aggregated result, or {@code null} if no report files
     *         produced any test capabilities
     */
    private TestResult buildTestResults(Run run, TaskListener listener, String filePattern, FilePath filePath, String directory, BuildDataCreateRequest buildDataCreateRequest, String testType) {
        // Start with an empty list: the previous version initialised this to
        // null and still iterated it in getCapabilities() after a caught
        // IOException/InterruptedException, causing a NullPointerException.
        List<FilePath> testFiles = new ArrayList<>();
        try {
            EnvVars envVars = run.getEnvironment(listener);
            FilePath rootDirectory = filePath.withSuffix(directory);
            if (envVars != null) {
                filePattern = envVars.expand(filePattern);
            }
            testFiles = Lists.newArrayList(HygieiaUtils.getArtifactFiles(rootDirectory, filePattern, new ArrayList<FilePath>()));
            listener.getLogger().println("Hygieia Test Result Publisher - Looking for file pattern '" + filePattern + "' in directory " + rootDirectory.getRemote());
        } catch (IOException e) {
            // Report to the build log instead of printStackTrace().
            listener.getLogger().println("Hygieia Test Result Publisher" + Arrays.toString(e.getStackTrace()));
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe it.
            Thread.currentThread().interrupt();
            listener.getLogger().println("Hygieia Test Result Publisher - InterruptedException on " + Arrays.toString(e.getStackTrace()));
        }
        List<TestCapability> capabilities = getCapabilities(testFiles, listener, String.valueOf(buildDataCreateRequest.getNumber()));
        return buildTestResultObject(capabilities, buildDataCreateRequest, testType);
    }

    /**
     * Parses each cucumber JSON file into a {@link TestCapability},
     * aggregating suite counts, total duration and an overall status
     * (Failure outranks Skipped, which outranks Success).
     */
    private List<TestCapability> getCapabilities(List<FilePath> testFiles, TaskListener listener, String executionId) {
        List<TestCapability> capabilities = new ArrayList<>();
        JSONParser parser = new JSONParser();
        CucumberJsonToTestResultTransformer cucumberTransformer = new CucumberJsonToTestResultTransformer();
        for (FilePath file : testFiles) {
            try {
                listener.getLogger().println("Hygieia Test Publisher: Processing file: " + file.getRemote());
                JSONArray cucumberJson = (JSONArray) parser.parse(file.readToString());
                TestCapability cap = new TestCapability();
                cap.setType(TestSuiteType.Functional);
                List<TestSuite> testSuites = cucumberTransformer.transformer(cucumberJson);
                cap.setDescription(getCapabilityDescription(file));
                cap.getTestSuites().addAll(testSuites); // add test suites
                long duration = 0;
                int testSuiteSkippedCount = 0, testSuiteSuccessCount = 0, testSuiteFailCount = 0, testSuiteUnknownCount = 0;
                for (TestSuite t : testSuites) {
                    duration += t.getDuration();
                    switch (t.getStatus()) {
                        case Success:
                            testSuiteSuccessCount++;
                            break;
                        case Failure:
                            testSuiteFailCount++;
                            break;
                        case Skipped:
                            testSuiteSkippedCount++;
                            break;
                        default:
                            testSuiteUnknownCount++;
                            break;
                    }
                }
                // Any failed suite fails the capability; skips outrank success.
                if (testSuiteFailCount > 0) {
                    cap.setStatus(TestCaseStatus.Failure);
                } else if (testSuiteSkippedCount > 0) {
                    cap.setStatus(TestCaseStatus.Skipped);
                } else if (testSuiteSuccessCount > 0) {
                    cap.setStatus(TestCaseStatus.Success);
                } else {
                    cap.setStatus(TestCaseStatus.Unknown);
                }
                cap.setFailedTestSuiteCount(testSuiteFailCount);
                cap.setSkippedTestSuiteCount(testSuiteSkippedCount);
                cap.setSuccessTestSuiteCount(testSuiteSuccessCount);
                cap.setUnknownStatusTestSuiteCount(testSuiteUnknownCount);
                cap.setTotalTestSuiteCount(testSuites.size());
                cap.setDuration(duration);
                cap.setExecutionId(executionId);
                capabilities.add(cap);
            } catch (FileNotFoundException e) {
                listener.getLogger().println("Hygieia Publisher: Test File Not Found: " + file.getRemote());
            } catch (ParseException e) {
                listener.getLogger().println("Hygieia Publisher: Error Parsing File: " + file.getRemote());
            } catch (IOException | InterruptedException e) {
                listener.getLogger().println("Hygieia Publisher: Error Reading File: " + file.getName());
            }
        }
        return capabilities;
    }

    /**
     * Returns the name of the folder directly containing the report file,
     * handling both unix ('/') and windows ('\') separators.
     */
    private static String getCapabilityDescription(FilePath file) {
        String newFileName = file.getRemote().replace(file.getName(), "");
        boolean isUnix = newFileName.endsWith("/");
        int lastFolderIndex;
        // Drop the trailing separator before searching for the previous one.
        newFileName = newFileName.substring(0, newFileName.length() - 1);
        if (isUnix) {
            lastFolderIndex = newFileName.lastIndexOf("/");
        } else {
            lastFolderIndex = newFileName.lastIndexOf("\\");
        }
        if (lastFolderIndex > 0) {
            return newFileName.substring(lastFolderIndex);
        }
        return newFileName;
    }

    /**
     * Wraps the parsed capabilities in a {@link TestResult} carrying the
     * build's identity and timing, with counts rolled up by capability
     * status. Returns {@code null} when there are no capabilities.
     */
    private TestResult buildTestResultObject(List<TestCapability> capabilities, BuildDataCreateRequest buildDataCreateRequest, String testType) {
        if (!capabilities.isEmpty()) {
            // There are test suites so let's construct a TestResult to encapsulate these results
            TestResult testResult = new TestResult();
            testResult.setType(TestSuiteType.fromString(testType));
            testResult.setDescription(buildDataCreateRequest.getJobName());
            testResult.setExecutionId(String.valueOf(buildDataCreateRequest.getNumber()));
            testResult.setUrl(buildDataCreateRequest.getBuildUrl() + String.valueOf(buildDataCreateRequest.getNumber()) + "/");
            testResult.setDuration(buildDataCreateRequest.getDuration());
            testResult.setEndTime(buildDataCreateRequest.getStartTime() + buildDataCreateRequest.getDuration());
            testResult.setStartTime(buildDataCreateRequest.getStartTime());
            testResult.getTestCapabilities().addAll(capabilities); //add all capabilities
            testResult.setTotalCount(capabilities.size());
            testResult.setTimestamp(System.currentTimeMillis());
            int testCapabilitySkippedCount = 0, testCapabilitySuccessCount = 0, testCapabilityFailCount = 0;
            int testCapabilityUnknownCount = 0;
            // Calculate counts based on test suites
            for (TestCapability cap : capabilities) {
                switch (cap.getStatus()) {
                    case Success:
                        testCapabilitySuccessCount++;
                        break;
                    case Failure:
                        testCapabilityFailCount++;
                        break;
                    case Skipped:
                        testCapabilitySkippedCount++;
                        break;
                    default:
                        testCapabilityUnknownCount++;
                        break;
                }
            }
            testResult.setSuccessCount(testCapabilitySuccessCount);
            testResult.setFailureCount(testCapabilityFailCount);
            testResult.setSkippedCount(testCapabilitySkippedCount);
            testResult.setUnknownStatusCount(testCapabilityUnknownCount);
            return testResult;
        }
        return null;
    }

    /**
     * Builds the full TestDataCreateRequest for publishing to Hygieia.
     *
     * @return the populated request, or {@code null} if no test results were
     *         found for the build
     */
    public TestDataCreateRequest getTestDataCreateRequest(Run run, TaskListener listener, BuildStatus buildStatus, FilePath filePath, String applicationName, String environmentName, String testType, String filePattern, String directory, String jenkinsName, String buildId) {
        BuildDataCreateRequest buildDataCreateRequest = new BuildBuilder()
                .createBuildRequestFromRun(run, jenkinsName, listener, buildStatus, false);
        TestResult testResult = buildTestResults(run, listener, filePattern, filePath, directory, buildDataCreateRequest, testType);
        if (testResult != null) {
            TestDataCreateRequest request = new TestDataCreateRequest();
            EnvVars env = null;
            try {
                env = run.getEnvironment(listener);
            } catch (IOException | InterruptedException e) {
                logger.warning("Error getting environment variables");
            }
            if (env != null) {
                request.setServerUrl(env.get("JENKINS_URL"));
            } else {
                // Fall back to deriving the server url from the job url.
                String jobPath = "/job" + "/" + buildDataCreateRequest.getJobName() + "/";
                int ind = buildDataCreateRequest.getJobUrl().indexOf(jobPath);
                request.setServerUrl(buildDataCreateRequest.getJobUrl().substring(0, ind));
            }
            request.setTestJobId(buildId);
            request.setType(testResult.getType());
            request.setTestJobName(buildDataCreateRequest.getJobName());
            request.setTestJobUrl(buildDataCreateRequest.getJobUrl());
            request.setTimestamp(testResult.getTimestamp());
            request.setNiceName(jenkinsName);
            request.setDescription(testResult.getDescription());
            request.setDuration(testResult.getDuration());
            request.setEndTime(testResult.getEndTime());
            request.setExecutionId(testResult.getExecutionId());
            request.setFailureCount(testResult.getFailureCount());
            request.setSkippedCount(testResult.getSkippedCount());
            request.setStartTime(testResult.getStartTime());
            request.setSuccessCount(testResult.getSuccessCount());
            request.setTotalCount(testResult.getTotalCount());
            request.setUnknownStatusCount(testResult.getUnknownStatusCount());
            request.getTestCapabilities().addAll(testResult.getTestCapabilities());
            request.setTargetAppName(applicationName);
            request.setTargetEnvName(environmentName);
            return request;
        }
        return null;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.HashMap;
import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer.NMWebApp;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;
public class TestNMWebServicesContainers extends JerseyTest {
private static Context nmContext;
private static ResourceView resourceView;
private static ApplicationACLsManager aclsManager;
private static LocalDirsHandlerService dirsHandler;
private static WebApp nmWebApp;
private static Configuration conf = new Configuration();
private static final File testRootDir = new File("target",
TestNMWebServicesContainers.class.getSimpleName());
private static File testLogDir = new File("target",
TestNMWebServicesContainers.class.getSimpleName() + "LogDir");
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
nmContext = new NodeManager.NMContext(null, null) {
public NodeId getNodeId() {
return NodeId.newInstance("testhost.foo.com", 8042);
};
public int getHttpPort() {
return 1234;
};
};
resourceView = new ResourceView() {
@Override
public long getVmemAllocatedForContainers() {
// 15.5G in bytes
return new Long("16642998272");
}
@Override
public long getPmemAllocatedForContainers() {
// 16G in bytes
return new Long("17179869184");
}
@Override
public boolean isVmemCheckEnabled() {
return true;
}
@Override
public boolean isPmemCheckEnabled() {
return true;
}
};
conf.set(YarnConfiguration.NM_LOCAL_DIRS, testRootDir.getAbsolutePath());
conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath());
NodeHealthCheckerService healthChecker = new NodeHealthCheckerService();
healthChecker.init(conf);
dirsHandler = healthChecker.getDiskHandler();
aclsManager = new ApplicationACLsManager(conf);
nmWebApp = new NMWebApp(resourceView, aclsManager, dirsHandler);
bind(JAXBContextResolver.class);
bind(NMWebServices.class);
bind(GenericExceptionHandler.class);
bind(Context.class).toInstance(nmContext);
bind(WebApp.class).toInstance(nmWebApp);
bind(ResourceView.class).toInstance(resourceView);
bind(ApplicationACLsManager.class).toInstance(aclsManager);
bind(LocalDirsHandlerService.class).toInstance(dirsHandler);
serve("/*").with(GuiceContainer.class);
}
});
public class GuiceServletConfig extends GuiceServletContextListener {
@Override
protected Injector getInjector() {
return injector;
}
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
testRootDir.mkdirs();
testLogDir.mkdir();
}
@AfterClass
static public void cleanup() {
FileUtil.fullyDelete(testRootDir);
FileUtil.fullyDelete(testLogDir);
}
public TestNMWebServicesContainers() {
super(new WebAppDescriptor.Builder(
"org.apache.hadoop.yarn.server.nodemanager.webapp")
.contextListenerClass(GuiceServletConfig.class)
.filterClass(com.google.inject.servlet.GuiceFilter.class)
.contextPath("jersey-guice-filter").servletPath("/").build());
}
@Test
public void testNodeContainersNone() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node")
.path("containers").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("apps isn't NULL", JSONObject.NULL, json.get("containers"));
}
private HashMap<String, String> addAppContainers(Application app)
throws IOException {
Dispatcher dispatcher = new AsyncDispatcher();
ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
app.getAppId(), 1);
Container container1 = new MockContainer(appAttemptId, dispatcher, conf,
app.getUser(), app.getAppId(), 1);
Container container2 = new MockContainer(appAttemptId, dispatcher, conf,
app.getUser(), app.getAppId(), 2);
nmContext.getContainers()
.put(container1.getContainerId(), container1);
nmContext.getContainers()
.put(container2.getContainerId(), container2);
app.getContainers().put(container1.getContainerId(), container1);
app.getContainers().put(container2.getContainerId(), container2);
HashMap<String, String> hash = new HashMap<String, String>();
hash.put(container1.getContainerId().toString(), container1
.getContainerId().toString());
hash.put(container2.getContainerId().toString(), container2
.getContainerId().toString());
return hash;
}
@Test
public void testNodeContainers() throws JSONException, Exception {
testNodeHelper("containers", MediaType.APPLICATION_JSON);
}
@Test
public void testNodeContainersSlash() throws JSONException, Exception {
testNodeHelper("containers/", MediaType.APPLICATION_JSON);
}
// make sure default is json output
@Test
public void testNodeContainersDefault() throws JSONException, Exception {
testNodeHelper("containers/", "");
}
public void testNodeHelper(String path, String media) throws JSONException,
Exception {
WebResource r = resource();
Application app = new MockApp(1);
nmContext.getApplications().put(app.getAppId(), app);
addAppContainers(app);
Application app2 = new MockApp(2);
nmContext.getApplications().put(app2.getAppId(), app2);
addAppContainers(app2);
ClientResponse response = r.path("ws").path("v1").path("node").path(path)
.accept(media).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
JSONObject info = json.getJSONObject("containers");
assertEquals("incorrect number of elements", 1, info.length());
JSONArray conInfo = info.getJSONArray("container");
assertEquals("incorrect number of elements", 4, conInfo.length());
for (int i = 0; i < conInfo.length(); i++) {
verifyNodeContainerInfo(
conInfo.getJSONObject(i),
nmContext.getContainers().get(
ConverterUtils.toContainerId(conInfo.getJSONObject(i).getString(
"id"))));
}
}
@Test
public void testNodeSingleContainers() throws JSONException, Exception {
testNodeSingleContainersHelper(MediaType.APPLICATION_JSON);
}
@Test
public void testNodeSingleContainersSlash() throws JSONException, Exception {
testNodeSingleContainersHelper(MediaType.APPLICATION_JSON);
}
@Test
public void testNodeSingleContainersDefault() throws JSONException, Exception {
testNodeSingleContainersHelper("");
}
public void testNodeSingleContainersHelper(String media)
throws JSONException, Exception {
WebResource r = resource();
Application app = new MockApp(1);
nmContext.getApplications().put(app.getAppId(), app);
HashMap<String, String> hash = addAppContainers(app);
Application app2 = new MockApp(2);
nmContext.getApplications().put(app2.getAppId(), app2);
addAppContainers(app2);
for (String id : hash.keySet()) {
ClientResponse response = r.path("ws").path("v1").path("node")
.path("containers").path(id).accept(media).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyNodeContainerInfo(json.getJSONObject("container"), nmContext
.getContainers().get(ConverterUtils.toContainerId(id)));
}
}
@Test
public void testSingleContainerInvalid() throws JSONException, Exception {
WebResource r = resource();
Application app = new MockApp(1);
nmContext.getApplications().put(app.getAppId(), app);
addAppContainers(app);
Application app2 = new MockApp(2);
nmContext.getApplications().put(app2.getAppId(), app2);
addAppContainers(app2);
try {
r.path("ws").path("v1").path("node").path("containers")
.path("container_foo_1234").accept(MediaType.APPLICATION_JSON)
.get(JSONObject.class);
fail("should have thrown exception on invalid user query");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject msg = response.getEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals("incorrect number of elements", 3, exception.length());
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
WebServicesTestUtils.checkStringMatch("exception message",
"java.lang.Exception: invalid container id, container_foo_1234",
message);
WebServicesTestUtils.checkStringMatch("exception type",
"BadRequestException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.BadRequestException", classname);
}
}
@Test
public void testSingleContainerInvalid2() throws JSONException, Exception {
WebResource r = resource();
Application app = new MockApp(1);
nmContext.getApplications().put(app.getAppId(), app);
addAppContainers(app);
Application app2 = new MockApp(2);
nmContext.getApplications().put(app2.getAppId(), app2);
addAppContainers(app2);
try {
r.path("ws").path("v1").path("node").path("containers")
.path("container_1234_0001").accept(MediaType.APPLICATION_JSON)
.get(JSONObject.class);
fail("should have thrown exception on invalid user query");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject msg = response.getEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals("incorrect number of elements", 3, exception.length());
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
WebServicesTestUtils.checkStringMatch("exception message",
"java.lang.Exception: invalid container id, container_1234_0001",
message);
WebServicesTestUtils.checkStringMatch("exception type",
"BadRequestException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.BadRequestException", classname);
}
}
@Test
public void testSingleContainerWrong() throws JSONException, Exception {
WebResource r = resource();
Application app = new MockApp(1);
nmContext.getApplications().put(app.getAppId(), app);
addAppContainers(app);
Application app2 = new MockApp(2);
nmContext.getApplications().put(app2.getAppId(), app2);
addAppContainers(app2);
try {
r.path("ws").path("v1").path("node").path("containers")
.path("container_1234_0001_01_000005")
.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
fail("should have thrown exception on invalid user query");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject msg = response.getEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals("incorrect number of elements", 3, exception.length());
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
WebServicesTestUtils
.checkStringMatch(
"exception message",
"java.lang.Exception: container with id, container_1234_0001_01_000005, not found",
message);
WebServicesTestUtils.checkStringMatch("exception type",
"NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.NotFoundException", classname);
}
}
@Test
public void testNodeSingleContainerXML() throws JSONException, Exception {
WebResource r = resource();
Application app = new MockApp(1);
nmContext.getApplications().put(app.getAppId(), app);
HashMap<String, String> hash = addAppContainers(app);
Application app2 = new MockApp(2);
nmContext.getApplications().put(app2.getAppId(), app2);
addAppContainers(app2);
for (String id : hash.keySet()) {
ClientResponse response = r.path("ws").path("v1").path("node")
.path("containers").path(id).accept(MediaType.APPLICATION_XML)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("container");
assertEquals("incorrect number of elements", 1, nodes.getLength());
verifyContainersInfoXML(nodes,
nmContext.getContainers().get(ConverterUtils.toContainerId(id)));
}
}
@Test
public void testNodeContainerXML() throws JSONException, Exception {
WebResource r = resource();
Application app = new MockApp(1);
nmContext.getApplications().put(app.getAppId(), app);
addAppContainers(app);
Application app2 = new MockApp(2);
nmContext.getApplications().put(app2.getAppId(), app2);
addAppContainers(app2);
ClientResponse response = r.path("ws").path("v1").path("node")
.path("containers").accept(MediaType.APPLICATION_XML)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("container");
assertEquals("incorrect number of elements", 4, nodes.getLength());
}
public void verifyContainersInfoXML(NodeList nodes, Container cont)
throws JSONException, Exception {
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyNodeContainerInfoGeneric(cont,
WebServicesTestUtils.getXmlString(element, "id"),
WebServicesTestUtils.getXmlString(element, "state"),
WebServicesTestUtils.getXmlString(element, "user"),
WebServicesTestUtils.getXmlInt(element, "exitCode"),
WebServicesTestUtils.getXmlString(element, "diagnostics"),
WebServicesTestUtils.getXmlString(element, "nodeId"),
WebServicesTestUtils.getXmlInt(element, "totalMemoryNeededMB"),
WebServicesTestUtils.getXmlString(element, "containerLogsLink"));
}
}
/**
 * Checks a JSON container object against the expected {@link Container}:
 * asserts the object carries exactly eight fields, then delegates the
 * per-field comparison to verifyNodeContainerInfoGeneric.
 */
public void verifyNodeContainerInfo(JSONObject info, Container cont)
    throws JSONException, Exception {
  assertEquals("incorrect number of elements", 8, info.length());
  String id = info.getString("id");
  String state = info.getString("state");
  String user = info.getString("user");
  int exitCode = info.getInt("exitCode");
  String diagnostics = info.getString("diagnostics");
  String nodeId = info.getString("nodeId");
  int totalMemoryNeededMB = info.getInt("totalMemoryNeededMB");
  String logsLink = info.getString("containerLogsLink");
  verifyNodeContainerInfoGeneric(cont, id, state, user, exitCode,
      diagnostics, nodeId, totalMemoryNeededMB, logsLink);
}
/**
 * Asserts that each reported container field matches the value expected
 * from the {@link Container} under test and the NM context: id, state and
 * user mirror the container; exit code is 0 and diagnostics is "testing"
 * (the values the mock containers report); nodeId mirrors the context; the
 * memory requirement equals the scheduler's minimum allocation; and the
 * logs link contains the short containerlogs path.
 */
public void verifyNodeContainerInfoGeneric(Container cont, String id,
    String state, String user, int exitCode, String diagnostics,
    String nodeId, int totalMemoryNeededMB, String logsLink)
    throws JSONException, Exception {
  String expectedId = cont.getContainerId().toString();
  WebServicesTestUtils.checkStringMatch("id", expectedId, id);
  String expectedState = cont.getContainerState().toString();
  WebServicesTestUtils.checkStringMatch("state", expectedState, state);
  WebServicesTestUtils.checkStringMatch("user", cont.getUser().toString(),
      user);
  // Mock containers always finish with exit code 0 and "testing" diagnostics.
  assertEquals("exitCode wrong", 0, exitCode);
  WebServicesTestUtils
      .checkStringMatch("diagnostics", "testing", diagnostics);
  String expectedNodeId = nmContext.getNodeId().toString();
  WebServicesTestUtils.checkStringMatch("nodeId", expectedNodeId, nodeId);
  assertEquals("totalMemoryNeededMB wrong",
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
      totalMemoryNeededMB);
  // The full link may carry host/prefix; only the short path is checked.
  String shortLink =
      ujoin("containerlogs", cont.getContainerId().toString(),
          cont.getUser());
  assertTrue("containerLogsLink wrong", logsLink.contains(shortLink));
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.slaves;
import hudson.FilePath;
import hudson.Functions;
import hudson.model.Computer;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Used by {@link Computer} to keep track of workspaces that are actively in use.
*
* <p>
* SUBJECT TO CHANGE! Do not use this from plugins directly.
*
* @author Kohsuke Kawaguchi
* @since 1.319
* @see Computer#getWorkspaceList()
*/
public final class WorkspaceList {
// Appended (with a counter) to the base path when the base workspace is busy.
// '_' rather than '@' because '@' confused some SCMs (see HUDSON-4791 note
// in allocate() below).
private static final String WORKSPACE_NAME_SUFFIX = "_";
/**
* Book keeping for workspace allocation: who holds a workspace, since when,
* and from where it was acquired.
*/
public static final class Entry {
/**
* Who acquired this workspace?
*/
//TODO: review and check whether we can do it private
public final Thread holder = Thread.currentThread();
/**
* When was it acquired? (epoch millis)
*/
//TODO: review and check whether we can do it private
public final long time = System.currentTimeMillis();
/**
* From where? The Exception captures the allocation stack trace for
* diagnostics; it is never thrown.
*/
//TODO: review and check whether we can do it private
public final Exception source = new Exception();
/**
* True makes the caller of {@link WorkspaceList#allocate(FilePath)} wait
* for this workspace.
*/
//TODO: review and check whether we can do it private
public final boolean quick;
//TODO: review and check whether we can do it private
public final FilePath path;
private Entry(FilePath path, boolean quick) {
this.path = path;
this.quick = quick;
}
public Thread getHolder() {
return holder;
}
public long getTime() {
return time;
}
public Exception getSource() {
return source;
}
public boolean isQuick() {
return quick;
}
public FilePath getPath() {
return path;
}
@Override
public String toString() {
String s = path+" owned by "+holder.getName()+" from "+new Date(time);
if(quick) s+=" (quick)";
s+="\n"+Functions.printThrowable(source);
return s;
}
}
/**
* Represents a leased workspace that needs to be returned later.
*/
public static abstract class Lease {
//TODO: review and check whether we can do it private
public final FilePath path;
protected Lease(FilePath path) {
this.path = path;
}
public FilePath getPath() {
return path;
}
/**
* Releases this lease.
*/
public abstract void release();
/**
* Creates a dummy {@link Lease} object that does no-op in the release.
*/
public static Lease createDummyLease(FilePath p) {
return new Lease(p) {
public void release() {
// noop
}
};
}
}
// Workspaces currently in use, keyed by path. All access is guarded by this
// WorkspaceList's monitor (every accessor below is synchronized).
private final Map<FilePath,Entry> inUse = new HashMap<FilePath,Entry>();
public WorkspaceList() {
}
/**
* Allocates a workspace by adding some variation to the given base to make it unique.
*/
public synchronized Lease allocate(FilePath base) throws InterruptedException {
for (int i=1; ; i++) {
//Workspace suffix was changed from @ to _, because of some issues with SCMs.
//see http://issues.hudson-ci.org/browse/HUDSON-4791
FilePath candidate = i==1 ? base : base.withSuffix(WORKSPACE_NAME_SUFFIX + i);
Entry e = inUse.get(candidate);
// A non-quick holder keeps this candidate busy: move on to the next
// suffix. A quick holder is expected to finish soon, so wait for it
// inside acquire() instead of allocating yet another directory.
if(e!=null && !e.quick)
continue;
return acquire(candidate);
}
}
/**
* Just records that this workspace is being used, without paying any attention to the synchronization support.
*/
public synchronized Lease record(FilePath p) {
log("recorded "+p);
Entry old = inUse.put(p, new Entry(p, false));
if (old!=null)
throw new AssertionError("Tried to record a workspace already owned: "+old);
return lease(p);
}
/**
* Releases an allocated or acquired workspace.
*/
private synchronized void _release(FilePath p) {
Entry old = inUse.remove(p);
if (old==null)
throw new AssertionError("Releasing unallocated workspace "+p);
// Wake up any acquire() call blocked waiting for this workspace.
notifyAll();
}
/**
* Acquires the given workspace. If necessary, this method blocks until it's made available.
*
* @return
* The same {@link FilePath} as given to this method.
*/
public synchronized Lease acquire(FilePath p) throws InterruptedException {
return acquire(p,false);
}
/**
* See {@link #acquire(FilePath)}
*
* @param quick
* If true, indicates that the acquired workspace will be returned quickly.
* This makes other calls to {@link #allocate(FilePath)} to wait for the release of this workspace.
*/
public synchronized Lease acquire(FilePath p, boolean quick) throws InterruptedException {
// Block until nobody holds p; _release() calls notifyAll() on this monitor.
while (inUse.containsKey(p))
wait();
log("acquired "+p);
inUse.put(p,new Entry(p,quick));
return lease(p);
}
/**
* Wraps a path into a valid lease whose release() removes it from the in-use map.
*/
private Lease lease(FilePath p) {
return new Lease(p) {
public void release() {
_release(path);
}
};
}
// FINE-level trace logging, prefixed with the current thread's name.
private void log(String msg) {
if (LOGGER.isLoggable(Level.FINE))
LOGGER.fine(Thread.currentThread().getName() + " " + msg);
}
private static final Logger LOGGER = Logger.getLogger(WorkspaceList.class.getName());
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hwpf.model;
import java.io.IOException;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.hwpf.model.io.HWPFOutputStream;
import org.apache.poi.hwpf.usermodel.CharacterProperties;
import org.apache.poi.hwpf.usermodel.ParagraphProperties;
import org.apache.poi.hwpf.sprm.ParagraphSprmUncompressor;
import org.apache.poi.hwpf.sprm.CharacterSprmUncompressor;
/**
* Represents a document's stylesheet. A word documents formatting is stored as
* compressed styles that are based on styles contained in the stylesheet. This
* class also contains static utility functions to uncompress different
* formatting properties.
*
* @author Ryan Ackley
*/
public final class StyleSheet implements HDFType {
    /** Style index meaning "no style" / "no parent style". */
    public static final int NIL_STYLE = 4095;
    private static final int PAP_TYPE = 1;
    private static final int CHP_TYPE = 2;
    private static final int SEP_TYPE = 4;
    private static final int TAP_TYPE = 5;
    // Shared empty property sets returned for NIL_STYLE lookups.
    private final static ParagraphProperties NIL_PAP = new ParagraphProperties();
    private final static CharacterProperties NIL_CHP = new CharacterProperties();

    // Fields of the STSHI (stylesheet information) header, read verbatim.
    private int _stshiLength;
    private int _baseLength;
    private int _flags;
    private int _maxIndex;
    private int _maxFixedIndex;
    private int _stylenameVersion;
    private int[] _rgftc;

    // One entry per STD in the stylesheet; entries may be null for
    // zero-length style slots.
    StyleDescription[] _styleDescriptions;

    /**
     * StyleSheet constructor. Loads a document's stylesheet information.
     *
     * @param tableStream A byte array containing a document's raw stylesheet
     *        info. Found by using FileInformationBlock.getFcStshf() and
     *        FileInformationBLock.getLcbStshf()
     * @param offset the offset of the STSHI within {@code tableStream}
     */
    public StyleSheet(byte[] tableStream, int offset)
    {
        int startOffset = offset;
        _stshiLength = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        int stdCount = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _baseLength = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _flags = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _maxIndex = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _maxFixedIndex = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _stylenameVersion = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _rgftc = new int[3];
        _rgftc[0] = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _rgftc[1] = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        _rgftc[2] = LittleEndian.getShort(tableStream, offset);
        offset += LittleEndian.SHORT_SIZE;
        // Skip past the remainder of the STSHI (whose declared length does
        // not include its own 2-byte size field); the STDs start right after.
        offset = startOffset + LittleEndian.SHORT_SIZE + _stshiLength;
        _styleDescriptions = new StyleDescription[stdCount];
        for(int x = 0; x < stdCount; x++)
        {
            int stdSize = LittleEndian.getShort(tableStream, offset);
            //get past the size
            offset += 2;
            if(stdSize > 0)
            {
                // Zero-sized slots stay null in _styleDescriptions.
                StyleDescription aStyle = new StyleDescription(tableStream,
                    _baseLength, offset, true);
                _styleDescriptions[x] = aStyle;
            }
            offset += stdSize;
        }
        // Resolve each style's PAP/CHP against its base style up front.
        for(int x = 0; x < _styleDescriptions.length; x++)
        {
            if(_styleDescriptions[x] != null)
            {
                createPap(x);
                createChp(x);
            }
        }
    }

    /**
     * Serializes this stylesheet: the STSHI (preceded by its 2-byte size)
     * followed by each STD, each preceded by its 2-byte size and padded to a
     * word boundary.
     *
     * @param out the stream to write to
     * @throws IOException if the underlying stream fails
     */
    public void writeTo(HWPFOutputStream out)
        throws IOException
    {
        int offset = 0;
        // add two bytes so we can prepend the stylesheet w/ its size
        byte[] buf = new byte[_stshiLength + 2];
        LittleEndian.putShort(buf, offset, (short)_stshiLength);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_styleDescriptions.length);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_baseLength);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_flags);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_maxIndex);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_maxFixedIndex);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_stylenameVersion);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_rgftc[0]);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_rgftc[1]);
        offset += LittleEndian.SHORT_SIZE;
        LittleEndian.putShort(buf, offset, (short)_rgftc[2]);
        out.write(buf);
        byte[] sizeHolder = new byte[2];
        for (int x = 0; x < _styleDescriptions.length; x++)
        {
            if(_styleDescriptions[x] != null)
            {
                byte[] std = _styleDescriptions[x].toByteArray();
                // adjust the size so it is always on a word boundary
                LittleEndian.putShort(sizeHolder, (short)((std.length) + (std.length % 2)));
                out.write(sizeHolder);
                out.write(std);
                // Must always start on a word boundary.
                if (std.length % 2 == 1)
                {
                    out.write('\0');
                }
            }
            else
            {
                // Empty slot: write a zero size and no payload.
                sizeHolder[0] = 0;
                sizeHolder[1] = 0;
                out.write(sizeHolder);
            }
        }
    }

    /**
     * Two stylesheets are equal when every STSHI header field matches and
     * their style description arrays are element-wise equal (null-safe).
     * Unlike the previous implementation, comparing against {@code null} or
     * a non-StyleSheet returns {@code false} instead of throwing.
     */
    @Override
    public boolean equals(Object o)
    {
        if (o == this)
        {
            return true;
        }
        // Also covers o == null; the old blind cast threw CCE/NPE here.
        if (!(o instanceof StyleSheet))
        {
            return false;
        }
        StyleSheet ss = (StyleSheet)o;
        if (ss._baseLength == _baseLength && ss._flags == _flags &&
            ss._maxFixedIndex ==_maxFixedIndex && ss._maxIndex == _maxIndex &&
            ss._rgftc[0] == _rgftc[0] && ss._rgftc[1] == _rgftc[1] &&
            ss._rgftc[2] == _rgftc[2] && ss._stshiLength == _stshiLength &&
            ss._stylenameVersion == _stylenameVersion)
        {
            if (ss._styleDescriptions.length == _styleDescriptions.length)
            {
                for (int x = 0; x < _styleDescriptions.length; x++)
                {
                    StyleDescription theirs = ss._styleDescriptions[x];
                    StyleDescription ours = _styleDescriptions[x];
                    // Same reference (including both null) is equal; a null
                    // on either side alone is not. The old code NPE'd when
                    // only theirs was null.
                    if (theirs != ours)
                    {
                        if (theirs == null || !theirs.equals(ours))
                        {
                            return false;
                        }
                    }
                }
                return true;
            }
        }
        return false;
    }

    /**
     * Hash code consistent with {@link #equals(Object)}, derived from the
     * STSHI header fields. Previously equals was overridden without
     * hashCode, breaking the Object contract for hash-based collections.
     */
    @Override
    public int hashCode()
    {
        int result = 31 * _stshiLength + _baseLength;
        result = 31 * result + _flags;
        result = 31 * result + _maxIndex;
        result = 31 * result + _maxFixedIndex;
        result = 31 * result + _stylenameVersion;
        result = 31 * result + _rgftc[0];
        result = 31 * result + _rgftc[1];
        result = 31 * result + _rgftc[2];
        return result;
    }

    /**
     * Creates a ParagraphProperties object from a papx stored in the
     * StyleDescription at the index istd in the StyleDescription array. The PAP
     * is placed in the StyleDescription at istd after its been created. Not
     * every StyleDescription will contain a papx. In these cases this function
     * does nothing
     *
     * @param istd The index of the StyleDescription to create the
     *        ParagraphProperties from (and also place the finished PAP in)
     */
    private void createPap(int istd)
    {
        StyleDescription sd = _styleDescriptions[istd];
        ParagraphProperties pap = sd.getPAP();
        byte[] papx = sd.getPAPX();
        int baseIndex = sd.getBaseStyle();
        if(pap == null && papx != null)
        {
            ParagraphProperties parentPAP = new ParagraphProperties();
            if(baseIndex != NIL_STYLE)
            {
                parentPAP = _styleDescriptions[baseIndex].getPAP();
                if(parentPAP == null) {
                    if(baseIndex == istd) {
                        // Oh dear, style claims that it is its own parent
                        throw new IllegalStateException("Pap style " + istd + " claimed to have itself as its parent, which isn't allowed");
                    }
                    // Create the parent style
                    createPap(baseIndex);
                    parentPAP = _styleDescriptions[baseIndex].getPAP();
                }
            }
            pap = ParagraphSprmUncompressor.uncompressPAP(parentPAP, papx, 2);
            sd.setPAP(pap);
        }
    }

    /**
     * Creates a CharacterProperties object from a chpx stored in the
     * StyleDescription at the index istd in the StyleDescription array. The
     * CharacterProperties object is placed in the StyleDescription at istd after
     * its been created. Not every StyleDescription will contain a chpx. In these
     * cases this function does nothing.
     *
     * @param istd The index of the StyleDescription to create the
     *        CharacterProperties object from.
     */
    private void createChp(int istd)
    {
        StyleDescription sd = _styleDescriptions[istd];
        CharacterProperties chp = sd.getCHP();
        byte[] chpx = sd.getCHPX();
        int baseIndex = sd.getBaseStyle();
        if(chp == null && chpx != null)
        {
            CharacterProperties parentCHP = new CharacterProperties();
            if(baseIndex != NIL_STYLE)
            {
                parentCHP = _styleDescriptions[baseIndex].getCHP();
                if(parentCHP == null)
                {
                    // Recursively materialize the parent's CHP first.
                    createChp(baseIndex);
                    parentCHP = _styleDescriptions[baseIndex].getCHP();
                }
            }
            chp = CharacterSprmUncompressor.uncompressCHP(parentCHP, chpx, 0);
            sd.setCHP(chp);
        }
    }

    /**
     * Gets the number of styles in the style sheet.
     * @return The number of styles in the style sheet.
     */
    public int numStyles() {
        return _styleDescriptions.length;
    }

    /**
     * Gets the StyleDescription at index x.
     *
     * @param x the index of the desired StyleDescription.
     */
    public StyleDescription getStyleDescription(int x)
    {
        return _styleDescriptions[x];
    }

    /**
     * Returns the character properties for style {@code x}; the shared empty
     * CHP for NIL_STYLE, or null for an empty style slot.
     */
    public CharacterProperties getCharacterStyle(int x)
    {
        if (x == NIL_STYLE)
        {
            return NIL_CHP;
        }
        return (_styleDescriptions[x] != null ? _styleDescriptions[x].getCHP() : null);
    }

    /**
     * Returns the paragraph properties for style {@code x}; the shared empty
     * PAP for NIL_STYLE, or null for an empty style slot.
     */
    public ParagraphProperties getParagraphStyle(int x)
    {
        if (x == NIL_STYLE)
        {
            return NIL_PAP;
        }
        return (_styleDescriptions[x] != null ? _styleDescriptions[x].getPAP() : null);
    }
}
| |
package com.planet_ink.coffee_mud.Abilities.Common;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2002-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Common gathering skill that lets a player forage a room's natural
 * vegetation-class resources (plus hemp, silk, cotton and salt) over a
 * timed activity, delivering the gathered items when the skill uninvokes.
 */
public class Foraging extends GatheringSkill
{
	@Override
	public String ID()
	{
		return "Foraging";
	}

	private final static String localizedName = CMLib.lang().L("Foraging");

	@Override
	public String name()
	{
		return localizedName;
	}

	private static final String[] triggerStrings = I(new String[] { "FORAGE", "FORAGING" });

	@Override
	public String[] triggerStrings()
	{
		return triggerStrings;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_COMMON_SKILL | Ability.DOMAIN_GATHERINGSKILL;
	}

	@Override
	protected boolean allowedWhileMounted()
	{
		return false;
	}

	@Override
	public String supportedResourceString()
	{
		// FIX: invoke() also confirms and forages RawMaterial.RESOURCE_SALT
		// (see both the location checks and the yield condition there), so
		// SALT belongs in the advertised supported-resource list.
		return "VEGETATION|HEMP|SILK|COTTON|SALT";
	}

	// The resource item selected for this foraging session, or null when the
	// room yielded nothing.
	protected Item found = null;
	// Display name of the found resource, used in activity/completion messages.
	protected String foundShortName = "";

	public Foraging()
	{
		super();
		displayText=L("You are foraging...");
		verb=L("foraging");
	}

	/** Duration of the foraging activity in ticks, scaled by skill level. */
	protected int getDuration(final MOB mob, final int level)
	{
		return getDuration(45,mob,level,10);
	}

	@Override
	protected int baseYield()
	{
		return 1;
	}

	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		if((affected instanceof MOB)&&(tickID==Tickable.TICKID_MOB))
		{
			final MOB mob=(MOB)affected;
			// At the sixth tick, reveal whether anything was found; if not,
			// hint at a direction with vegetation and end the activity early.
			if(tickUp==6)
			{
				if(found!=null)
				{
					commonTell(mob,L("You have found some @x1!",foundShortName));
					displayText=L("You are foraging for @x1",foundShortName);
					verb=L("foraging for @x1",foundShortName);
				}
				else
				{
					final StringBuffer str=new StringBuffer(L("You can't seem to find anything worth foraging around here.\n\r"));
					final int d=lookingForMat(RawMaterial.MATERIAL_VEGETATION,mob.location());
					if(d<0)
						str.append(L("You might try elsewhere."));
					else
						str.append(L("You might try @x1.",CMLib.directions().getInDirectionName(d)));
					commonTell(mob,str.toString());
					unInvoke();
				}
			}
		}
		return super.tick(ticking,tickID);
	}

	@Override
	public void unInvoke()
	{
		if(canBeUninvoked())
		{
			if(affected instanceof MOB)
			{
				final MOB mob=(MOB)affected;
				// Deliver the yield only on successful, non-aborted completion.
				if((found!=null)&&(!aborted)&&(mob.location()!=null))
				{
					// Cloth-class resources (silk/cotton/hemp) yield in larger
					// batches than ordinary vegetation.
					int amount=((found.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_CLOTH)?
							(CMLib.dice().roll(1,10,0)*(baseYield()+abilityCode())):
							(CMLib.dice().roll(1,3,0)*(baseYield()+abilityCode()));
					amount=super.adjustYieldBasedOnRoomSpam(amount, mob.location());
					final CMMsg msg=CMClass.getMsg(mob,found,this,getCompletedActivityMessageType(),null);
					msg.setValue(amount);
					if(mob.location().okMessage(mob, msg))
					{
						found=(Item)msg.target();
						// rely on changers to mangle the text
						if(msg.value()<2)
							msg.modify(L("<S-NAME> manage(s) to gather up @x1.",found.name()));
						else
							msg.modify(L("<S-NAME> manage(s) to gather @x1 pounds of @x2.",""+msg.value(),foundShortName));
						mob.location().send(mob, msg);
						// Drop one copy per pound gathered and have the mob pick it up.
						for(int i=0;i<msg.value();i++)
						{
							final Item newFound=(Item)found.copyOf();
							if(!dropAWinner(mob,newFound))
								break;
							CMLib.commands().postGet(mob,null,newFound,true);
						}
					}
				}
			}
		}
		super.unInvoke();
	}

	@Override
	public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
	{
		if(super.checkStop(mob, commands))
			return true;
		bundling=false;
		// "FORAGE BUNDLE ..." bundles existing resources instead of foraging.
		if((!auto)
		&&(commands.size()>0)
		&&((commands.get(0)).equalsIgnoreCase("bundle")))
		{
			bundling=true;
			if(super.invoke(mob,commands,givenTarget,auto,asLevel))
				return super.bundle(mob,commands);
			return false;
		}
		verb=L("foraging");
		found=null;
		// The room must plausibly hold at least one forageable resource class.
		if((!confirmPossibleMaterialLocation(RawMaterial.MATERIAL_VEGETATION,mob.location()))
		&&(!confirmPossibleMaterialLocation(RawMaterial.RESOURCE_HEMP,mob.location()))
		&&(!confirmPossibleMaterialLocation(RawMaterial.RESOURCE_SILK,mob.location()))
		&&(!confirmPossibleMaterialLocation(RawMaterial.RESOURCE_SALT,mob.location()))
		&&(!confirmPossibleMaterialLocation(RawMaterial.RESOURCE_COTTON,mob.location())))
		{
			commonTell(mob,L("You don't think this is a good place to forage."));
			return false;
		}
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		// On a successful proficiency roll, materialize the room's resource if
		// it is one of the forageable kinds.
		final int resourceType=mob.location().myResource();
		if((proficiencyCheck(mob,0,auto))
		&&(super.checkIfAnyYield(mob.location()))
		&&(((resourceType&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_VEGETATION)
			||(resourceType==RawMaterial.RESOURCE_HEMP)
			||(resourceType==RawMaterial.RESOURCE_SALT)
			||(resourceType==RawMaterial.RESOURCE_SILK)
			||(resourceType==RawMaterial.RESOURCE_COTTON)))
		{
			found=(Item)CMLib.materials().makeResource(resourceType,Integer.toString(mob.location().domainType()),false,null, "");
			foundShortName="nothing";
			if(found!=null)
				foundShortName=RawMaterial.CODES.NAME(found.material()).toLowerCase();
		}
		final Item oldFound=found;
		final int duration=getDuration(mob,1);
		final CMMsg msg=CMClass.getMsg(mob,found,this,getActivityMessageType(),L("<S-NAME> start(s) foraging."));
		if(mob.location().okMessage(mob,msg))
		{
			// herb/locale customisation for jeremy
			if((found!=null)
			&&(found.material()==RawMaterial.RESOURCE_HERBS)
			&&((found.Name().toUpperCase().endsWith(" HERBS"))
				||(found.Name().equalsIgnoreCase("herbs"))))
			{
				// skills/herbs.txt maps room IDs to weighted herb names:
				// each line is "[weight ]name"; lines without a leading number
				// default to weight 10.
				final Map<String,List<String>> H=Resources.getCachedMultiLists("skills/herbs.txt",false);
				if(H!=null)
				{
					final List<String> V=H.get(mob.location().ID());
					if((V!=null)&&(V.size()>0))
					{
						// First pass: total the weights.
						int total=0;
						for(int i=0;i<V.size();i++)
						{
							final String s=V.get(i);
							final int x=s.indexOf(' ');
							if((x>=0)&&(CMath.isNumber(s.substring(0,x).trim())))
								total+=CMath.s_int(s.substring(0,x).trim());
							else
								total+=10;
						}
						// Second pass: pick the herb whose cumulative weight
						// range contains the rolled value.
						final int choice=CMLib.dice().roll(1,total,-1);
						total=0;
						for(int i=0;i<V.size();i++)
						{
							final String s=V.get(i);
							final int x=s.indexOf(' ');
							if((x>=0)&&(CMath.isNumber(s.substring(0,x).trim())))
							{
								total+=CMath.s_int(s.substring(0,x).trim());
								if(choice<=total)
								{
									found.setSecretIdentity(s.substring(x+1).trim());
									break;
								}
							}
							else
							{
								total+=10;
								if(choice<=total)
								{
									found.setSecretIdentity(s);
									break;
								}
							}
						}
					}
				}
			}
			mob.location().send(mob,msg);
			// Message preview may have swapped the target item; refresh names.
			found=(Item)msg.target();
			if((found != oldFound)&&(found!=null))
				foundShortName=CMLib.english().removeArticleLead(found.Name());
			beneficialAffect(mob,mob,asLevel,duration);
		}
		return true;
	}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.websecurityscanner.v1;
import static com.google.cloud.websecurityscanner.v1.WebSecurityScannerClient.ListCrawledUrlsPagedResponse;
import static com.google.cloud.websecurityscanner.v1.WebSecurityScannerClient.ListFindingsPagedResponse;
import static com.google.cloud.websecurityscanner.v1.WebSecurityScannerClient.ListScanConfigsPagedResponse;
import static com.google.cloud.websecurityscanner.v1.WebSecurityScannerClient.ListScanRunsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.websecurityscanner.v1.stub.WebSecurityScannerStubSettings;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link WebSecurityScannerClient}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (websecurityscanner.googleapis.com) and default port (443) are
* used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of createScanConfig to 30 seconds:
*
* <pre>{@code
* WebSecurityScannerSettings.Builder webSecurityScannerSettingsBuilder =
* WebSecurityScannerSettings.newBuilder();
* webSecurityScannerSettingsBuilder
* .createScanConfigSettings()
* .setRetrySettings(
* webSecurityScannerSettingsBuilder
* .createScanConfigSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* WebSecurityScannerSettings webSecurityScannerSettings =
* webSecurityScannerSettingsBuilder.build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class WebSecurityScannerSettings extends ClientSettings<WebSecurityScannerSettings> {
/** Returns the object with the settings used for calls to createScanConfig. */
public UnaryCallSettings<CreateScanConfigRequest, ScanConfig> createScanConfigSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).createScanConfigSettings();
}
/** Returns the object with the settings used for calls to deleteScanConfig. */
public UnaryCallSettings<DeleteScanConfigRequest, Empty> deleteScanConfigSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).deleteScanConfigSettings();
}
/** Returns the object with the settings used for calls to getScanConfig. */
public UnaryCallSettings<GetScanConfigRequest, ScanConfig> getScanConfigSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).getScanConfigSettings();
}
/** Returns the object with the settings used for calls to listScanConfigs. */
public PagedCallSettings<
ListScanConfigsRequest, ListScanConfigsResponse, ListScanConfigsPagedResponse>
listScanConfigsSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).listScanConfigsSettings();
}
/** Returns the object with the settings used for calls to updateScanConfig. */
public UnaryCallSettings<UpdateScanConfigRequest, ScanConfig> updateScanConfigSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).updateScanConfigSettings();
}
/** Returns the object with the settings used for calls to startScanRun. */
public UnaryCallSettings<StartScanRunRequest, ScanRun> startScanRunSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).startScanRunSettings();
}
/** Returns the object with the settings used for calls to getScanRun. */
public UnaryCallSettings<GetScanRunRequest, ScanRun> getScanRunSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).getScanRunSettings();
}
/** Returns the object with the settings used for calls to listScanRuns. */
public PagedCallSettings<ListScanRunsRequest, ListScanRunsResponse, ListScanRunsPagedResponse>
listScanRunsSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).listScanRunsSettings();
}
/** Returns the object with the settings used for calls to stopScanRun. */
public UnaryCallSettings<StopScanRunRequest, ScanRun> stopScanRunSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).stopScanRunSettings();
}
/** Returns the object with the settings used for calls to listCrawledUrls. */
public PagedCallSettings<
ListCrawledUrlsRequest, ListCrawledUrlsResponse, ListCrawledUrlsPagedResponse>
listCrawledUrlsSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).listCrawledUrlsSettings();
}
/** Returns the object with the settings used for calls to getFinding. */
public UnaryCallSettings<GetFindingRequest, Finding> getFindingSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).getFindingSettings();
}
/** Returns the object with the settings used for calls to listFindings. */
public PagedCallSettings<ListFindingsRequest, ListFindingsResponse, ListFindingsPagedResponse>
listFindingsSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).listFindingsSettings();
}
/** Returns the object with the settings used for calls to listFindingTypeStats. */
public UnaryCallSettings<ListFindingTypeStatsRequest, ListFindingTypeStatsResponse>
listFindingTypeStatsSettings() {
return ((WebSecurityScannerStubSettings) getStubSettings()).listFindingTypeStatsSettings();
}
public static final WebSecurityScannerSettings create(WebSecurityScannerStubSettings stub)
throws IOException {
return new WebSecurityScannerSettings.Builder(stub.toBuilder()).build();
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return WebSecurityScannerStubSettings.defaultExecutorProviderBuilder();
}
/** Returns the default service endpoint. */
public static String getDefaultEndpoint() {
return WebSecurityScannerStubSettings.getDefaultEndpoint();
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return WebSecurityScannerStubSettings.getDefaultServiceScopes();
}
/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return WebSecurityScannerStubSettings.defaultCredentialsProviderBuilder();
}
/** Returns a builder for the default ChannelProvider for this service. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
return WebSecurityScannerStubSettings.defaultGrpcTransportProviderBuilder();
}
public static TransportChannelProvider defaultTransportChannelProvider() {
return WebSecurityScannerStubSettings.defaultTransportChannelProvider();
}
@BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return WebSecurityScannerStubSettings.defaultApiClientHeaderProviderBuilder();
}
  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new builder for this class, using the given client context. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /** Constructs a settings instance from the given builder. */
  protected WebSecurityScannerSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }
  /** Builder for WebSecurityScannerSettings. */
  public static class Builder extends ClientSettings.Builder<WebSecurityScannerSettings, Builder> {
    /** Constructs a builder with a default (null) client context. */
    protected Builder() throws IOException {
      this(((ClientContext) null));
    }
    /** Constructs a builder using the given client context. */
    protected Builder(ClientContext clientContext) {
      super(WebSecurityScannerStubSettings.newBuilder(clientContext));
    }
    /** Constructs a builder pre-populated from an existing settings instance. */
    protected Builder(WebSecurityScannerSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }
    /** Constructs a builder wrapping the given stub-settings builder. */
    protected Builder(WebSecurityScannerStubSettings.Builder stubSettings) {
      super(stubSettings);
    }
    /** Creates a builder populated with this service's default settings. */
    private static Builder createDefault() {
      return new Builder(WebSecurityScannerStubSettings.newBuilder());
    }
    /** Returns the underlying stub-settings builder this builder delegates to. */
    public WebSecurityScannerStubSettings.Builder getStubSettingsBuilder() {
      return ((WebSecurityScannerStubSettings.Builder) getStubSettings());
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }
    /** Returns the builder for the settings used for calls to createScanConfig. */
    public UnaryCallSettings.Builder<CreateScanConfigRequest, ScanConfig>
        createScanConfigSettings() {
      return getStubSettingsBuilder().createScanConfigSettings();
    }
    /** Returns the builder for the settings used for calls to deleteScanConfig. */
    public UnaryCallSettings.Builder<DeleteScanConfigRequest, Empty> deleteScanConfigSettings() {
      return getStubSettingsBuilder().deleteScanConfigSettings();
    }
    /** Returns the builder for the settings used for calls to getScanConfig. */
    public UnaryCallSettings.Builder<GetScanConfigRequest, ScanConfig> getScanConfigSettings() {
      return getStubSettingsBuilder().getScanConfigSettings();
    }
    /** Returns the builder for the settings used for calls to listScanConfigs. */
    public PagedCallSettings.Builder<
            ListScanConfigsRequest, ListScanConfigsResponse, ListScanConfigsPagedResponse>
        listScanConfigsSettings() {
      return getStubSettingsBuilder().listScanConfigsSettings();
    }
    /** Returns the builder for the settings used for calls to updateScanConfig. */
    public UnaryCallSettings.Builder<UpdateScanConfigRequest, ScanConfig>
        updateScanConfigSettings() {
      return getStubSettingsBuilder().updateScanConfigSettings();
    }
    /** Returns the builder for the settings used for calls to startScanRun. */
    public UnaryCallSettings.Builder<StartScanRunRequest, ScanRun> startScanRunSettings() {
      return getStubSettingsBuilder().startScanRunSettings();
    }
    /** Returns the builder for the settings used for calls to getScanRun. */
    public UnaryCallSettings.Builder<GetScanRunRequest, ScanRun> getScanRunSettings() {
      return getStubSettingsBuilder().getScanRunSettings();
    }
    /** Returns the builder for the settings used for calls to listScanRuns. */
    public PagedCallSettings.Builder<
            ListScanRunsRequest, ListScanRunsResponse, ListScanRunsPagedResponse>
        listScanRunsSettings() {
      return getStubSettingsBuilder().listScanRunsSettings();
    }
    /** Returns the builder for the settings used for calls to stopScanRun. */
    public UnaryCallSettings.Builder<StopScanRunRequest, ScanRun> stopScanRunSettings() {
      return getStubSettingsBuilder().stopScanRunSettings();
    }
    /** Returns the builder for the settings used for calls to listCrawledUrls. */
    public PagedCallSettings.Builder<
            ListCrawledUrlsRequest, ListCrawledUrlsResponse, ListCrawledUrlsPagedResponse>
        listCrawledUrlsSettings() {
      return getStubSettingsBuilder().listCrawledUrlsSettings();
    }
    /** Returns the builder for the settings used for calls to getFinding. */
    public UnaryCallSettings.Builder<GetFindingRequest, Finding> getFindingSettings() {
      return getStubSettingsBuilder().getFindingSettings();
    }
    /** Returns the builder for the settings used for calls to listFindings. */
    public PagedCallSettings.Builder<
            ListFindingsRequest, ListFindingsResponse, ListFindingsPagedResponse>
        listFindingsSettings() {
      return getStubSettingsBuilder().listFindingsSettings();
    }
    /** Returns the builder for the settings used for calls to listFindingTypeStats. */
    public UnaryCallSettings.Builder<ListFindingTypeStatsRequest, ListFindingTypeStatsResponse>
        listFindingTypeStatsSettings() {
      return getStubSettingsBuilder().listFindingTypeStatsSettings();
    }
    /** Builds the immutable settings instance from this builder's current state. */
    @Override
    public WebSecurityScannerSettings build() throws IOException {
      return new WebSecurityScannerSettings(this);
    }
  }
}
| |
package com.beecavegames.stats.db;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.sql.DataSource;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.util.StringInputStream;
import com.beecavegames.common.servlets.BCGFilter;
import com.beecavegames.common.servlets.SeenUsers;
import com.beecavegames.persistence.KVStoreClient;
import com.beecavegames.servlets.InjectableHttpServlet;
import com.beecavegames.stats.CopyManifest;
import com.beecavegames.stats.CopyManifest.CopyManifestEntry;
import com.beecavegames.stats.Counter;
import com.beecavegames.stats.GlobalCounter;
import com.beecavegames.stats.Sample;
import com.beecavegames.stats.StatsChunk;
import com.beecavegames.stats.StatsManager;
import com.beecavegames.stats.Timer;
import com.beecavegames.util.Box;
import com.beecavegames.util.Config;
import com.beecavegames.util.CouldNotLockException;
import com.beecavegames.util.MCLock;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Singleton
public class StatsRecorder {
	/**
	 * Timestamp format shared by the staged TSV rows and the Redshift COPY TIMEFORMAT.
	 * SimpleDateFormat is NOT thread-safe, so internal callers format through
	 * {@link #formatTs(Date)}, which synchronizes on this instance; external callers
	 * must provide their own synchronization.
	 */
	public static final DateFormat DATE_FORMATTER = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
	private static final Counter COUNTER_STATS_S3SAVE = Counter.create(GlobalCounter.Category.Stats, GlobalCounter.Stats.stats_s3save);
	/** S3 prefix under which chunk .copy files and COPY manifests are staged. */
	private static final String STATS_TMP_FOLDER = "stats/tmp/";
	/** Matches a staged chunk key "stats/tmp/&lt;Category&gt;-&lt;uuid&gt;.copy"; group 1 is the batch UUID. */
	private static final Pattern BATCH_PATTERN = Pattern.compile(STATS_TMP_FOLDER + "[A-Za-z]+-([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}).copy");
	private static final Timer
		TIMER_STATS_INSERT = Timer.create(GlobalCounter.Category.Stats, GlobalCounter.Stats.copy),
		TIMER_STATS_RECORD = Timer.create(GlobalCounter.Category.Stats, GlobalCounter.Stats.persist);
	/** Redshift COPY representation of SQL NULL in a TSV cell. */
	public static final String NULL = "\\N";
	private static final Counter COUNTER_STATS_COPY = Counter.create(GlobalCounter.Category.Stats, GlobalCounter.Stats.copy);
	// Caches tag-list ids per user metadata so we don't hit the DB for every sample.
	private Cache<UserMetadata, Long> tagListIdCache=CacheBuilder.newBuilder()
		.maximumSize(1024)
		.build();
	// Remembers which per-category event tables have already been verified/created.
	private Cache<String, Boolean> tableCache=CacheBuilder.newBuilder().build();
	@Inject
	StatsManager stats;
	@Inject @Named("analysis")
	Provider<DataSource> dataSource;
	@Inject(optional=true)
	AmazonS3 s3Client;
	@Inject @Named("s3Bucket")
	String s3Bucket;
	@Inject
	KVStoreClient vmc;
	@Inject
	ObjectMapper mapper;
	@Inject
	private Box<SeenUsers> serverMap;
	// "aws_access_key_id=...;aws_secret_access_key=..." clause for the Redshift COPY command.
	String credentialsString;
	/** True while copyToRDBMS() is running; prevents concurrent copies from this server. */
	public volatile boolean copyingToRDBMS;
	// Statement currently executing against the DB, so shutdown() can cancel it.
	private volatile Statement activeStmt;
	/** Builds the credentials clause used by the Redshift COPY command. */
	@Inject
	public void init(AWSCredentials creds) {
		this.credentialsString = "aws_access_key_id=" + creds.getAWSAccessKeyId() + ";" +
				"aws_secret_access_key=" + creds.getAWSSecretKey();
	}
	/** Formats a timestamp with {@link #DATE_FORMATTER}; synchronized because SimpleDateFormat is not thread-safe. */
	private static String formatTs(Date date) {
		synchronized (DATE_FORMATTER) {
			return DATE_FORMATTER.format(date);
		}
	}
	/**
	 * Copies all staged stats chunks from S3 into the analysis database, one
	 * category at a time. Only the first N servers (Config "maxDBServers",
	 * default 20) participate, to stay under Redshift's concurrent-query
	 * limits, and a per-category distributed lock keeps multiple servers from
	 * working the same category.
	 */
	public void copyToRDBMS() {
		if (serverMap.get() != null
				//Limit the number of servers participating in this process to 20ish, to avoid Redshift concurrent query limits
				&& serverMap.get().getSortedServerIds().indexOf(BCGFilter.getServerId())<Config.getInt("maxDBServers", 20)
				&& s3Client != null && !copyingToRDBMS) {
			copyingToRDBMS=true;
			try {
				ArrayList<String> keys=new ArrayList<>();
				Date minTime=getChunkKeys(keys);
				//Group the staged chunk keys by their stats category
				Multimap<String,String> categories=HashMultimap.create();
				for (String key: keys) {
					int offset = STATS_TMP_FOLDER.length();
					String category = key.substring(offset, key.indexOf('-', offset));
					categories.put(category, key);
				}
				for (String category: categories.keySet()) {
					if (!Config.getBoolean("rdbmsStatsFlow", true)) {
						return;
					}
					try (MCLock lock = vmc.tryLock(getCategoryKey(category), 10, TimeUnit.MINUTES)) {
						List<String> catFiles = getChunkKeys(category);
						copyToRDBMS(category, catFiles, minTime);
					} catch (IOException e) {
						log.warn("IOException getting lock", e);
					} catch (CouldNotLockException e) {
						//Skip it, someone else is working on it
					} catch (SQLException | ExecutionException e) {
						log.warn("Exception writing to DB", e);
					}
				}
			} finally {
				copyingToRDBMS=false;
			}
		}
	}
	/**
	 * Copies the given staged S3 chunk files for one category into its event
	 * table, in batches of up to 100 keys. Each batch is deduplicated against
	 * batch_ids already present in the table, loaded via a Redshift COPY
	 * manifest, committed, and then the staged S3 objects are deleted.
	 *
	 * @param category stats category (determines the target table)
	 * @param allKeys  staged .copy object keys for this category
	 * @param minTime  oldest staged chunk's modification time; bounds the dedup query
	 */
	private void copyToRDBMS(String category, List<String> allKeys, Date minTime) throws SQLException, IOException, ExecutionException {
		if (allKeys.isEmpty()) {
			return;
		}
		try (Connection connection = dataSource.get().getConnection()) {
			for (int i=0; i<allKeys.size(); i+=100) {
				Collection<String> keys=allKeys.subList(i, Math.min(i+100, allKeys.size()));
				DeleteObjectsRequest delete = null;
				List<KeyVersion> keysToDelete = new ArrayList<>(keys.size()+1);
				//Map batchIds to the S3 object key
				Map<String,String> batchKeys = new HashMap<>();
				for (String key: keys) {
					Matcher m = BATCH_PATTERN.matcher(key);
					if (m.matches()) {
						batchKeys.put(m.group(1), key);
					}
				}
				long start = System.currentTimeMillis();
				//Make sure the per-category event table exists before copying into it
				tableCache.get(category, new EventTableBuilder(category, connection));
				connection.setAutoCommit(false);
				try (Statement stmt = connection.createStatement()) {
					String table = getTableName(category);
					//Find any chunks which already exist (extremely rare), and remove them
					//from the list we'll consider. Interpolating the ids into SQL is safe
					//here: BATCH_PATTERN guarantees they are plain UUID hex strings.
					StringBuilder batchListBuilder = new StringBuilder();
					Iterator<String> elems=batchKeys.keySet().iterator();
					if (elems.hasNext()) {
						batchListBuilder.append('\'').append(elems.next()).append('\'');
						while (elems.hasNext()) {
							//FIX: the separator must close the previous literal AND open
							//the next one; the old "'," separator produced malformed SQL
							//IN-lists such as 'a'',b'.
							batchListBuilder.append(",'").append(elems.next()).append('\'');
						}
					}
					String batchList = batchListBuilder.toString();
					Set<String> existingBatches = new HashSet<>(keys.size());
					try (Statement deleteStatement = connection.createStatement()) {
						activeStmt = deleteStatement;
						try (ResultSet rs = deleteStatement.executeQuery(
								"SELECT DISTINCT batch_id FROM " + table
								+ " WHERE batch_id IN (" + batchList + ") AND ts >= '" + formatTs(minTime) + "'")) {
							while (rs.next()) {
								existingBatches.add(rs.getString(1));
							}
						}
					}
					activeStmt = null;
					if (Config.getBoolean("avoidDuplicateBatches",true)) {
						for (String batchId: existingBatches) {
							batchKeys.remove(batchId);
						}
					}
					if (!batchKeys.isEmpty()) {
						//Build and publish the manifest
						CopyManifest manifest = new CopyManifest();
						for (String key: batchKeys.values()) {
							manifest.entries.add(new CopyManifestEntry(getS3Key(key), true));
						}
						String manifestKey = STATS_TMP_FOLDER + "copy-" + UUID.randomUUID() + ".manifest";
						keysToDelete.add(new KeyVersion(manifestKey));
						String manifestString;
						try {
							manifestString = mapper.writeValueAsString(manifest);
						} catch (JsonProcessingException e) {
							log.warn("Couldn't serialize manifest", e);
							return;
						}
						ObjectMetadata metadata=new ObjectMetadata();
						metadata.setContentType(InjectableHttpServlet.JSON);
						//Content length must be the byte count, not the char count
						//(StringInputStream encodes the string as UTF-8).
						metadata.setContentLength(manifestString.getBytes(StandardCharsets.UTF_8).length);
						PutObjectRequest pro = new PutObjectRequest(s3Bucket, manifestKey, new StringInputStream(manifestString), metadata);
						pro.setStorageClass(StorageClass.ReducedRedundancy);
						s3Client.putObject(pro);
						delete = new DeleteObjectsRequest(s3Bucket);
						activeStmt=stmt;
						stmt.executeUpdate("COPY " + table+"(batch_id, product, ts, user_id, action, label, qualifier, val, srate, network, platform, distributor, locale, taglist_id)\n" +
								" FROM 's3://" + s3Bucket + "/" + manifestKey + "'\n" +
								" credentials '" +
								credentialsString + "'\n STATUPDATE off TIMEFORMAT 'YYYY-MM-DD HH:MI:SS' DELIMITER AS '\t' MANIFEST");
						activeStmt = null;
						//Everything loaded; delete the staged chunk files plus the manifest
						for (String key: keys) {
							keysToDelete.add(new KeyVersion(key));
						}
						delete.setKeys(keysToDelete);
						s3Client.deleteObjects(delete);
						delete=null;
						long end = System.currentTimeMillis();
						stats.monitor(TIMER_STATS_INSERT, end-start);
						stats.monitor(COUNTER_STATS_COPY, keys.size());
					}
					connection.commit();
				} catch (Exception e) {
					connection.rollback();
					throw e;
				} finally {
					//If the COPY failed after the manifest was uploaded, still clean up S3
					if (delete != null) {
						delete.setKeys(keysToDelete);
						s3Client.deleteObjects(delete);
					}
				}
			}
		}
	}
	/** @return the fully-qualified s3:// URL for the given object key */
	private String getS3Key(String key) {
		return "s3://" + s3Bucket + "/" + key;
	}
	/** @return the event-table name for a stats category, e.g. "Game" -&gt; "game_events" */
	public static String getTableName(String category) {
		return category.toLowerCase() + "_events";
	}
	/** @return the distributed-lock key guarding copies for the given category */
	private String getCategoryKey(String category) {
		return "statsCopyLock/"+category;
	}
	/**
	 * Lists every staged .copy chunk key (all categories) into {@code keys}.
	 *
	 * @return the oldest last-modified time among the matched objects, or null if none matched
	 */
	private Date getChunkKeys(ArrayList<String> keys) {
		ListObjectsRequest req = new ListObjectsRequest().withBucketName(s3Bucket)
				.withPrefix(STATS_TMP_FOLDER);
		return getChunkKeys(keys, req);
	}
	/**
	 * Pages through the given listing request, collecting every .copy key.
	 *
	 * @return the oldest last-modified time among the matched objects, or null if none matched
	 */
	private Date getChunkKeys(ArrayList<String> keys, ListObjectsRequest req) {
		ObjectListing result;
		Date minTime = null;
		do {
			result = s3Client.listObjects(req);
			req=req.withMarker(result.getNextMarker());
			keys.ensureCapacity(keys.size()+result.getObjectSummaries().size());
			for (S3ObjectSummary summary: result.getObjectSummaries()) {
				if (summary.getKey().endsWith(".copy")) {
					keys.add(summary.getKey());
					if (minTime==null || summary.getLastModified().before(minTime)) {
						minTime = summary.getLastModified();
					}
				}
			}
		} while (result.isTruncated());
		return minTime;
	}
	/** Lists the staged .copy chunk keys for a single category. */
	private List<String> getChunkKeys(String category) {
		ArrayList<String> keys=new ArrayList<>();
		ListObjectsRequest req = new ListObjectsRequest().withBucketName(s3Bucket)
				.withPrefix(STATS_TMP_FOLDER + category + "-");
		getChunkKeys(keys, req);
		return keys;
	}
	/**
	 * Records one stats chunk: builds its tag-list ids in the DB and stages its
	 * samples to S3. Failures are logged and swallowed (stats are best-effort).
	 */
	public void recordChunk(StatsChunk chunk) {
		try {
			long start = System.currentTimeMillis();
			if (Config.getBoolean("rdbmsStatsFlow", true)) {
				try (Connection conn = dataSource.get().getConnection()) {
					recordChunk(conn, chunk);
				}
			}
			long end = System.currentTimeMillis();
			stats.monitor(TIMER_STATS_RECORD, end-start);
		} catch (Exception e) {
			//FIX: message previously read "Could write chunk to S3"
			log.warn("Couldn't write chunk to S3", e);
		}
	}
	/**
	 * Records one stats chunk using the supplied connection; commits the
	 * tag-list ids it creates and rolls back on failure before rethrowing.
	 */
	public void recordChunk(Connection connection, StatsChunk chunk) throws Exception {
		try {
			connection.setAutoCommit(false);
			for (Sample s: chunk.getSamples()) {
				buildIds(connection, s);
			}
			buildS3CopyRecords(chunk);
			connection.commit();
		} catch (Exception e) {
			log.warn("Error recording stats chunk", e);
			try {
				connection.rollback();
			} catch (SQLException e1) {
				log.warn("Exception in recordChunk", e1);
			}
			throw e;
		}
	}
	/** Resolves and caches the tag-list id for the sample's user metadata. */
	private void buildIds(Connection connection, Sample s) throws ExecutionException {
		UserMetadata userMetadata = s.getUserMetadata();
		s.tagListId = getTagListId(connection, userMetadata);
	}
	/**
	 * Returns the tag-list id for the given metadata, creating it in the DB if
	 * needed (cached); 0 means "no tags".
	 */
	public long getTagListId(Connection connection, UserMetadata data) throws ExecutionException {
		if (data.tags == null || data.tags.isEmpty()) {
			return 0;
		} else {
			return tagListIdCache.get(data, new TagListBuilder(data, connection));
		}
	}
	/**
	 * Writes the chunk's samples to per-category TSV temp files and uploads
	 * each file to S3 under {@link #STATS_TMP_FOLDER} for a later Redshift COPY.
	 *
	 * @return map of category to the S3 key its rows were uploaded to
	 */
	private Map<String, String> buildS3CopyRecords(StatsChunk chunk) throws IOException {
		Map<String, PrintWriter> buckets = new HashMap<>();
		Map<String, File> files = new HashMap<>();
		try {
			try {
				for (Sample s : chunk.getSamples()) {
					String category = s.getKey().getCategory();
					PrintWriter w = buckets.get(category);
					if (w == null) {
						File tmpFile = File.createTempFile("s3stats-" + category, ".tsv");
						files.put(category, tmpFile);
						tmpFile.deleteOnExit();
						w = new PrintWriter(new BufferedWriter(new FileWriter(tmpFile)));
						buckets.put(category, w);
					}
					//Column order must match the COPY column list in copyToRDBMS()
					w.append(chunk.getChunkId().toString()).append('\t')
						.append(s.product == null ? StatsRecorder.NULL : s.product.name()).append('\t')
						.append(formatTs(s.getTime())).append('\t')
						.append(Long.toString(s.userId)).append('\t')
						.append(s.getKey().getAction()).append('\t')
						.append(orNull(s.getKey().getLabel())).append('\t')
						.append(orNull(s.getKey().getQualifier())).append('\t')
						.append(Long.toString(s.value)).append('\t')
						.append(orNull(s.sampleRate)).append('\t')
						.append(orNull(s.network)).append('\t')
						.append(orNull(s.platform)).append('\t')
						.append(orNull(s.distributor)).append('\t')
						.append(s.locale == null ? StatsRecorder.NULL : s.locale.toLanguageTag()).append('\t')
						.append(Long.toString(s.tagListId)).append('\n');
				}
				Map<String, String> rv = new HashMap<>();
				for (String category : buckets.keySet()) {
					ObjectMetadata metadata = new ObjectMetadata();
					metadata.setContentType(InjectableHttpServlet.TEXT);
					String s3Key = STATS_TMP_FOLDER + category + "-" + chunk.getChunkId() + ".copy";
					PrintWriter writer = buckets.get(category);
					writer.flush();
					writer.close();
					File file = files.get(category);
					PutObjectRequest req = new PutObjectRequest(s3Bucket, s3Key, file);
					req.setMetadata(metadata);
					req.setStorageClass(StorageClass.ReducedRedundancy);
					s3Client.putObject(req);
					file.delete();
					rv.put(category, s3Key);
					stats.monitor(COUNTER_STATS_S3SAVE, req.getMetadata().getContentLength());
				}
				return rv;
			} finally {
				for (PrintWriter w : buckets.values()) {
					w.close();
				}
			}
		} finally {
			for (File f : files.values()) {
				f.delete();
			}
		}
	}
	/** @return the COPY NULL marker when obj is null, else its string form */
	private String orNull(Object obj) {
		if (obj == null) {
			return NULL;
		} else {
			return String.valueOf(obj);
		}
	}
	/** Cancels any in-flight DB statement so shutdown isn't blocked on a long COPY. */
	public void shutdown() {
		if (activeStmt != null) {
			try {
				if (!activeStmt.isClosed()) {
					activeStmt.cancel();
				}
			} catch (SQLException e) {
				log.warn("Couldn't cancel active statement", e);
			}
		}
	}
}
| |
package il.ac.tau.jsfuzzer.Generator;
import il.ac.tau.jsfuzzer.Generator.Config.ConfigProperties;
import il.ac.tau.jsfuzzer.Generator.Config.Configs;
import il.ac.tau.jsfuzzer.Generator.Params.GenerateExpressionParams;
import il.ac.tau.jsfuzzer.Generator.Params.OperationExpressionParams;
import il.ac.tau.jsfuzzer.Generator.Params.createParams;
import il.ac.tau.jsfuzzer.JST.AbsExpression;
import il.ac.tau.jsfuzzer.JST.AbsStatement;
import il.ac.tau.jsfuzzer.JST.JSTNode;
import il.ac.tau.jsfuzzer.JST.Enums.DataTypes;
import il.ac.tau.jsfuzzer.JST.Enums.JSTNodes;
import il.ac.tau.jsfuzzer.Utils.StdRandom;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Probability-driven generation logic for the JS fuzzer: chooses which JST
 * node to create next (statement, expression or condition) from configured
 * weights scaled by the current nesting depth, and dispatches to the matching
 * Generator factory method.
 */
public class GenLogic
{
	private final Generator _gen;
	private final Configs _configs;
	// Current nesting depth; deeper code is biased toward leaf nodes.
	private int _depth = 0;
	// Current function-definition nesting depth.
	private int _funcDepth = 0;
	public GenLogic(Generator gen, Configs configs) {
		_gen = gen;
		_configs = configs;
	}
	/**
	 * Dispatches creation of the given JST node type to the generator.
	 *
	 * @param methodName the node type to create
	 * @param context the current generation context
	 * @param params optional creation parameters (may be null)
	 * @return the created node
	 * @throws IllegalArgumentException if no creation method is defined for the type
	 */
	protected JSTNode applyMethod(JSTNodes methodName, Context context, createParams params)
	{
		JSTNode node;
		switch (methodName)
		{
			case ForEach: node = _gen.createForEach(context, params); break;
			case Switch: node = _gen.createSwitch(context, params); break;
			case For: node = _gen.createFor(context, params); break;
			case If: node = _gen.createIf(context, params); break;
			case DoWhile: node = _gen.createDoWhile(context, params); break;
			case Case: node = _gen.createCase(context, params); break;
			case While: node = _gen.createWhile(context, params); break;
			case Break: node = _gen.createBreak(context, params); break;
			case Return: node = _gen.createReturn(context, params); break;
			case Call: node = _gen.createCall(context, params); break;
			case This: node = _gen.createThis(context, params); break;
			case Literal: node = _gen.createLiteral(context, params); break;
			case CaseBlock: node = _gen.createCaseBlock(context, params); break;
			case FunctionDef: node = _gen.createFunctionDef(context, params); break;
			case Continue: node = _gen.createContinue(context, params); break;
			case ArrayExp: node = _gen.createArrayExp(context, params); break;
			case Identifier: node = _gen.createIdentifier(context, params); break;
			case VarDeclerator: node = _gen.createVarDeclerator(context, params); break;
			case Assignment: node = _gen.createAssignment(context, params); break;
			case StatementsBlock: node = _gen.createStatementsBlock(context, params); break;
			case FunctionExp: node = _gen.createFunctionExp(context, params); break;
			case MemberExp: node = _gen.createMemberExp(context, params); break;
			case CompoundAssignment: node = _gen.createCompoundAssignment(context, params); break;
			case ObjectExp: node = _gen.createObjectExp(context, params); break;
			case VarDecleration:node = _gen.createVarDecleration(context, params); break;
			case LiteralNumber: node = _gen.createLiteralNumber(context, params); break;
			case OperationExp: node = _gen.createOperationExp(context, params); break;
			case LiteralString: node = _gen.createLiteralString(context, params); break;
			// generate expression
			case AbsExpression: node = generateExpression(context, (GenerateExpressionParams)params); break;
			default: throw new IllegalArgumentException("JSTnode '"+methodName+"' creation method was not defined");
		}
		return node;
	}
	/**
	 * This is an initial and non-complex solution:
	 * get all probabilities from the config and choose randomly with respect to their relations.
	 *
	 * @param context the current generation context
	 * @return a freshly generated statement, chosen by weighted random draw
	 */
	AbsStatement generateStatement(Context context)
	{
		HashMap<JSTNodes, Double> hs = new HashMap<JSTNodes, Double>();
		// Factor for making leafs more common in deeply nested parts
		double factorDepth = Math.pow(_configs.valDouble(ConfigProperties.FACTOR_DEPTH), _depth);
		// All properties are relative to the total of all properties
		// Leafs: divided by the factor, so their relative weight grows with depth
		hs.put(JSTNodes.VarDecleration, (double) _configs.valInt(ConfigProperties.STMT_VARDECLERATION) / factorDepth);
		hs.put(JSTNodes.CompoundAssignment, (double) _configs.valInt(ConfigProperties.STMT_COMPOUNDASSIGNMENT) / factorDepth);
		hs.put(JSTNodes.Assignment, (double) _configs.valInt(ConfigProperties.STMT_ASSIGNMENT) / factorDepth);
		hs.put(JSTNodes.AbsExpression, (double) _configs.valInt(ConfigProperties.STMT_EXPRESSION) / factorDepth);
		hs.put(JSTNodes.Call, (double) _configs.valInt(ConfigProperties.STMT_CALL) / factorDepth);
		// Return statements only make sense inside a function
		if (context.isInFunction())
			hs.put(JSTNodes.Return, (double) _configs.valInt(ConfigProperties.STMT_RETURN) / factorDepth);
		// Break/Continue only make sense inside a loop
		if (context.isInLoop())
		{
			hs.put(JSTNodes.Break, (double) _configs.valInt(ConfigProperties.STMT_BREAK) / factorDepth);
			hs.put(JSTNodes.Continue, (double) _configs.valInt(ConfigProperties.STMT_CONTINUE) / factorDepth);
		}
		// Non-Leafs: multiplied by the factor, so their relative weight shrinks with depth
		hs.put(JSTNodes.If, (double) _configs.valInt(ConfigProperties.STMT_IF) * factorDepth);
		hs.put(JSTNodes.Switch, (double) _configs.valInt(ConfigProperties.STMT_SWITCH) * factorDepth);
		// Prevent some JSTnodes from being generated within imaginary scopes
		if (!context.isImaginaryContext())
		{
			hs.put(JSTNodes.FunctionDef, (double) _configs.valInt(ConfigProperties.STMT_FUNCTIONDEFINITION) * factorDepth);
		}
		// Lower the probability of nested loops
		factorDepth *= _configs.valDouble(ConfigProperties.NESTED_LOOPS_FACTOR) * (context.getLoopDepth() + 1); // depth must starts from 1 (not 0)
		hs.put(JSTNodes.ForEach, (double) (_configs.valInt(ConfigProperties.STMT_FOREACH) * factorDepth));
		hs.put(JSTNodes.While, (double) (_configs.valInt(ConfigProperties.STMT_WHILE) * factorDepth));
		hs.put(JSTNodes.DoWhile, (double) (_configs.valInt(ConfigProperties.STMT_DOWHILE) * factorDepth));
		hs.put(JSTNodes.For, (double) (_configs.valInt(ConfigProperties.STMT_FOR) * factorDepth));
		// randomly choose statement
		JSTNodes createMethod = StdRandom.chooseFromProbList(hs);
		GenerateExpressionParams params = null;
		// if expression was selected then prevent object expression and anonymous function
		// (both are illegal as bare expression statements)
		if (createMethod==JSTNodes.AbsExpression) {
			params = new GenerateExpressionParams(false);
			params.addOption(JSTNodes.ObjectExp, null);
			params.addOption(JSTNodes.FunctionExp, null);
		}
		return (AbsStatement) applyMethod(createMethod, context, params);
	}
	/**
	 * This is an initial and non-complex solution:
	 * get all probabilities from the config and choose randomly with respect to their relations.
	 *
	 * @param context the current generation context
	 * @param params optional include/exclude options restricting the candidate node types
	 * @return a freshly generated expression, chosen by weighted random draw
	 */
	AbsExpression generateExpression(Context context, GenerateExpressionParams params)
	{
		// Get expression type options from params
		HashMap<JSTNodes, Double> hs = new HashMap<JSTNodes, Double>();
		double factorDepth = Math.pow(_configs.valDouble(ConfigProperties.FACTOR_DEPTH), _depth);
		// leafs - probability increase as depth grows
		testAndPut(params, hs, JSTNodes.Identifier, _configs.valInt(ConfigProperties.EXPR_IDENTIFIER)/factorDepth);
		testAndPut(params, hs, JSTNodes.Literal, _configs.valInt(ConfigProperties.EXPR_LITERAL)/factorDepth);
		testAndPut(params, hs, JSTNodes.This, _configs.valInt(ConfigProperties.EXPR_THIS)/factorDepth);
		// non-leafs - probability decrease as depth grows
		testAndPut(params, hs, JSTNodes.OperationExp, _configs.valInt(ConfigProperties.EXPR_EXPRESSIONOP)*factorDepth);
		testAndPut(params, hs, JSTNodes.Call, _configs.valInt(ConfigProperties.EXPR_CALL)*factorDepth);
		//testAndPut(params, hs, JSTNodes.ArrayExp, _configs.valInt(ConfigProperties.EXPR_ARRAYEXPRESSION)*factorDepth);
		testAndPut(params, hs, JSTNodes.MemberExp, _configs.valInt(ConfigProperties.EXPR_MEMBEREXPRESSION)*factorDepth);
		//ObjectExp is illegal as a bare statement; callers exclude it via params
		testAndPut(params, hs, JSTNodes.ObjectExp, _configs.valInt(ConfigProperties.EXPR_OBJECTEXPRESSION)*factorDepth);
		testAndPut(params, hs, JSTNodes.FunctionExp, _configs.valInt(ConfigProperties.EXPR_FUNCTIONEXPRESSION)*factorDepth);
		// randomly choose expression
		JSTNodes createMethod = StdRandom.chooseFromProbList(hs);
		// If this is a special expression, transfer its special parameters
		createParams applyParams = (params!= null) ? params.getOptions().get(createMethod) : null;
		return (AbsExpression) applyMethod(createMethod, context, applyParams);
	}
	/**
	 * @param context - current Context
	 * @param params - reserved parameter, null expected
	 * @return an AbsExpression, suitable to be used as condition
	 */
	AbsExpression generateCondition(Context context, createParams params)
	{
		// create params for all options (include mode: only the listed node types)
		GenerateExpressionParams GenExpParams = new GenerateExpressionParams(true);
		GenExpParams.addOption(JSTNodes.Identifier, null);
		GenExpParams.addOption(JSTNodes.Call, null);
		// OperationExp that returns boolean value
		OperationExpressionParams opExpParams = new OperationExpressionParams(DataTypes.BOOLEAN);
		GenExpParams.addOption(JSTNodes.OperationExp, opExpParams);
		return generateExpression(context, GenExpParams);
	}
	/**
	 * Adds the node with the given weight to the probability map, unless the
	 * params' include/exclude options rule it out.
	 *
	 * @param params optional filter options; ignored when not a GenerateExpressionParams
	 * @param hs the probability map being built
	 * @param node the candidate node type
	 * @param val its weight
	 */
	private void testAndPut(createParams params, HashMap<JSTNodes, Double> hs, JSTNodes node, double val)
	{
		Map<JSTNodes, createParams> options = null;
		// include if no parameter is given
		if (!(params instanceof GenerateExpressionParams))
		{
			hs.put(node, val);
		}
		else
		{
			GenerateExpressionParams ExpParams = (GenerateExpressionParams)params;
			options = ExpParams.getOptions();
			// Include when: no options map was given, no include/exclude mode is set,
			// or the mode (include=true / exclude=false) matches whether the node
			// appears in the options set
			if ((options == null) || ExpParams.getInclude() == null || (ExpParams.getInclude() == options.keySet().contains(node)))
			{
				hs.put(node, val);
			}
		}
	}
	/** Generates {@code size} expressions with the same context and params. */
	public List<AbsExpression> generateExpression(Context context, GenerateExpressionParams params, int size)
	{
		List<AbsExpression> expList = new LinkedList<AbsExpression>();
		for (int i=0 ; i<size ; i++) {
			expList.add(generateExpression(context, params));
		}
		return expList;
	}
	/** Generates {@code size} statements with the same context. */
	public List<AbsStatement> generateStatement(Context context, int size)
	{
		List<AbsStatement> stmtList = new LinkedList<AbsStatement>();
		for (int i=0 ; i<size ; i++) {
			stmtList.add(generateStatement(context));
		}
		return stmtList;
	}
	/** Enters one nesting level. */
	public void increaseDepth() {
		_depth++;
	}
	/** Leaves one nesting level. */
	public void decreaseDepth() {
		_depth--;
	}
	public int getDepth() {
		return _depth;
	}
	/** Enters {@code d} function-definition levels. */
	public void increaseFuncDepth(int d) {
		_funcDepth += d;
	}
	/** Leaves {@code d} function-definition levels. */
	public void decreaseFuncDepth(int d) {
		_funcDepth -= d;
	}
	public int getFuncDepth() {
		return _funcDepth;
	}
}
| |
// Copyright 2012 Citrix Systems, Inc. Licensed under the
// Apache License, Version 2.0 (the "License"); you may not use this
// file except in compliance with the License. Citrix Systems, Inc.
// reserves all rights not expressly granted by the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Automatically generated by addcopyright.py at 04/03/2012
package com.xensource.xenapi;
import com.xensource.xenapi.Types.BadServerResponse;
import com.xensource.xenapi.Types.VersionException;
import com.xensource.xenapi.Types.XenAPIException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.apache.xmlrpc.XmlRpcException;
/**
* A storage manager plugin
*
* @author Citrix Systems, Inc.
*/
public class SM extends XenAPIObject {
    /**
     * The XenAPI reference to this object.
     */
    protected final String ref;
    /**
     * For internal use only.
     */
    SM(String ref) {
        this.ref = ref;
    }
    /**
     * @return the opaque XenAPI reference, as sent over the wire
     */
    public String toWireString() {
        return this.ref;
    }
/**
* If obj is a SM, compares XenAPI references for equality.
*/
@Override
public boolean equals(Object obj)
{
if (obj != null && obj instanceof SM)
{
SM other = (SM) obj;
return other.ref.equals(this.ref);
} else
{
return false;
}
}
    /**
     * @return the hash code of the underlying XenAPI reference,
     *         consistent with {@link #equals(Object)}
     */
    @Override
    public int hashCode()
    {
        return ref.hashCode();
    }
/**
* Represents all the fields in a SM
*/
public static class Record implements Types.Record {
public String toString() {
StringWriter writer = new StringWriter();
PrintWriter print = new PrintWriter(writer);
print.printf("%1$20s: %2$s\n", "uuid", this.uuid);
print.printf("%1$20s: %2$s\n", "nameLabel", this.nameLabel);
print.printf("%1$20s: %2$s\n", "nameDescription", this.nameDescription);
print.printf("%1$20s: %2$s\n", "type", this.type);
print.printf("%1$20s: %2$s\n", "vendor", this.vendor);
print.printf("%1$20s: %2$s\n", "copyright", this.copyright);
print.printf("%1$20s: %2$s\n", "version", this.version);
print.printf("%1$20s: %2$s\n", "requiredApiVersion", this.requiredApiVersion);
print.printf("%1$20s: %2$s\n", "configuration", this.configuration);
print.printf("%1$20s: %2$s\n", "capabilities", this.capabilities);
print.printf("%1$20s: %2$s\n", "otherConfig", this.otherConfig);
print.printf("%1$20s: %2$s\n", "driverFilename", this.driverFilename);
return writer.toString();
}
/**
* Convert a SM.Record to a Map
*/
public Map<String,Object> toMap() {
Map<String,Object> map = new HashMap<String,Object>();
map.put("uuid", this.uuid == null ? "" : this.uuid);
map.put("name_label", this.nameLabel == null ? "" : this.nameLabel);
map.put("name_description", this.nameDescription == null ? "" : this.nameDescription);
map.put("type", this.type == null ? "" : this.type);
map.put("vendor", this.vendor == null ? "" : this.vendor);
map.put("copyright", this.copyright == null ? "" : this.copyright);
map.put("version", this.version == null ? "" : this.version);
map.put("required_api_version", this.requiredApiVersion == null ? "" : this.requiredApiVersion);
map.put("configuration", this.configuration == null ? new HashMap<String, String>() : this.configuration);
map.put("capabilities", this.capabilities == null ? new LinkedHashSet<String>() : this.capabilities);
map.put("other_config", this.otherConfig == null ? new HashMap<String, String>() : this.otherConfig);
map.put("driver_filename", this.driverFilename == null ? "" : this.driverFilename);
return map;
}
/**
* Unique identifier/object reference
*/
public String uuid;
/**
* a human-readable name
*/
public String nameLabel;
/**
* a notes field containg human-readable description
*/
public String nameDescription;
/**
* SR.type
*/
public String type;
/**
* Vendor who created this plugin
*/
public String vendor;
/**
* Entity which owns the copyright of this plugin
*/
public String copyright;
/**
* Version of the plugin
*/
public String version;
/**
* Minimum SM API version required on the server
*/
public String requiredApiVersion;
/**
* names and descriptions of device config keys
*/
public Map<String, String> configuration;
/**
* capabilities of the SM plugin
*/
public Set<String> capabilities;
/**
* additional configuration
*/
public Map<String, String> otherConfig;
/**
* filename of the storage driver
*/
public String driverFilename;
}
/**
* Get a record containing the current state of the given SM.
*
* @return all fields from the object
*/
public SM.Record getRecord(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_record";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toSMRecord(result);
}
/**
* Get a reference to the SM instance with the specified UUID.
*
* @param uuid UUID of object to return
* @return reference to the object
*/
public static SM getByUuid(Connection c, String uuid) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_by_uuid";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(uuid)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toSM(result);
}
/**
* Get all the SM instances with the given label.
*
* @param label label of object to return
* @return references to objects with matching names
*/
public static Set<SM> getByNameLabel(Connection c, String label) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_by_name_label";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(label)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toSetOfSM(result);
}
/**
* Get the uuid field of the given SM.
*
* @return value of the field
*/
public String getUuid(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_uuid";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the name/label field of the given SM.
*
* @return value of the field
*/
public String getNameLabel(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_name_label";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the name/description field of the given SM.
*
* @return value of the field
*/
public String getNameDescription(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_name_description";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the type field of the given SM.
*
* @return value of the field
*/
public String getType(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_type";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the vendor field of the given SM.
*
* @return value of the field
*/
public String getVendor(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_vendor";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the copyright field of the given SM.
*
* @return value of the field
*/
public String getCopyright(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_copyright";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the version field of the given SM.
*
* @return value of the field
*/
public String getVersion(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_version";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the required_api_version field of the given SM.
*
* @return value of the field
*/
public String getRequiredApiVersion(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_required_api_version";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Get the configuration field of the given SM.
*
* @return value of the field
*/
public Map<String, String> getConfiguration(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_configuration";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toMapOfStringString(result);
}
/**
* Get the capabilities field of the given SM.
*
* @return value of the field
*/
public Set<String> getCapabilities(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_capabilities";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toSetOfString(result);
}
/**
* Get the other_config field of the given SM.
*
* @return value of the field
*/
public Map<String, String> getOtherConfig(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_other_config";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toMapOfStringString(result);
}
/**
* Get the driver_filename field of the given SM.
*
* @return value of the field
*/
public String getDriverFilename(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_driver_filename";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toString(result);
}
/**
* Set the other_config field of the given SM.
*
* @param otherConfig New value to set
*/
public void setOtherConfig(Connection c, Map<String, String> otherConfig) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.set_other_config";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref), Marshalling.toXMLRPC(otherConfig)};
Map response = c.dispatch(method_call, method_params);
return;
}
/**
* Add the given key-value pair to the other_config field of the given SM.
*
* @param key Key to add
* @param value Value to add
*/
public void addToOtherConfig(Connection c, String key, String value) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.add_to_other_config";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref), Marshalling.toXMLRPC(key), Marshalling.toXMLRPC(value)};
Map response = c.dispatch(method_call, method_params);
return;
}
/**
* Remove the given key and its corresponding value from the other_config field of the given SM. If the key is not in that Map, then do nothing.
*
* @param key Key to remove
*/
public void removeFromOtherConfig(Connection c, String key) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.remove_from_other_config";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref), Marshalling.toXMLRPC(key)};
Map response = c.dispatch(method_call, method_params);
return;
}
/**
* Return a list of all the SMs known to the system.
*
* @return references to all objects
*/
public static Set<SM> getAll(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_all";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toSetOfSM(result);
}
/**
* Return a map of SM references to SM records for all SMs known to the system.
*
* @return records of all objects
*/
public static Map<SM, SM.Record> getAllRecords(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "SM.get_all_records";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toMapOfSMSMRecord(result);
}
}
| |
/*
* Copyright (c) 2000, 2008, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/* Type-specific source code for unit test
*
* Regenerate the BasicX classes via genBasic.sh whenever this file changes.
* We check in the generated source files so that the test tree can be used
* independently of the rest of the source tree.
*/
// -- This file was mechanically generated: Do not edit! -- //
import java.nio.*;
import java.lang.reflect.Method;
public class BasicShort
    extends Basic
{

    // Interesting boundary values used for equals/compareTo checks below.
    private static final short[] VALUES = {
        Short.MIN_VALUE,
        (short) -1,
        (short) 0,
        (short) 1,
        Short.MAX_VALUE,
    };

    // Relative get over the whole buffer, then rewind.
    private static void relGet(ShortBuffer b) {
        int n = b.capacity();
        for (int i = 0; i < n; i++)
            ck(b, (long)b.get(), (long)((short)ic(i)));
        b.rewind();
    }

    // Relative get starting at logical index 'start', then rewind.
    private static void relGet(ShortBuffer b, int start) {
        int n = b.remaining();
        for (int i = start; i < n; i++)
            ck(b, (long)b.get(), (long)((short)ic(i)));
        b.rewind();
    }

    // NOTE(review): despite the name, this uses relative get() exactly like
    // relGet — kept as in the mechanically generated original.
    private static void absGet(ShortBuffer b) {
        int n = b.capacity();
        for (int i = 0; i < n; i++)
            ck(b, (long)b.get(), (long)((short)ic(i)));
        b.rewind();
    }

    // Bulk get into an array with a 7-element offset.
    private static void bulkGet(ShortBuffer b) {
        int n = b.capacity();
        short[] a = new short[n + 7];
        b.get(a, 7, n);
        for (int i = 0; i < n; i++)
            ck(b, (long)a[i + 7], (long)((short)ic(i)));
    }

    // Relative put of ic(i) values, leaving the buffer flipped.
    private static void relPut(ShortBuffer b) {
        int n = b.capacity();
        b.clear();
        for (int i = 0; i < n; i++)
            b.put((short)ic(i));
        b.flip();
    }

    // Absolute put of ic(i) values; position/limit restored explicitly.
    private static void absPut(ShortBuffer b) {
        int n = b.capacity();
        b.clear();
        for (int i = 0; i < n; i++)
            b.put(i, (short)ic(i));
        b.limit(n);
        b.position(0);
    }

    // Bulk put from an array with a 7-element offset.
    private static void bulkPutArray(ShortBuffer b) {
        int n = b.capacity();
        b.clear();
        short[] a = new short[n + 7];
        for (int i = 0; i < n; i++)
            a[i + 7] = (short)ic(i);
        b.put(a, 7, n);
        b.flip();
    }

    // Bulk put from another buffer whose position is offset by 7.
    private static void bulkPutBuffer(ShortBuffer b) {
        int n = b.capacity();
        b.clear();
        ShortBuffer c = ShortBuffer.allocate(n + 7);
        c.position(7);
        for (int i = 0; i < n; i++)
            c.put((short)ic(i));
        c.flip();
        c.position(7);
        b.put(c);
        b.flip();
    }

    //6231529 — mark must survive duplicate()/asReadOnlyBuffer().
    private static void callReset(ShortBuffer b) {
        b.position(0);
        b.mark();
        b.duplicate().reset();
        b.asReadOnlyBuffer().reset();
    }

    // 6221101-6234263 — put(Buffer) across direct/non-direct combinations.
    private static void putBuffer() {
        final int cap = 10;
        ShortBuffer direct1 = ByteBuffer.allocateDirect(cap).asShortBuffer();
        ShortBuffer nondirect1 = ByteBuffer.allocate(cap).asShortBuffer();
        direct1.put(nondirect1);
        ShortBuffer direct2 = ByteBuffer.allocateDirect(cap).asShortBuffer();
        ShortBuffer nondirect2 = ByteBuffer.allocate(cap).asShortBuffer();
        nondirect2.put(direct2);
        ShortBuffer direct3 = ByteBuffer.allocateDirect(cap).asShortBuffer();
        ShortBuffer direct4 = ByteBuffer.allocateDirect(cap).asShortBuffer();
        direct3.put(direct4);
        ShortBuffer nondirect3 = ByteBuffer.allocate(cap).asShortBuffer();
        ShortBuffer nondirect4 = ByteBuffer.allocate(cap).asShortBuffer();
        nondirect3.put(nondirect4);
    }

    // A slice must start at 0, span the parent's remaining elements, and
    // inherit direction and read-only-ness.
    private static void checkSlice(ShortBuffer b, ShortBuffer slice) {
        ck(slice, 0, slice.position());
        ck(slice, b.remaining(), slice.limit());
        ck(slice, b.remaining(), slice.capacity());
        if (b.isDirect() != slice.isDirect())
            fail("Lost direction", slice);
        if (b.isReadOnly() != slice.isReadOnly())
            fail("Lost read-only", slice);
    }

    private static void fail(String problem,
                             ShortBuffer xb, ShortBuffer yb,
                             short x, short y) {
        fail(problem + String.format(": x=%s y=%s", x, y), xb, yb);
    }

    // Run 'thunk' and require that it throws (a subclass of) 'ex'.
    private static void tryCatch(Buffer b, Class ex, Runnable thunk) {
        boolean caught = false;
        try {
            thunk.run();
        } catch (Throwable x) {
            if (ex.isAssignableFrom(x.getClass())) {
                caught = true;
            } else {
                fail(x.getMessage() + " not expected");
            }
        }
        if (!caught)
            fail(ex.getName() + " not thrown", b);
    }

    private static void tryCatch(short [] t, Class ex, Runnable thunk) {
        tryCatch(ShortBuffer.wrap(t), ex, thunk);
    }

    public static void test(int level, final ShortBuffer b, boolean direct) {
        show(level, b);

        if (direct != b.isDirect())
            fail("Wrong direction", b);

        // Gets and puts
        relPut(b);
        relGet(b);
        absGet(b);
        bulkGet(b);

        absPut(b);
        relGet(b);
        absGet(b);
        bulkGet(b);

        bulkPutArray(b);
        relGet(b);

        bulkPutBuffer(b);
        relGet(b);

        // Compact
        relPut(b);
        b.position(13);
        b.compact();
        b.flip();
        relGet(b, 13);

        // Exceptions
        relPut(b);
        b.limit(b.capacity() / 2);
        b.position(b.limit());

        tryCatch(b, BufferUnderflowException.class, new Runnable() {
                public void run() {
                    b.get();
                }});

        tryCatch(b, BufferOverflowException.class, new Runnable() {
                public void run() {
                    b.put((short)42);
                }});

        // The index must be non-negative and less than the buffer's limit.
        tryCatch(b, IndexOutOfBoundsException.class, new Runnable() {
                public void run() {
                    b.get(b.limit());
                }});
        tryCatch(b, IndexOutOfBoundsException.class, new Runnable() {
                public void run() {
                    b.get(-1);
                }});

        tryCatch(b, IndexOutOfBoundsException.class, new Runnable() {
                public void run() {
                    b.put(b.limit(), (short)42);
                }});

        tryCatch(b, InvalidMarkException.class, new Runnable() {
                public void run() {
                    b.position(0);
                    b.mark();
                    b.compact();   // compact() discards the mark
                    b.reset();
                }});

        // Values
        b.clear();
        b.put((short)0);
        b.put((short)-1);
        b.put((short)1);
        b.put(Short.MAX_VALUE);
        b.put(Short.MIN_VALUE);

        b.flip();
        ck(b, b.get(), 0);
        ck(b, b.get(), (short)-1);
        ck(b, b.get(), 1);
        ck(b, b.get(), Short.MAX_VALUE);
        ck(b, b.get(), Short.MIN_VALUE);

        // Comparison
        b.rewind();
        ShortBuffer b2 = ShortBuffer.allocate(b.capacity());
        b2.put(b);
        b2.flip();
        b.position(2);
        b2.position(2);
        if (!b.equals(b2)) {
            for (int i = 2; i < b.limit(); i++) {
                short x = b.get(i);
                short y = b2.get(i);
                if (x != y
                    )
                    out.println("[" + i + "] " + x + " != " + y);
            }
            fail("Identical buffers not equal", b, b2);
        }
        if (b.compareTo(b2) != 0)
            fail("Comparison to identical buffer != 0", b, b2);

        b.limit(b.limit() + 1);
        b.position(b.limit() - 1);
        b.put((short)99);
        b.rewind();
        b2.rewind();
        if (b.equals(b2))
            fail("Non-identical buffers equal", b, b2);
        if (b.compareTo(b2) <= 0)
            fail("Comparison to shorter buffer <= 0", b, b2);
        b.limit(b.limit() - 1);

        b.put(2, (short)42);
        if (b.equals(b2))
            fail("Non-identical buffers equal", b, b2);
        if (b.compareTo(b2) <= 0)
            fail("Comparison to lesser buffer <= 0", b, b2);

        // Check equals and compareTo with interesting values
        for (short x : VALUES) {
            ShortBuffer xb = ShortBuffer.wrap(new short[] { x });
            if (xb.compareTo(xb) != 0) {
                fail("compareTo not reflexive", xb, xb, x, x);
            }
            if (! xb.equals(xb)) {
                fail("equals not reflexive", xb, xb, x, x);
            }
            for (short y : VALUES) {
                ShortBuffer yb = ShortBuffer.wrap(new short[] { y });
                if (xb.compareTo(yb) != - yb.compareTo(xb)) {
                    fail("compareTo not anti-symmetric",
                         xb, yb, x, y);
                }
                if ((xb.compareTo(yb) == 0) != xb.equals(yb)) {
                    fail("compareTo inconsistent with equals",
                         xb, yb, x, y);
                }
                if (xb.compareTo(yb) != Short.compare(x, y)) {
                    fail("Incorrect results for ShortBuffer.compareTo",
                         xb, yb, x, y);
                }
                if (xb.equals(yb) != ((x == y) || ((x != x) && (y != y)))) {
                    fail("Incorrect results for ShortBuffer.equals",
                         xb, yb, x, y);
                }
            }
        }

        // Sub, dup
        relPut(b);
        relGet(b.duplicate());
        b.position(13);
        relGet(b.duplicate(), 13);
        relGet(b.duplicate().slice(), 13);
        relGet(b.slice(), 13);
        relGet(b.slice().duplicate(), 13);

        // Slice
        b.position(5);
        ShortBuffer sb = b.slice();
        checkSlice(b, sb);
        b.position(0);
        ShortBuffer sb2 = sb.slice();
        checkSlice(sb, sb2);

        if (!sb.equals(sb2))
            fail("Sliced slices do not match", sb, sb2);
        if ((sb.hasArray()) && (sb.arrayOffset() != sb2.arrayOffset()))
            fail("Array offsets do not match: "
                 + sb.arrayOffset() + " != " + sb2.arrayOffset(), sb, sb2);

        // Read-only views
        b.rewind();
        final ShortBuffer rb = b.asReadOnlyBuffer();
        if (!b.equals(rb))
            fail("Buffer not equal to read-only view", b, rb);
        show(level + 1, rb);

        tryCatch(b, ReadOnlyBufferException.class, new Runnable() {
                public void run() {
                    relPut(rb);
                }});
        tryCatch(b, ReadOnlyBufferException.class, new Runnable() {
                public void run() {
                    absPut(rb);
                }});
        tryCatch(b, ReadOnlyBufferException.class, new Runnable() {
                public void run() {
                    bulkPutArray(rb);
                }});
        tryCatch(b, ReadOnlyBufferException.class, new Runnable() {
                public void run() {
                    bulkPutBuffer(rb);
                }});
        tryCatch(b, ReadOnlyBufferException.class, new Runnable() {
                public void run() {
                    rb.compact();
                }});

        if (rb.getClass().getName().startsWith("java.nio.Heap")) {
            tryCatch(b, ReadOnlyBufferException.class, new Runnable() {
                    public void run() {
                        rb.array();
                    }});
            tryCatch(b, ReadOnlyBufferException.class, new Runnable() {
                    public void run() {
                        rb.arrayOffset();
                    }});
            if (rb.hasArray())
                fail("Read-only heap buffer's backing array is accessible",
                     rb);
        }

        // Bulk puts from read-only buffers
        b.clear();
        rb.rewind();
        b.put(rb);

        relPut(b);                       // Required by testViews
    }

    public static void test(final short [] ba) {
        int offset = 47;
        int length = 900;
        final ShortBuffer b = ShortBuffer.wrap(ba, offset, length);
        show(0, b);
        ck(b, b.capacity(), ba.length);
        ck(b, b.position(), offset);
        ck(b, b.limit(), offset + length);

        // The offset must be non-negative and no larger than <array.length>.
        tryCatch(ba, IndexOutOfBoundsException.class, new Runnable() {
                public void run() {
                    ShortBuffer.wrap(ba, -1, ba.length);
                }});
        tryCatch(ba, IndexOutOfBoundsException.class, new Runnable() {
                public void run() {
                    ShortBuffer.wrap(ba, ba.length + 1, ba.length);
                }});
        tryCatch(ba, IndexOutOfBoundsException.class, new Runnable() {
                public void run() {
                    ShortBuffer.wrap(ba, 0, -1);
                }});
        tryCatch(ba, IndexOutOfBoundsException.class, new Runnable() {
                public void run() {
                    ShortBuffer.wrap(ba, 0, ba.length + 1);
                }});

        // A NullPointerException will be thrown if the array is null.
        tryCatch(ba, NullPointerException.class, new Runnable() {
                public void run() {
                    ShortBuffer.wrap((short []) null, 0, 5);
                }});
        tryCatch(ba, NullPointerException.class, new Runnable() {
                public void run() {
                    ShortBuffer.wrap((short []) null);
                }});
    }

    private static void testAllocate() {
        // An IllegalArgumentException will be thrown for negative capacities.
        tryCatch((Buffer) null, IllegalArgumentException.class, new Runnable() {
                public void run() {
                    ShortBuffer.allocate(-1);
                }});
    }

    public static void test() {
        testAllocate();
        test(0, ShortBuffer.allocate(7 * 1024), false);
        test(0, ShortBuffer.wrap(new short[7 * 1024], 0, 7 * 1024), false);
        test(new short[1024]);
        callReset(ShortBuffer.allocate(10));
        putBuffer();
    }

}
| |
//
// Util.java
// Adjust
//
// Created by Christian Wellenbrock on 2012-10-11.
// Copyright (c) 2012-2014 adjust GmbH. All rights reserved.
// See the file MIT-LICENSE for copying permission.
//
package com.adjust.sdk;
import static com.adjust.sdk.Constants.ENCODING;
import static com.adjust.sdk.Constants.HIGH;
import static com.adjust.sdk.Constants.LARGE;
import static com.adjust.sdk.Constants.LONG;
import static com.adjust.sdk.Constants.LOW;
import static com.adjust.sdk.Constants.MD5;
import static com.adjust.sdk.Constants.MEDIUM;
import static com.adjust.sdk.Constants.NORMAL;
import static com.adjust.sdk.Constants.PLUGINS;
import static com.adjust.sdk.Constants.SHA1;
import static com.adjust.sdk.Constants.SMALL;
import static com.adjust.sdk.Constants.UNKNOWN;
import static com.adjust.sdk.Constants.XLARGE;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.ContentResolver;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import com.adjust.sdk.plugin.Plugin;
/**
* Collects utility functions used by Adjust.
*/
public class Util {
private static SimpleDateFormat dateFormat;
private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'Z";
protected static String getUserAgent(final Context context) {
final Resources resources = context.getResources();
final DisplayMetrics displayMetrics = resources.getDisplayMetrics();
final Configuration configuration = resources.getConfiguration();
final Locale locale = configuration.locale;
final int screenLayout = configuration.screenLayout;
final String[] parts = {
getPackageName(context),
getAppVersion(context),
getDeviceType(screenLayout),
getDeviceName(),
getOsName(),
getOsVersion(),
getLanguage(locale),
getCountry(locale),
getScreenSize(screenLayout),
getScreenFormat(screenLayout),
getScreenDensity(displayMetrics),
getDisplayWidth(displayMetrics),
getDisplayHeight(displayMetrics)
};
return TextUtils.join(" ", parts);
}
private static String getPackageName(final Context context) {
final String packageName = context.getPackageName();
return sanitizeString(packageName);
}
private static String getAppVersion(final Context context) {
try {
final PackageManager packageManager = context.getPackageManager();
final String name = context.getPackageName();
final PackageInfo info = packageManager.getPackageInfo(name, 0);
final String versionName = info.versionName;
return sanitizeString(versionName);
} catch (NameNotFoundException e) {
return UNKNOWN;
}
}
private static String getDeviceType(final int screenLayout) {
int screenSize = screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK;
switch (screenSize) {
case Configuration.SCREENLAYOUT_SIZE_SMALL:
case Configuration.SCREENLAYOUT_SIZE_NORMAL:
return "phone";
case Configuration.SCREENLAYOUT_SIZE_LARGE:
case 4:
return "tablet";
default:
return UNKNOWN;
}
}
private static String getDeviceName() {
final String deviceName = Build.MODEL;
return sanitizeString(deviceName);
}
private static String getOsName() {
return "android";
}
private static String getOsVersion() {
final String osVersion = "" + Build.VERSION.SDK_INT;
return sanitizeString(osVersion);
}
private static String getLanguage(final Locale locale) {
final String language = locale.getLanguage();
return sanitizeStringShort(language);
}
private static String getCountry(final Locale locale) {
final String country = locale.getCountry();
return sanitizeStringShort(country);
}
private static String getScreenSize(final int screenLayout) {
final int screenSize = screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK;
switch (screenSize) {
case Configuration.SCREENLAYOUT_SIZE_SMALL:
return SMALL;
case Configuration.SCREENLAYOUT_SIZE_NORMAL:
return NORMAL;
case Configuration.SCREENLAYOUT_SIZE_LARGE:
return LARGE;
case 4:
return XLARGE;
default:
return UNKNOWN;
}
}
private static String getScreenFormat(final int screenLayout) {
final int screenFormat = screenLayout & Configuration.SCREENLAYOUT_LONG_MASK;
switch (screenFormat) {
case Configuration.SCREENLAYOUT_LONG_YES:
return LONG;
case Configuration.SCREENLAYOUT_LONG_NO:
return NORMAL;
default:
return UNKNOWN;
}
}
private static String getScreenDensity(final DisplayMetrics displayMetrics) {
final int density = displayMetrics.densityDpi;
final int low = (DisplayMetrics.DENSITY_MEDIUM + DisplayMetrics.DENSITY_LOW) / 2;
final int high = (DisplayMetrics.DENSITY_MEDIUM + DisplayMetrics.DENSITY_HIGH) / 2;
if (0 == density) {
return UNKNOWN;
} else if (density < low) {
return LOW;
} else if (density > high) {
return HIGH;
}
return MEDIUM;
}
private static String getDisplayWidth(DisplayMetrics displayMetrics) {
final String displayWidth = String.valueOf(displayMetrics.widthPixels);
return sanitizeString(displayWidth);
}
private static String getDisplayHeight(DisplayMetrics displayMetrics) {
final String displayHeight = String.valueOf(displayMetrics.heightPixels);
return sanitizeString(displayHeight);
}
protected static String createUuid() {
return UUID.randomUUID().toString();
}
// removes spaces and replaces empty string with "unknown"
private static String sanitizeString(final String string) {
return sanitizeString(string, UNKNOWN);
}
private static String sanitizeStringShort(final String string) {
return sanitizeString(string, "zz");
}
private static String sanitizeString(final String string, final String defaultString) {
String result = string;
if (TextUtils.isEmpty(result)) {
result = defaultString;
}
result = result.replaceAll("\\s", "");
if (TextUtils.isEmpty(result)) {
result = defaultString;
}
return result;
}
protected static String getAttributionId(final Context context) {
try {
final ContentResolver contentResolver = context.getContentResolver();
final Uri uri = Uri.parse("content://com.facebook.katana.provider.AttributionIdProvider");
final String columnName = "aid";
final String[] projection = {columnName};
final Cursor cursor = contentResolver.query(uri, projection, null, null, null);
if (null == cursor) {
return null;
}
if (!cursor.moveToFirst()) {
cursor.close();
return null;
}
final String attributionId = cursor.getString(cursor.getColumnIndex(columnName));
cursor.close();
return attributionId;
} catch (Exception e) {
return null;
}
}
public static String quote(String string) {
if (string == null) {
return null;
}
Pattern pattern = Pattern.compile("\\s");
Matcher matcher = pattern.matcher(string);
if (!matcher.find()) {
return string;
}
return String.format("'%s'", string);
}
public static String dateFormat(long date) {
if (null == dateFormat) {
dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US);
}
return dateFormat.format(date);
}
public static JSONObject buildJsonObject(String jsonString) {
JSONObject jsonObject = null;
try {
jsonObject = new JSONObject(jsonString);
} catch (JSONException e){
}
return jsonObject;
}
public static String getPlayAdId(Context context) {
return Reflection.getPlayAdId(context);
}
public static Boolean isPlayTrackingEnabled(Context context) {
return Reflection.isPlayTrackingEnabled(context);
}
public static boolean isGooglePlayServicesAvailable(Context context) {
return Reflection.isGooglePlayServicesAvailable(context);
}
public static String getMacAddress(Context context) {
return Reflection.getMacAddress(context);
}
public static String getMacSha1(String macAddress) {
if (macAddress == null) {
return null;
}
String macSha1 = sha1(macAddress);
return macSha1;
}
public static String getMacShortMd5(String macAddress) {
if (macAddress == null) {
return null;
}
String macShort = macAddress.replaceAll(":", "");
String macShortMd5 = md5(macShort);
return macShortMd5;
}
public static String getAndroidId(Context context) {
return Reflection.getAndroidId(context);
}
public static String sha1(final String text) {
return hash(text, SHA1);
}
private static String md5(final String text) {
return hash(text, MD5);
}
private static String hash(final String text, final String method) {
String hashString = null;
try {
final byte[] bytes = text.getBytes(ENCODING);
final MessageDigest mesd = MessageDigest.getInstance(method);
mesd.update(bytes, 0, bytes.length);
final byte[] hash = mesd.digest();
hashString = convertToHex(hash);
} catch (Exception e) {
}
return hashString;
}
private static String convertToHex(final byte[] bytes) {
final BigInteger bigInt = new BigInteger(1, bytes);
final String formatString = "%0" + (bytes.length << 1) + "x";
return String.format(formatString, bigInt);
}
public static Map<String, String> getPluginKeys(Context context) {
Map<String, String> pluginKeys = new HashMap<String, String>();
for (Plugin plugin : getPlugins()) {
Map.Entry<String, String> pluginEntry = plugin.getParameter(context);
if (pluginEntry != null) {
pluginKeys.put(pluginEntry.getKey(), pluginEntry.getValue());
}
}
if (pluginKeys.size() == 0) {
return null;
} else {
return pluginKeys;
}
}
private static List<Plugin> getPlugins() {
List<Plugin> plugins = new ArrayList<Plugin>(PLUGINS.size());
for (String pluginName : PLUGINS) {
Object pluginObject = Reflection.createDefaultInstance(pluginName);
if (pluginObject != null && pluginObject instanceof Plugin) {
plugins.add((Plugin) pluginObject);
}
}
return plugins;
}
    /**
     * Read the application's manifest meta-data Bundle via the PackageManager.
     *
     * @param context context used to resolve the package name and PackageManager
     * @param logger  sink for error reporting when the lookup fails
     * @return the {@code <meta-data>} Bundle from ApplicationInfo, or null on any failure
     */
    public static Bundle getApplicationBundle(Context context, Logger logger) {
        final ApplicationInfo applicationInfo;
        try {
            String packageName = context.getPackageName();
            // GET_META_DATA asks the PackageManager to populate applicationInfo.metaData.
            applicationInfo = context.getPackageManager().getApplicationInfo(packageName, PackageManager.GET_META_DATA);
            return applicationInfo.metaData;
        } catch (NameNotFoundException e) {
            logger.error("ApplicationInfo not found");
        } catch (Exception e) {
            logger.error("Failed to get ApplicationBundle (%s)", e);
        }
        // Both failure paths fall through to a null result after logging.
        return null;
    }
}
| |
/**
* Copyright 2013 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.protocols.payments;
import com.google.bitcoin.core.*;
import com.google.bitcoin.crypto.X509Utils;
import com.google.bitcoin.script.ScriptBuilder;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.bitcoin.protocols.payments.Protos;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.security.*;
import java.security.cert.*;
import java.security.cert.Certificate;
import java.util.ArrayList;
import java.util.List;
/**
* <p>Utility methods and constants for working with <a href="https://github.com/bitcoin/bips/blob/master/bip-0070.mediawiki">
* BIP 70 aka the payment protocol</a>. These are low level wrappers around the protocol buffers. If you're implementing
* a wallet app, look at {@link PaymentSession} for a higher level API that should simplify working with the protocol.</p>
*
* <p>BIP 70 defines a binary, protobuf based protocol that runs directly between sender and receiver of funds. Payment
* protocol data does not flow over the Bitcoin P2P network or enter the block chain. It's instead for data that is only
* of interest to the parties involved but isn't otherwise needed for consensus.</p>
*/
public class PaymentProtocol {
    // MIME types as defined in BIP71.
    public static final String MIMETYPE_PAYMENTREQUEST = "application/bitcoin-paymentrequest";
    public static final String MIMETYPE_PAYMENT = "application/bitcoin-payment";
    public static final String MIMETYPE_PAYMENTACK = "application/bitcoin-paymentack";

    /**
     * Create a payment request with one standard pay to address output. You may want to sign the request using
     * {@link #signPaymentRequest}. Use {@link Protos.PaymentRequest.Builder#build} to get the actual payment
     * request.
     *
     * @param params network parameters
     * @param amount amount of coins to request, or null
     * @param toAddress address to request coins to
     * @param memo arbitrary, user readable memo, or null if none
     * @param paymentUrl URL to send payment message to, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment request, in its builder form
     */
    public static Protos.PaymentRequest.Builder createPaymentRequest(NetworkParameters params,
            @Nullable Coin amount, Address toAddress, @Nullable String memo, @Nullable String paymentUrl,
            @Nullable byte[] merchantData) {
        // Convenience overload: wrap the single address/amount pair as a one-element output list.
        return createPaymentRequest(params, ImmutableList.of(createPayToAddressOutput(amount, toAddress)), memo,
                paymentUrl, merchantData);
    }

    /**
     * Create a payment request. You may want to sign the request using {@link #signPaymentRequest}. Use
     * {@link Protos.PaymentRequest.Builder#build} to get the actual payment request.
     *
     * @param params network parameters
     * @param outputs list of outputs to request coins to
     * @param memo arbitrary, user readable memo, or null if none
     * @param paymentUrl URL to send payment message to, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment request, in its builder form
     */
    public static Protos.PaymentRequest.Builder createPaymentRequest(NetworkParameters params,
            List<Protos.Output> outputs, @Nullable String memo, @Nullable String paymentUrl,
            @Nullable byte[] merchantData) {
        final Protos.PaymentDetails.Builder paymentDetails = Protos.PaymentDetails.newBuilder();
        paymentDetails.setNetwork(params.getPaymentProtocolId());
        for (Protos.Output output : outputs)
            paymentDetails.addOutputs(output);
        if (memo != null)
            paymentDetails.setMemo(memo);
        if (paymentUrl != null)
            paymentDetails.setPaymentUrl(paymentUrl);
        if (merchantData != null)
            paymentDetails.setMerchantData(ByteString.copyFrom(merchantData));
        // Stamp the request with the current time (in seconds) at creation.
        paymentDetails.setTime(Utils.currentTimeSeconds());
        // The details are embedded into the request in serialized form.
        final Protos.PaymentRequest.Builder paymentRequest = Protos.PaymentRequest.newBuilder();
        paymentRequest.setSerializedPaymentDetails(paymentDetails.build().toByteString());
        return paymentRequest;
    }

    /**
     * Parse a payment request.
     *
     * @param paymentRequest payment request to parse
     * @return instance of {@link PaymentSession}, used as a value object
     * @throws PaymentProtocolException
     */
    public static PaymentSession parsePaymentRequest(Protos.PaymentRequest paymentRequest)
            throws PaymentProtocolException {
        // false: skip PKI verification here; callers can verify separately via verifyPaymentRequestPki.
        return new PaymentSession(paymentRequest, false, null);
    }

    /**
     * Sign the provided payment request.
     *
     * @param paymentRequest Payment request to sign, in its builder form.
     * @param certificateChain Certificate chain to send with the payment request, ordered from client certificate to root
     *            certificate. The root certificate itself may be omitted.
     * @param privateKey The key to sign with. Must match the public key from the first certificate of the certificate chain.
     */
    public static void signPaymentRequest(Protos.PaymentRequest.Builder paymentRequest,
            X509Certificate[] certificateChain, PrivateKey privateKey) {
        try {
            final Protos.X509Certificates.Builder certificates = Protos.X509Certificates.newBuilder();
            for (final Certificate certificate : certificateChain)
                certificates.addCertificate(ByteString.copyFrom(certificate.getEncoded()));
            paymentRequest.setPkiType("x509+sha256");
            paymentRequest.setPkiData(certificates.build().toByteString());
            // The signature is computed over the serialized request with an EMPTY
            // signature field, so clear it before serializing for signing.
            paymentRequest.setSignature(ByteString.EMPTY);
            final Protos.PaymentRequest paymentRequestToSign = paymentRequest.build();
            final String algorithm;
            // Only RSA keys are supported here; any other key algorithm is a caller error.
            if (privateKey.getAlgorithm().equalsIgnoreCase("RSA"))
                algorithm = "SHA256withRSA";
            else
                throw new IllegalStateException(privateKey.getAlgorithm());
            final Signature signature = Signature.getInstance(algorithm);
            signature.initSign(privateKey);
            signature.update(paymentRequestToSign.toByteArray());
            paymentRequest.setSignature(ByteString.copyFrom(signature.sign()));
        } catch (final GeneralSecurityException x) {
            // Should never happen so don't make users have to think about it.
            throw new RuntimeException(x);
        }
    }

    /**
     * Uses the provided PKI method to find the corresponding public key and verify the provided signature.
     *
     * @param paymentRequest Payment request to verify.
     * @param trustStore KeyStore of trusted root certificate authorities.
     * @return verification data, or null if no PKI method was specified in the {@link Protos.PaymentRequest}.
     * @throws PaymentProtocolException if payment request could not be verified.
     */
    public static @Nullable PkiVerificationData verifyPaymentRequestPki(Protos.PaymentRequest paymentRequest, KeyStore trustStore)
            throws PaymentProtocolException {
        List<X509Certificate> certs = null;
        try {
            final String pkiType = paymentRequest.getPkiType();
            if (pkiType.equals("none"))
                // Nothing to verify. Everything is fine. Move along.
                return null;
            String algorithm;
            if (pkiType.equals("x509+sha256"))
                algorithm = "SHA256withRSA";
            else if (pkiType.equals("x509+sha1"))
                algorithm = "SHA1withRSA";
            else
                throw new PaymentProtocolException.InvalidPkiType("Unsupported PKI type: " + pkiType);
            Protos.X509Certificates protoCerts = Protos.X509Certificates.parseFrom(paymentRequest.getPkiData());
            if (protoCerts.getCertificateCount() == 0)
                throw new PaymentProtocolException.InvalidPkiData("No certificates provided in message: server config error");
            // Parse the certs and turn into a certificate chain object. Cert factories can parse both DER and base64.
            // The ordering of certificates is defined by the payment protocol spec to be the same as what the Java
            // crypto API requires - convenient!
            CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
            certs = Lists.newArrayList();
            for (ByteString bytes : protoCerts.getCertificateList())
                certs.add((X509Certificate) certificateFactory.generateCertificate(bytes.newInput()));
            CertPath path = certificateFactory.generateCertPath(certs);
            // Retrieves the most-trusted CAs from keystore.
            PKIXParameters params = new PKIXParameters(trustStore);
            // Revocation not supported in the current version.
            params.setRevocationEnabled(false);
            // Now verify the certificate chain is correct and trusted. This let's us get an identity linked pubkey.
            CertPathValidator validator = CertPathValidator.getInstance("PKIX");
            PKIXCertPathValidatorResult result = (PKIXCertPathValidatorResult) validator.validate(path, params);
            PublicKey publicKey = result.getPublicKey();
            // OK, we got an identity, now check it was used to sign this message.
            Signature signature = Signature.getInstance(algorithm);
            // Note that we don't use signature.initVerify(certs.get(0)) here despite it being the most obvious
            // way to set it up, because we don't care about the constraints specified on the certificates: any
            // cert that links a key to a domain name or other identity will do for us.
            signature.initVerify(publicKey);
            // The signature covers the request with an empty signature field, so
            // rebuild the message the same way the signer did (see signPaymentRequest).
            Protos.PaymentRequest.Builder reqToCheck = paymentRequest.toBuilder();
            reqToCheck.setSignature(ByteString.EMPTY);
            signature.update(reqToCheck.build().toByteArray());
            if (!signature.verify(paymentRequest.getSignature().toByteArray()))
                throw new PaymentProtocolException.PkiVerificationException("Invalid signature, this payment request is not valid.");
            // Signature verifies, get the names from the identity we just verified for presentation to the user.
            final X509Certificate cert = certs.get(0);
            String displayName = X509Utils.getDisplayNameFromCertificate(cert, true);
            if (displayName == null)
                throw new PaymentProtocolException.PkiVerificationException("Could not extract name from certificate");
            // Everything is peachy. Return some useful data to the caller.
            return new PkiVerificationData(displayName, publicKey, result.getTrustAnchor());
        } catch (InvalidProtocolBufferException e) {
            // Data structures are malformed.
            throw new PaymentProtocolException.InvalidPkiData(e);
        } catch (CertificateException e) {
            // The X.509 certificate data didn't parse correctly.
            throw new PaymentProtocolException.PkiVerificationException(e);
        } catch (NoSuchAlgorithmException e) {
            // Should never happen so don't make users have to think about it. PKIX is always present.
            throw new RuntimeException(e);
        } catch (InvalidAlgorithmParameterException e) {
            throw new RuntimeException(e);
        } catch (CertPathValidatorException e) {
            // The certificate chain isn't known or trusted, probably, the server is using an SSL root we don't
            // know about and the user needs to upgrade to a new version of the software (or import a root cert).
            throw new PaymentProtocolException.PkiVerificationException(e, certs);
        } catch (InvalidKeyException e) {
            // Shouldn't happen if the certs verified correctly.
            throw new PaymentProtocolException.PkiVerificationException(e);
        } catch (SignatureException e) {
            // Something went wrong during hashing (yes, despite the name, this does not mean the sig was invalid).
            throw new PaymentProtocolException.PkiVerificationException(e);
        } catch (KeyStoreException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Information about the X.509 signature's issuer and subject.
     */
    public static class PkiVerificationData {
        /** Display name of the payment requestor, could be a domain name, email address, legal name, etc */
        public final String displayName;
        /** SSL public key that was used to sign. */
        public final PublicKey merchantSigningKey;
        /** Object representing the CA that verified the merchant's ID */
        public final TrustAnchor rootAuthority;
        /** String representing the display name of the CA that verified the merchant's ID */
        public final String rootAuthorityName;

        private PkiVerificationData(@Nullable String displayName, PublicKey merchantSigningKey,
                TrustAnchor rootAuthority) throws PaymentProtocolException.PkiVerificationException {
            try {
                this.displayName = displayName;
                this.merchantSigningKey = merchantSigningKey;
                this.rootAuthority = rootAuthority;
                // Derive a human-readable name for the trust anchor's certificate.
                this.rootAuthorityName = X509Utils.getDisplayNameFromCertificate(rootAuthority.getTrustedCert(), true);
            } catch (CertificateParsingException x) {
                throw new PaymentProtocolException.PkiVerificationException(x);
            }
        }

        @Override
        public String toString() {
            return Objects.toStringHelper(this)
                    .add("displayName", displayName)
                    .add("rootAuthorityName", rootAuthorityName)
                    .add("merchantSigningKey", merchantSigningKey)
                    .add("rootAuthority", rootAuthority)
                    .toString();
        }
    }

    /**
     * Create a payment message with one standard pay to address output.
     *
     * @param transactions one or more transactions that satisfy the requested outputs.
     * @param refundAmount amount of coins to request as a refund, or null if no refund.
     * @param refundAddress address to refund coins to
     * @param memo arbitrary, user readable memo, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment message
     */
    public static Protos.Payment createPaymentMessage(List<Transaction> transactions,
            @Nullable Coin refundAmount, @Nullable Address refundAddress, @Nullable String memo,
            @Nullable byte[] merchantData) {
        if (refundAddress != null) {
            // A refund address without an amount is rejected as a caller error.
            if (refundAmount == null)
                throw new IllegalArgumentException("Specify refund amount if refund address is specified.");
            return createPaymentMessage(transactions,
                    ImmutableList.of(createPayToAddressOutput(refundAmount, refundAddress)), memo, merchantData);
        } else {
            return createPaymentMessage(transactions, null, memo, merchantData);
        }
    }

    /**
     * Create a payment message. This wraps up transaction data along with anything else useful for making a payment.
     *
     * @param transactions transactions to include with the payment message
     * @param refundOutputs list of outputs to refund coins to, or null
     * @param memo arbitrary, user readable memo, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment message
     */
    public static Protos.Payment createPaymentMessage(List<Transaction> transactions,
            @Nullable List<Protos.Output> refundOutputs, @Nullable String memo, @Nullable byte[] merchantData) {
        Protos.Payment.Builder builder = Protos.Payment.newBuilder();
        for (Transaction transaction : transactions) {
            // Sanity-check each transaction before serializing it into the message.
            transaction.verify();
            builder.addTransactions(ByteString.copyFrom(transaction.unsafeBitcoinSerialize()));
        }
        if (refundOutputs != null) {
            for (Protos.Output output : refundOutputs)
                builder.addRefundTo(output);
        }
        if (memo != null)
            builder.setMemo(memo);
        if (merchantData != null)
            builder.setMerchantData(ByteString.copyFrom(merchantData));
        return builder.build();
    }

    /**
     * Parse transactions from payment message.
     *
     * @param params network parameters (needed for transaction deserialization)
     * @param paymentMessage payment message to parse
     * @return list of transactions
     */
    public static List<Transaction> parseTransactionsFromPaymentMessage(NetworkParameters params,
            Protos.Payment paymentMessage) {
        final List<Transaction> transactions = new ArrayList<Transaction>(paymentMessage.getTransactionsCount());
        for (final ByteString transaction : paymentMessage.getTransactionsList())
            transactions.add(new Transaction(params, transaction.toByteArray()));
        return transactions;
    }

    /**
     * Message returned by the merchant in response to a Payment message.
     */
    public static class Ack {
        @Nullable private final String memo;

        Ack(@Nullable String memo) {
            this.memo = memo;
        }

        /**
         * Returns the memo included by the merchant in the payment ack. This message is typically displayed to the user
         * as a notification (e.g. "Your payment was received and is being processed"). If none was provided, returns
         * null.
         */
        @Nullable public String getMemo() {
            return memo;
        }
    }

    /**
     * Create a payment ack.
     *
     * @param paymentMessage payment message to send with the ack
     * @param memo arbitrary, user readable memo, or null if none
     * @return created payment ack
     */
    public static Protos.PaymentACK createPaymentAck(Protos.Payment paymentMessage, @Nullable String memo) {
        final Protos.PaymentACK.Builder builder = Protos.PaymentACK.newBuilder();
        // The ack echoes the payment message it acknowledges.
        builder.setPayment(paymentMessage);
        if (memo != null)
            builder.setMemo(memo);
        return builder.build();
    }

    /**
     * Parse payment ack into an object.
     */
    public static Ack parsePaymentAck(Protos.PaymentACK paymentAck) {
        final String memo = paymentAck.hasMemo() ? paymentAck.getMemo() : null;
        return new Ack(memo);
    }

    /**
     * Create a standard pay to address output for usage in {@link #createPaymentRequest} and
     * {@link #createPaymentMessage}.
     *
     * @param amount amount to pay, or null
     * @param address address to pay to
     * @return output
     */
    public static Protos.Output createPayToAddressOutput(@Nullable Coin amount, Address address) {
        Protos.Output.Builder output = Protos.Output.newBuilder();
        if (amount != null) {
            if (amount.compareTo(NetworkParameters.MAX_MONEY) > 0)
                throw new IllegalArgumentException("Amount too big: " + amount);
            output.setAmount(amount.value);
        } else {
            // An absent amount is encoded as zero in the protobuf.
            output.setAmount(0);
        }
        output.setScript(ByteString.copyFrom(ScriptBuilder.createOutputScript(address).getProgram()));
        return output.build();
    }

    /**
     * Value object to hold amount/script pairs.
     */
    public static class Output implements Serializable {
        public final @Nullable Coin amount;
        public final byte[] scriptData;

        public Output(@Nullable Coin amount, byte[] scriptData) {
            this.amount = amount;
            this.scriptData = scriptData;
        }
    }
}
| |
/*
* MIT License
*
* Copyright (c) 2016
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.codingrodent.emulator.utilities;
import com.codingrodent.emulator.cards.common.FDC17xx;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
public class DiskImage {
    private final static int MAX_TRACKS = 256;
    private final static int MAX_SECTORS = 256;
    // One B-tree of sectors per track, per disk side.
    private final BTree[] diskSide0 = new BTree[MAX_TRACKS];
    private final BTree[] diskSide1 = new BTree[MAX_TRACKS];

    /**
     * Create a blank disk image
     */
    public DiskImage() {
        resetDiskImage();
    }

    /**
     * Emulator entry point
     *
     * @param args Not used
     */
    public static void main(String[] args) {
        DiskImage reader = new DiskImage();
        short[] ram = new short[65536];
        MemoryChunk memory = new MemoryChunk(ram);
        // try-with-resources guarantees the writer is flushed and closed; the
        // original never closed it, so dump.nas could be left truncated.
        try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("dump.nas"), StandardCharsets.UTF_8))) {
            memory.setBase(0xC000);
            reader.dumpDiskToMemory(memory, 1, 1, 0x20);
            reader.dumpDiskToMemory(memory, 2, 0xF, 0x20);
            memory.setBase(0x0000);
            //
            FileHandler fileHandler = new FileHandler();
            fileHandler.writeHexDumpFile(memory, writer);
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Clear a disk image
     */
    private void resetDiskImage() {
        for (int track = 0; track < diskSide0.length; track++) {
            diskSide0[track] = new BTree();
            diskSide1[track] = new BTree();
        }
    }

    /**
     * Simulate a disk removal and erase + tidy up any remaining data
     */
    public void ejectDisk() {
        resetDiskImage();
    }

    /**
     * disk reader for files in ANADisk format
     *
     * @param file File handle to the ANADisk file
     */
    public void loadANADisk(File file) {
        byte[] headerBlock = new byte[8];
        // try-with-resources: the original leaked the stream when parsing failed.
        try (BufferedInputStream fis = new BufferedInputStream(new FileInputStream(file))) {
            // Each ANADisk record is an 8 byte header followed by the sector payload.
            while (readFully(fis, headerBlock)) {
                // Header bytes are unsigned; mask off the sign extension so
                // cylinders/sectors above 127 are read correctly.
                int cylinder = headerBlock[0] & 0xFF;
                int side = headerBlock[1] & 0xFF;
                int sector = headerBlock[4] & 0xFF;
                // BUG FIX: payload length is a little-endian 16 bit value. The
                // original computed "headerBlock[6] + headerBlock[7] << 8", which
                // '+' binds tighter than '<<' turns into (b6 + b7) << 8, and it
                // also used sign-extended byte values (the old "count < 0" patch
                // compensated for that).
                int count = (headerBlock[6] & 0xFF) | ((headerBlock[7] & 0xFF) << 8);
                byte[] sectorBlock = new byte[count];
                if (readFully(fis, sectorBlock)) {
                    putSector(cylinder, sector, side, sectorBlock);
                }
            }
        } catch (Exception e) {
            // Preserve the cause so the original failure is diagnosable.
            throw new RuntimeException("Unable to load disk image. " + e.getMessage(), e);
        }
    }

    /**
     * Fill the supplied buffer completely from the stream, tolerating short reads.
     *
     * @param in     stream to read from
     * @param buffer buffer to fill
     * @return true when the buffer was filled, false when end of stream was hit first
     */
    private static boolean readFully(InputStream in, byte[] buffer) throws IOException {
        int offset = 0;
        while (offset < buffer.length) {
            int read = in.read(buffer, offset, buffer.length - offset);
            if (read < 0) {
                return false;
            }
            offset = offset + read;
        }
        return true;
    }

    /**
     * Read a binary dump of a double sided disk in .dmp format side...track...sector
     *
     * @param file Dump file to read
     * @param tracks Tracks to read
     * @param sectors Sectors to read
     * @param sectorSize Size of the sectors
     * @param sides Number of sides (0 or 1)
     */
    public void diskDumpReader(File file, int tracks, int sectors, int sectorSize, int sides) {
        try (BufferedInputStream fis = new BufferedInputStream(new FileInputStream(file))) {
            for (int track = 0; track < tracks; track++) {
                readTrack(fis, track, 0, sectors, sectorSize);
                if (1 == sides) {
                    readTrack(fis, track, 1, sectors, sectorSize);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Read one track's worth of sequential sectors from the stream onto one side.
     *
     * @param fis        source stream positioned at the first sector of the track
     * @param track      track number to store into
     * @param side       side to store into (0 or 1)
     * @param sectors    sectors per track (numbered 1..sectors)
     * @param sectorSize bytes per sector
     */
    private void readTrack(BufferedInputStream fis, int track, int side, int sectors, int sectorSize) throws IOException {
        for (int sector = 1; sector <= sectors; sector++) {
            byte[] sectorData = new byte[sectorSize];
            for (int position = 0; position < sectorSize; position++) {
                sectorData[position] = (byte) fis.read();
            }
            putSector(track, sector, side, sectorData);
        }
    }

    /**
     * Read a sector as a byte array from a specified disk
     *
     * @param track Track to read
     * @param sector Sector to read
     * @param side Side to read
     * @return The sector as a byte array, or null when the sector does not exist
     */
    public byte[] getSector(int track, int sector, int side) {
        try {
            if (0 == side) {
                return ((Sector) diskSide0[track].getNode(sector).getData()).getData();
            } else {
                return ((Sector) diskSide1[track].getNode(sector).getData()).getData();
            }
        } catch (Exception e) {
            // Missing node / out-of-range track: report "no such sector" as null.
            return null;
        }
    }

    /**
     * Put a sector into the B tree representing the disk image
     *
     * @param track The track to write
     * @param sector The sector to write
     * @param side The side to write
     * @param sectorData The data to write
     */
    public void putSector(int track, int sector, int side, byte[] sectorData) {
        Sector sectorNode = new Sector(sector);
        BTreeNode node = new BTreeNode();
        sectorNode.setData(sectorData);
        node.setData(sectorNode);
        if (0 == side) {
            diskSide0[track].insertNode(node);
        } else {
            diskSide1[track].insertNode(node);
        }
    }

    /**
     * Erase all the sectors in a track
     *
     * @param track int The track to erase
     * @param side int The side to erase
     */
    public void eraseTrack(int track, int side) {
        if (0 == side) {
            diskSide0[track].erase();
        } else {
            diskSide1[track].erase();
        }
    }

    /**
     * Read a byte from a sector
     *
     * @param sector The sector to read from
     * @param location Location in sector
     * @return The byte as an int (so we don't get any negative values)
     */
    private int getByte(byte[] sector, int location) {
        // Mask instead of the original "+ 256 if negative" correction.
        return sector[location] & 0xFF;
    }

    /**
     * Read a little-endian word from a sector
     *
     * @param sector The sector to read from
     * @param location Location in sector
     * @return The word as an int (so we don't get any negative values)
     */
    private int getWord(byte[] sector, int location) {
        return ((sector[location + 1] & 0xFF) << 8) | (sector[location] & 0xFF);
    }

    /**
     * Print out a sector as if it contains Nas-Dos directory data
     *
     * @param sector Sector data
     */
    public void printNasDosDirectorySector(byte[] sector) {
        // Each directory entry occupies 16 bytes; a negative marker byte means "unused".
        for (int i = 0; i < sector.length; i = i + 16) {
            if (sector[i] >= 0) {
                String fileName = new String(Arrays.copyOfRange(sector, i, i + 8), StandardCharsets.UTF_8);
                int executionAddress = getWord(sector, i + 8);
                int loadAddress = getWord(sector, i + 10);
                int startTrack = getByte(sector, i + 12);
                int startSector = getByte(sector, i + 13);
                int length = getWord(sector, i + 14);
                System.out.print(fileName + " ");
                System.out.print(Utilities.getWord(executionAddress) + " ");
                System.out.print(Utilities.getWord(loadAddress) + " ");
                System.out.print(Utilities.getWord(startTrack) + " ");
                System.out.print(Utilities.getWord(startSector) + " ");
                System.out.print(Utilities.getWord(length) + " ");
                System.out.println();
            }
        }
    }

    /**
     * Dump a sector to screen in NAS SYS tab format
     *
     * @param sector The sector to be dumped
     */
    private void dumpSector(byte[] sector) {
        for (int i = 0; i < sector.length; i = i + 16) {
            // Hex column.
            for (int j = 0; j < 16; j++) {
                System.out.print(Utilities.getByte(sector[i + j] & 0xFF) + " ");
            }
            // ASCII column: printable range only, '.' otherwise.
            for (int j = 0; j < 16; j++) {
                int value = sector[i + j] & 0xFF;
                if ((value > 31) && (value < 128)) {
                    System.out.print((char) value);
                } else {
                    System.out.print(".");
                }
            }
            System.out.println();
        }
    }

    /**
     * Dump a file to screen in NAS SYS tab format
     *
     * @param fileName The file to be dumped
     */
    public void dumpFile(String fileName) {
        byte[] buffer = new byte[16];
        try (BufferedInputStream fis = new BufferedInputStream(new FileInputStream(fileName))) {
            // BUG FIX: the original called fis.close() inside this loop, so the
            // second read threw IOException and only the first 16 bytes were
            // ever dumped. try-with-resources closes the stream exactly once.
            while (-1 != fis.read(buffer)) {
                for (int j = 0; j < 16; j++) {
                    System.out.print(Utilities.getByte(buffer[j] & 0xFF) + " ");
                }
                for (int j = 0; j < 16; j++) {
                    int value = buffer[j] & 0xFF;
                    if ((value > 31) && (value < 128)) {
                        System.out.print((char) value);
                    } else {
                        System.out.print(".");
                    }
                }
                System.out.println();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Dump a disk in ANADisk image format
     *
     * @param fileName File to produce
     * @param fdc Controller chip
     */
    public void dumpANADiskToFile(String fileName, FDC17xx.FDC_CHIP fdc) {
        // try-with-resources closes fos; the explicit close() in the original was redundant.
        try (FileOutputStream fos = new FileOutputStream(fileName)) {
            for (int side = 0; side < 2; side++) {
                for (int track = 0; track < MAX_TRACKS; track++) {
                    for (int sector = 0; sector < MAX_SECTORS; sector++) {
                        byte[] sectorData = getSector(track, sector, side);
                        // Only sectors that exist are written, each preceded by
                        // an 8 byte ANADisk header block.
                        if (null != sectorData) {
                            int length = sectorData.length;
                            fos.write(track);
                            fos.write(side);
                            fos.write(track);
                            fos.write(side);
                            fos.write(sector);
                            fos.write(FDC17xx.getLengthCode(fdc, length)); // length code
                            fos.write(length & 0x00FF); // length, little-endian
                            fos.write((length & 0xFF00) >> 8);
                            fos.write(sectorData, 0, length);
                        }
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Dump two images to a file (double sided disk)
     *
     * @param diskImage0 Side 0
     * @param diskImage1 Side 1
     * @param fileName Name of file to write to
     */
    public void dumpDiskToFile(byte[][][] diskImage0, byte[][][] diskImage1, String fileName) {
        try (FileOutputStream fos = new FileOutputStream(fileName)) {
            for (int track = 0; track < diskImage0.length; track++) {
                // dump side 0
                for (int sector = 0; sector < diskImage0[0].length; sector++) {
                    fos.write(diskImage0[track][sector], 0, diskImage0[0][0].length);
                }
                // dump side 1
                for (int sector = 0; sector < diskImage1[0].length; sector++) {
                    fos.write(diskImage1[track][sector], 0, diskImage1[0][0].length);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Copy consecutive sectors from side 0 into the memory chunk, echoing each
     * sector to the console.
     *
     * @param memory destination memory
     * @param track  first track to read
     * @param sector first sector to read
     * @param size   number of sectors to copy
     */
    private void dumpDiskToMemory(MemoryChunk memory, int track, int sector, int size) {
        for (int i = 0; i < size; i++) {
            byte[] data = getSector(track, sector, 0);
            if (null == data) {
                // Fail loudly with context instead of the NPE the original produced.
                throw new RuntimeException("Missing sector " + sector + " on track " + track);
            }
            dumpSector(data);
            for (byte b : data) {
                memory.setByte(b);
            }
            // Sectors appear to run 1..18 per track here — wraps to the next
            // track after sector 18 (TODO confirm against the disk geometry).
            if (18 == sector) {
                sector = 1;
                track++;
            } else {
                sector++;
            }
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* This data type is used as a response element in the <code>ModifyDBInstance</code> operation and contains changes that
* will be applied during the next maintenance window.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/PendingModifiedValues" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PendingModifiedValues implements Serializable, Cloneable {
/**
* <p>
* The name of the compute and memory capacity class for the DB instance.
* </p>
*/
private String dBInstanceClass;
/**
* <p>
* The allocated storage size for the DB instance specified in gibibytes (GiB).
* </p>
*/
private Integer allocatedStorage;
/**
* <p>
* The master credentials for the DB instance.
* </p>
*/
private String masterUserPassword;
/**
* <p>
* The port for the DB instance.
* </p>
*/
private Integer port;
/**
* <p>
* The number of days for which automated backups are retained.
* </p>
*/
private Integer backupRetentionPeriod;
/**
* <p>
* A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
* </p>
*/
private Boolean multiAZ;
/**
* <p>
* The database engine version.
* </p>
*/
private String engineVersion;
/**
* <p>
* The license model for the DB instance.
* </p>
* <p>
* Valid values: <code>license-included</code> | <code>bring-your-own-license</code> |
* <code>general-public-license</code>
* </p>
*/
private String licenseModel;
/**
* <p>
* The Provisioned IOPS value for the DB instance.
* </p>
*/
private Integer iops;
/**
* <p>
* The database identifier for the DB instance.
* </p>
*/
private String dBInstanceIdentifier;
/**
* <p>
* The storage type of the DB instance.
* </p>
*/
private String storageType;
/**
* <p>
* The identifier of the CA certificate for the DB instance.
* </p>
*/
private String cACertificateIdentifier;
/**
* <p>
* The DB subnet group for the DB instance.
* </p>
*/
private String dBSubnetGroupName;
private PendingCloudwatchLogsExports pendingCloudwatchLogsExports;
/**
* <p>
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<ProcessorFeature> processorFeatures;
/**
* <p>
* Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts is
* enabled.
* </p>
*/
private Boolean iAMDatabaseAuthenticationEnabled;
/**
* <p>
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>, the
* instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* </p>
*/
private String automationMode;
/**
* <p>
* The number of minutes to pause the automation. When the time period ends, RDS Custom resumes full automation. The
* minimum value is 60 (default). The maximum value is 1,440.
* </p>
*/
private java.util.Date resumeFullAutomationModeTime;
/**
* <p>
* The name of the compute and memory capacity class for the DB instance.
* </p>
*
* @param dBInstanceClass
* The name of the compute and memory capacity class for the DB instance.
*/
public void setDBInstanceClass(String dBInstanceClass) {
this.dBInstanceClass = dBInstanceClass;
}
/**
* <p>
* The name of the compute and memory capacity class for the DB instance.
* </p>
*
* @return The name of the compute and memory capacity class for the DB instance.
*/
public String getDBInstanceClass() {
return this.dBInstanceClass;
}
/**
* <p>
* The name of the compute and memory capacity class for the DB instance.
* </p>
*
* @param dBInstanceClass
* The name of the compute and memory capacity class for the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withDBInstanceClass(String dBInstanceClass) {
setDBInstanceClass(dBInstanceClass);
return this;
}
/**
* <p>
* The allocated storage size for the DB instance specified in gibibytes (GiB).
* </p>
*
* @param allocatedStorage
* The allocated storage size for the DB instance specified in gibibytes (GiB).
*/
public void setAllocatedStorage(Integer allocatedStorage) {
this.allocatedStorage = allocatedStorage;
}
/**
* <p>
* The allocated storage size for the DB instance specified in gibibytes (GiB).
* </p>
*
* @return The allocated storage size for the DB instance specified in gibibytes (GiB).
*/
public Integer getAllocatedStorage() {
return this.allocatedStorage;
}
/**
* <p>
* The allocated storage size for the DB instance specified in gibibytes (GiB).
* </p>
*
* @param allocatedStorage
* The allocated storage size for the DB instance specified in gibibytes (GiB).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withAllocatedStorage(Integer allocatedStorage) {
setAllocatedStorage(allocatedStorage);
return this;
}
/**
* <p>
* The master credentials for the DB instance.
* </p>
*
* @param masterUserPassword
* The master credentials for the DB instance.
*/
public void setMasterUserPassword(String masterUserPassword) {
this.masterUserPassword = masterUserPassword;
}
/**
* <p>
* The master credentials for the DB instance.
* </p>
*
* @return The master credentials for the DB instance.
*/
public String getMasterUserPassword() {
return this.masterUserPassword;
}
/**
* <p>
* The master credentials for the DB instance.
* </p>
*
* @param masterUserPassword
* The master credentials for the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withMasterUserPassword(String masterUserPassword) {
setMasterUserPassword(masterUserPassword);
return this;
}
/**
* <p>
* The port for the DB instance.
* </p>
*
* @param port
* The port for the DB instance.
*/
public void setPort(Integer port) {
this.port = port;
}
/**
* <p>
* The port for the DB instance.
* </p>
*
* @return The port for the DB instance.
*/
public Integer getPort() {
return this.port;
}
/**
* <p>
* The port for the DB instance.
* </p>
*
* @param port
* The port for the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withPort(Integer port) {
setPort(port);
return this;
}
/**
* <p>
* The number of days for which automated backups are retained.
* </p>
*
* @param backupRetentionPeriod
* The number of days for which automated backups are retained.
*/
public void setBackupRetentionPeriod(Integer backupRetentionPeriod) {
this.backupRetentionPeriod = backupRetentionPeriod;
}
/**
* <p>
* The number of days for which automated backups are retained.
* </p>
*
* @return The number of days for which automated backups are retained.
*/
public Integer getBackupRetentionPeriod() {
return this.backupRetentionPeriod;
}
/**
* <p>
* The number of days for which automated backups are retained.
* </p>
*
* @param backupRetentionPeriod
* The number of days for which automated backups are retained.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withBackupRetentionPeriod(Integer backupRetentionPeriod) {
setBackupRetentionPeriod(backupRetentionPeriod);
return this;
}
/**
* <p>
* A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
* </p>
*
* @param multiAZ
* A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
*/
public void setMultiAZ(Boolean multiAZ) {
this.multiAZ = multiAZ;
}
/**
* <p>
* A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
* </p>
*
* @return A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
*/
public Boolean getMultiAZ() {
return this.multiAZ;
}
/**
* <p>
* A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
* </p>
*
* @param multiAZ
* A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withMultiAZ(Boolean multiAZ) {
setMultiAZ(multiAZ);
return this;
}
/**
* <p>
* A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
* </p>
*
* @return A value that indicates that the Single-AZ DB instance will change to a Multi-AZ deployment.
*/
public Boolean isMultiAZ() {
return this.multiAZ;
}
/**
* <p>
* The database engine version.
* </p>
*
* @param engineVersion
* The database engine version.
*/
public void setEngineVersion(String engineVersion) {
this.engineVersion = engineVersion;
}
/**
* <p>
* The database engine version.
* </p>
*
* @return The database engine version.
*/
public String getEngineVersion() {
return this.engineVersion;
}
/**
* <p>
* The database engine version.
* </p>
*
* @param engineVersion
* The database engine version.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withEngineVersion(String engineVersion) {
setEngineVersion(engineVersion);
return this;
}
/**
* <p>
* The license model for the DB instance.
* </p>
* <p>
* Valid values: <code>license-included</code> | <code>bring-your-own-license</code> |
* <code>general-public-license</code>
* </p>
*
* @param licenseModel
* The license model for the DB instance.</p>
* <p>
* Valid values: <code>license-included</code> | <code>bring-your-own-license</code> |
* <code>general-public-license</code>
*/
public void setLicenseModel(String licenseModel) {
this.licenseModel = licenseModel;
}
/**
* <p>
* The license model for the DB instance.
* </p>
* <p>
* Valid values: <code>license-included</code> | <code>bring-your-own-license</code> |
* <code>general-public-license</code>
* </p>
*
* @return The license model for the DB instance.</p>
* <p>
* Valid values: <code>license-included</code> | <code>bring-your-own-license</code> |
* <code>general-public-license</code>
*/
public String getLicenseModel() {
return this.licenseModel;
}
/**
* <p>
* The license model for the DB instance.
* </p>
* <p>
* Valid values: <code>license-included</code> | <code>bring-your-own-license</code> |
* <code>general-public-license</code>
* </p>
*
* @param licenseModel
* The license model for the DB instance.</p>
* <p>
* Valid values: <code>license-included</code> | <code>bring-your-own-license</code> |
* <code>general-public-license</code>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withLicenseModel(String licenseModel) {
setLicenseModel(licenseModel);
return this;
}
/**
* <p>
* The Provisioned IOPS value for the DB instance.
* </p>
*
* @param iops
* The Provisioned IOPS value for the DB instance.
*/
public void setIops(Integer iops) {
this.iops = iops;
}
/**
* <p>
* The Provisioned IOPS value for the DB instance.
* </p>
*
* @return The Provisioned IOPS value for the DB instance.
*/
public Integer getIops() {
return this.iops;
}
/**
* <p>
* The Provisioned IOPS value for the DB instance.
* </p>
*
* @param iops
* The Provisioned IOPS value for the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withIops(Integer iops) {
setIops(iops);
return this;
}
/**
* <p>
* The database identifier for the DB instance.
* </p>
*
* @param dBInstanceIdentifier
* The database identifier for the DB instance.
*/
public void setDBInstanceIdentifier(String dBInstanceIdentifier) {
this.dBInstanceIdentifier = dBInstanceIdentifier;
}
/**
* <p>
* The database identifier for the DB instance.
* </p>
*
* @return The database identifier for the DB instance.
*/
public String getDBInstanceIdentifier() {
return this.dBInstanceIdentifier;
}
/**
* <p>
* The database identifier for the DB instance.
* </p>
*
* @param dBInstanceIdentifier
* The database identifier for the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withDBInstanceIdentifier(String dBInstanceIdentifier) {
setDBInstanceIdentifier(dBInstanceIdentifier);
return this;
}
/**
* <p>
* The storage type of the DB instance.
* </p>
*
* @param storageType
* The storage type of the DB instance.
*/
public void setStorageType(String storageType) {
this.storageType = storageType;
}
/**
* <p>
* The storage type of the DB instance.
* </p>
*
* @return The storage type of the DB instance.
*/
public String getStorageType() {
return this.storageType;
}
/**
* <p>
* The storage type of the DB instance.
* </p>
*
* @param storageType
* The storage type of the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withStorageType(String storageType) {
setStorageType(storageType);
return this;
}
/**
* <p>
* The identifier of the CA certificate for the DB instance.
* </p>
*
* @param cACertificateIdentifier
* The identifier of the CA certificate for the DB instance.
*/
public void setCACertificateIdentifier(String cACertificateIdentifier) {
this.cACertificateIdentifier = cACertificateIdentifier;
}
/**
* <p>
* The identifier of the CA certificate for the DB instance.
* </p>
*
* @return The identifier of the CA certificate for the DB instance.
*/
public String getCACertificateIdentifier() {
return this.cACertificateIdentifier;
}
/**
* <p>
* The identifier of the CA certificate for the DB instance.
* </p>
*
* @param cACertificateIdentifier
* The identifier of the CA certificate for the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withCACertificateIdentifier(String cACertificateIdentifier) {
setCACertificateIdentifier(cACertificateIdentifier);
return this;
}
/**
* <p>
* The DB subnet group for the DB instance.
* </p>
*
* @param dBSubnetGroupName
* The DB subnet group for the DB instance.
*/
public void setDBSubnetGroupName(String dBSubnetGroupName) {
this.dBSubnetGroupName = dBSubnetGroupName;
}
/**
* <p>
* The DB subnet group for the DB instance.
* </p>
*
* @return The DB subnet group for the DB instance.
*/
public String getDBSubnetGroupName() {
return this.dBSubnetGroupName;
}
/**
* <p>
* The DB subnet group for the DB instance.
* </p>
*
* @param dBSubnetGroupName
* The DB subnet group for the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withDBSubnetGroupName(String dBSubnetGroupName) {
setDBSubnetGroupName(dBSubnetGroupName);
return this;
}
/**
* @param pendingCloudwatchLogsExports
*/
public void setPendingCloudwatchLogsExports(PendingCloudwatchLogsExports pendingCloudwatchLogsExports) {
this.pendingCloudwatchLogsExports = pendingCloudwatchLogsExports;
}
/**
* @return
*/
public PendingCloudwatchLogsExports getPendingCloudwatchLogsExports() {
return this.pendingCloudwatchLogsExports;
}
/**
* @param pendingCloudwatchLogsExports
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withPendingCloudwatchLogsExports(PendingCloudwatchLogsExports pendingCloudwatchLogsExports) {
setPendingCloudwatchLogsExports(pendingCloudwatchLogsExports);
return this;
}
/**
* <p>
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
* </p>
*
* @return The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
*/
public java.util.List<ProcessorFeature> getProcessorFeatures() {
if (processorFeatures == null) {
processorFeatures = new com.amazonaws.internal.SdkInternalList<ProcessorFeature>();
}
return processorFeatures;
}
/**
* <p>
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
* </p>
*
* @param processorFeatures
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
*/
public void setProcessorFeatures(java.util.Collection<ProcessorFeature> processorFeatures) {
if (processorFeatures == null) {
this.processorFeatures = null;
return;
}
this.processorFeatures = new com.amazonaws.internal.SdkInternalList<ProcessorFeature>(processorFeatures);
}
/**
* <p>
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setProcessorFeatures(java.util.Collection)} or {@link #withProcessorFeatures(java.util.Collection)} if
* you want to override the existing values.
* </p>
*
* @param processorFeatures
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withProcessorFeatures(ProcessorFeature... processorFeatures) {
if (this.processorFeatures == null) {
setProcessorFeatures(new com.amazonaws.internal.SdkInternalList<ProcessorFeature>(processorFeatures.length));
}
for (ProcessorFeature ele : processorFeatures) {
this.processorFeatures.add(ele);
}
return this;
}
/**
* <p>
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
* </p>
*
* @param processorFeatures
* The number of CPU cores and the number of threads per core for the DB instance class of the DB instance.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withProcessorFeatures(java.util.Collection<ProcessorFeature> processorFeatures) {
setProcessorFeatures(processorFeatures);
return this;
}
/**
* <p>
* Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts is
* enabled.
* </p>
*
* @param iAMDatabaseAuthenticationEnabled
* Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts
* is enabled.
*/
public void setIAMDatabaseAuthenticationEnabled(Boolean iAMDatabaseAuthenticationEnabled) {
this.iAMDatabaseAuthenticationEnabled = iAMDatabaseAuthenticationEnabled;
}
/**
* <p>
* Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts is
* enabled.
* </p>
*
* @return Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts
* is enabled.
*/
public Boolean getIAMDatabaseAuthenticationEnabled() {
return this.iAMDatabaseAuthenticationEnabled;
}
/**
* <p>
* Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts is
* enabled.
* </p>
*
* @param iAMDatabaseAuthenticationEnabled
* Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts
* is enabled.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withIAMDatabaseAuthenticationEnabled(Boolean iAMDatabaseAuthenticationEnabled) {
setIAMDatabaseAuthenticationEnabled(iAMDatabaseAuthenticationEnabled);
return this;
}
/**
* <p>
* Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts is
* enabled.
* </p>
*
* @return Whether mapping of Amazon Web Services Identity and Access Management (IAM) accounts to database accounts
* is enabled.
*/
public Boolean isIAMDatabaseAuthenticationEnabled() {
return this.iAMDatabaseAuthenticationEnabled;
}
/**
* <p>
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>, the
* instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* </p>
*
* @param automationMode
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>,
* the instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* @see AutomationMode
*/
public void setAutomationMode(String automationMode) {
this.automationMode = automationMode;
}
/**
* <p>
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>, the
* instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* </p>
*
* @return The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>
* , the instance pauses automation for the duration set by
* <code>--resume-full-automation-mode-minutes</code>.
* @see AutomationMode
*/
public String getAutomationMode() {
return this.automationMode;
}
/**
* <p>
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>, the
* instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* </p>
*
* @param automationMode
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>,
* the instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* @return Returns a reference to this object so that method calls can be chained together.
* @see AutomationMode
*/
public PendingModifiedValues withAutomationMode(String automationMode) {
setAutomationMode(automationMode);
return this;
}
/**
* <p>
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>, the
* instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* </p>
*
* @param automationMode
* The automation mode of the RDS Custom DB instance: <code>full</code> or <code>all-paused</code>. If
* <code>full</code>, the DB instance automates monitoring and instance recovery. If <code>all-paused</code>,
* the instance pauses automation for the duration set by <code>--resume-full-automation-mode-minutes</code>.
* @return Returns a reference to this object so that method calls can be chained together.
* @see AutomationMode
*/
public PendingModifiedValues withAutomationMode(AutomationMode automationMode) {
this.automationMode = automationMode.toString();
return this;
}
/**
* <p>
* The number of minutes to pause the automation. When the time period ends, RDS Custom resumes full automation. The
* minimum value is 60 (default). The maximum value is 1,440.
* </p>
*
* @param resumeFullAutomationModeTime
* The number of minutes to pause the automation. When the time period ends, RDS Custom resumes full
* automation. The minimum value is 60 (default). The maximum value is 1,440.
*/
public void setResumeFullAutomationModeTime(java.util.Date resumeFullAutomationModeTime) {
this.resumeFullAutomationModeTime = resumeFullAutomationModeTime;
}
/**
* <p>
* The number of minutes to pause the automation. When the time period ends, RDS Custom resumes full automation. The
* minimum value is 60 (default). The maximum value is 1,440.
* </p>
*
* @return The number of minutes to pause the automation. When the time period ends, RDS Custom resumes full
* automation. The minimum value is 60 (default). The maximum value is 1,440.
*/
public java.util.Date getResumeFullAutomationModeTime() {
return this.resumeFullAutomationModeTime;
}
/**
* <p>
* The number of minutes to pause the automation. When the time period ends, RDS Custom resumes full automation. The
* minimum value is 60 (default). The maximum value is 1,440.
* </p>
*
* @param resumeFullAutomationModeTime
* The number of minutes to pause the automation. When the time period ends, RDS Custom resumes full
* automation. The minimum value is 60 (default). The maximum value is 1,440.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public PendingModifiedValues withResumeFullAutomationModeTime(java.util.Date resumeFullAutomationModeTime) {
setResumeFullAutomationModeTime(resumeFullAutomationModeTime);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDBInstanceClass() != null)
sb.append("DBInstanceClass: ").append(getDBInstanceClass()).append(",");
if (getAllocatedStorage() != null)
sb.append("AllocatedStorage: ").append(getAllocatedStorage()).append(",");
if (getMasterUserPassword() != null)
sb.append("MasterUserPassword: ").append(getMasterUserPassword()).append(",");
if (getPort() != null)
sb.append("Port: ").append(getPort()).append(",");
if (getBackupRetentionPeriod() != null)
sb.append("BackupRetentionPeriod: ").append(getBackupRetentionPeriod()).append(",");
if (getMultiAZ() != null)
sb.append("MultiAZ: ").append(getMultiAZ()).append(",");
if (getEngineVersion() != null)
sb.append("EngineVersion: ").append(getEngineVersion()).append(",");
if (getLicenseModel() != null)
sb.append("LicenseModel: ").append(getLicenseModel()).append(",");
if (getIops() != null)
sb.append("Iops: ").append(getIops()).append(",");
if (getDBInstanceIdentifier() != null)
sb.append("DBInstanceIdentifier: ").append(getDBInstanceIdentifier()).append(",");
if (getStorageType() != null)
sb.append("StorageType: ").append(getStorageType()).append(",");
if (getCACertificateIdentifier() != null)
sb.append("CACertificateIdentifier: ").append(getCACertificateIdentifier()).append(",");
if (getDBSubnetGroupName() != null)
sb.append("DBSubnetGroupName: ").append(getDBSubnetGroupName()).append(",");
if (getPendingCloudwatchLogsExports() != null)
sb.append("PendingCloudwatchLogsExports: ").append(getPendingCloudwatchLogsExports()).append(",");
if (getProcessorFeatures() != null)
sb.append("ProcessorFeatures: ").append(getProcessorFeatures()).append(",");
if (getIAMDatabaseAuthenticationEnabled() != null)
sb.append("IAMDatabaseAuthenticationEnabled: ").append(getIAMDatabaseAuthenticationEnabled()).append(",");
if (getAutomationMode() != null)
sb.append("AutomationMode: ").append(getAutomationMode()).append(",");
if (getResumeFullAutomationModeTime() != null)
sb.append("ResumeFullAutomationModeTime: ").append(getResumeFullAutomationModeTime());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof PendingModifiedValues == false)
return false;
PendingModifiedValues other = (PendingModifiedValues) obj;
if (other.getDBInstanceClass() == null ^ this.getDBInstanceClass() == null)
return false;
if (other.getDBInstanceClass() != null && other.getDBInstanceClass().equals(this.getDBInstanceClass()) == false)
return false;
if (other.getAllocatedStorage() == null ^ this.getAllocatedStorage() == null)
return false;
if (other.getAllocatedStorage() != null && other.getAllocatedStorage().equals(this.getAllocatedStorage()) == false)
return false;
if (other.getMasterUserPassword() == null ^ this.getMasterUserPassword() == null)
return false;
if (other.getMasterUserPassword() != null && other.getMasterUserPassword().equals(this.getMasterUserPassword()) == false)
return false;
if (other.getPort() == null ^ this.getPort() == null)
return false;
if (other.getPort() != null && other.getPort().equals(this.getPort()) == false)
return false;
if (other.getBackupRetentionPeriod() == null ^ this.getBackupRetentionPeriod() == null)
return false;
if (other.getBackupRetentionPeriod() != null && other.getBackupRetentionPeriod().equals(this.getBackupRetentionPeriod()) == false)
return false;
if (other.getMultiAZ() == null ^ this.getMultiAZ() == null)
return false;
if (other.getMultiAZ() != null && other.getMultiAZ().equals(this.getMultiAZ()) == false)
return false;
if (other.getEngineVersion() == null ^ this.getEngineVersion() == null)
return false;
if (other.getEngineVersion() != null && other.getEngineVersion().equals(this.getEngineVersion()) == false)
return false;
if (other.getLicenseModel() == null ^ this.getLicenseModel() == null)
return false;
if (other.getLicenseModel() != null && other.getLicenseModel().equals(this.getLicenseModel()) == false)
return false;
if (other.getIops() == null ^ this.getIops() == null)
return false;
if (other.getIops() != null && other.getIops().equals(this.getIops()) == false)
return false;
if (other.getDBInstanceIdentifier() == null ^ this.getDBInstanceIdentifier() == null)
return false;
if (other.getDBInstanceIdentifier() != null && other.getDBInstanceIdentifier().equals(this.getDBInstanceIdentifier()) == false)
return false;
if (other.getStorageType() == null ^ this.getStorageType() == null)
return false;
if (other.getStorageType() != null && other.getStorageType().equals(this.getStorageType()) == false)
return false;
if (other.getCACertificateIdentifier() == null ^ this.getCACertificateIdentifier() == null)
return false;
if (other.getCACertificateIdentifier() != null && other.getCACertificateIdentifier().equals(this.getCACertificateIdentifier()) == false)
return false;
if (other.getDBSubnetGroupName() == null ^ this.getDBSubnetGroupName() == null)
return false;
if (other.getDBSubnetGroupName() != null && other.getDBSubnetGroupName().equals(this.getDBSubnetGroupName()) == false)
return false;
if (other.getPendingCloudwatchLogsExports() == null ^ this.getPendingCloudwatchLogsExports() == null)
return false;
if (other.getPendingCloudwatchLogsExports() != null && other.getPendingCloudwatchLogsExports().equals(this.getPendingCloudwatchLogsExports()) == false)
return false;
if (other.getProcessorFeatures() == null ^ this.getProcessorFeatures() == null)
return false;
if (other.getProcessorFeatures() != null && other.getProcessorFeatures().equals(this.getProcessorFeatures()) == false)
return false;
if (other.getIAMDatabaseAuthenticationEnabled() == null ^ this.getIAMDatabaseAuthenticationEnabled() == null)
return false;
if (other.getIAMDatabaseAuthenticationEnabled() != null
&& other.getIAMDatabaseAuthenticationEnabled().equals(this.getIAMDatabaseAuthenticationEnabled()) == false)
return false;
if (other.getAutomationMode() == null ^ this.getAutomationMode() == null)
return false;
if (other.getAutomationMode() != null && other.getAutomationMode().equals(this.getAutomationMode()) == false)
return false;
if (other.getResumeFullAutomationModeTime() == null ^ this.getResumeFullAutomationModeTime() == null)
return false;
if (other.getResumeFullAutomationModeTime() != null && other.getResumeFullAutomationModeTime().equals(this.getResumeFullAutomationModeTime()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDBInstanceClass() == null) ? 0 : getDBInstanceClass().hashCode());
hashCode = prime * hashCode + ((getAllocatedStorage() == null) ? 0 : getAllocatedStorage().hashCode());
hashCode = prime * hashCode + ((getMasterUserPassword() == null) ? 0 : getMasterUserPassword().hashCode());
hashCode = prime * hashCode + ((getPort() == null) ? 0 : getPort().hashCode());
hashCode = prime * hashCode + ((getBackupRetentionPeriod() == null) ? 0 : getBackupRetentionPeriod().hashCode());
hashCode = prime * hashCode + ((getMultiAZ() == null) ? 0 : getMultiAZ().hashCode());
hashCode = prime * hashCode + ((getEngineVersion() == null) ? 0 : getEngineVersion().hashCode());
hashCode = prime * hashCode + ((getLicenseModel() == null) ? 0 : getLicenseModel().hashCode());
hashCode = prime * hashCode + ((getIops() == null) ? 0 : getIops().hashCode());
hashCode = prime * hashCode + ((getDBInstanceIdentifier() == null) ? 0 : getDBInstanceIdentifier().hashCode());
hashCode = prime * hashCode + ((getStorageType() == null) ? 0 : getStorageType().hashCode());
hashCode = prime * hashCode + ((getCACertificateIdentifier() == null) ? 0 : getCACertificateIdentifier().hashCode());
hashCode = prime * hashCode + ((getDBSubnetGroupName() == null) ? 0 : getDBSubnetGroupName().hashCode());
hashCode = prime * hashCode + ((getPendingCloudwatchLogsExports() == null) ? 0 : getPendingCloudwatchLogsExports().hashCode());
hashCode = prime * hashCode + ((getProcessorFeatures() == null) ? 0 : getProcessorFeatures().hashCode());
hashCode = prime * hashCode + ((getIAMDatabaseAuthenticationEnabled() == null) ? 0 : getIAMDatabaseAuthenticationEnabled().hashCode());
hashCode = prime * hashCode + ((getAutomationMode() == null) ? 0 : getAutomationMode().hashCode());
hashCode = prime * hashCode + ((getResumeFullAutomationModeTime() == null) ? 0 : getResumeFullAutomationModeTime().hashCode());
return hashCode;
}
@Override
public PendingModifiedValues clone() {
    // Object.clone() cannot legitimately fail here because this type implements
    // Cloneable; if it somehow does, surface it as an unchecked exception.
    try {
        return (PendingModifiedValues) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException(
                "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!",
                e);
    }
}
}
| |
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.logging.v2.stub;
import static com.google.cloud.logging.v2.PagedResponseWrappers.ListLogEntriesPagedResponse;
import static com.google.cloud.logging.v2.PagedResponseWrappers.ListLogsPagedResponse;
import static com.google.cloud.logging.v2.PagedResponseWrappers.ListMonitoredResourceDescriptorsPagedResponse;
import com.google.api.MonitoredResourceDescriptor;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.batching.BatchingSettings;
import com.google.api.gax.batching.FlowControlSettings;
import com.google.api.gax.batching.FlowController.LimitExceededBehavior;
import com.google.api.gax.batching.PartitionKey;
import com.google.api.gax.batching.RequestBuilder;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.BatchedRequestIssuer;
import com.google.api.gax.rpc.BatchingCallSettings;
import com.google.api.gax.rpc.BatchingDescriptor;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.logging.v2.DeleteLogRequest;
import com.google.logging.v2.ListLogEntriesRequest;
import com.google.logging.v2.ListLogEntriesResponse;
import com.google.logging.v2.ListLogsRequest;
import com.google.logging.v2.ListLogsResponse;
import com.google.logging.v2.ListMonitoredResourceDescriptorsRequest;
import com.google.logging.v2.ListMonitoredResourceDescriptorsResponse;
import com.google.logging.v2.LogEntry;
import com.google.logging.v2.WriteLogEntriesRequest;
import com.google.logging.v2.WriteLogEntriesResponse;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
* Settings class to configure an instance of {@link LoggingServiceV2Stub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (logging.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object. For
* example, to set the total timeout of deleteLog to 30 seconds:
*
* <pre>
* <code>
* LoggingServiceV2StubSettings.Builder loggingSettingsBuilder =
* LoggingServiceV2StubSettings.newBuilder();
* loggingSettingsBuilder.deleteLogSettings().getRetrySettingsBuilder()
* .setTotalTimeout(Duration.ofSeconds(30));
* LoggingServiceV2StubSettings loggingSettings = loggingSettingsBuilder.build();
* </code>
* </pre>
*/
@Generated("by GAPIC v0.0.5")
@BetaApi
public class LoggingServiceV2StubSettings extends StubSettings<LoggingServiceV2StubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/cloud-platform.read-only")
          .add("https://www.googleapis.com/auth/logging.admin")
          .add("https://www.googleapis.com/auth/logging.read")
          .add("https://www.googleapis.com/auth/logging.write")
          .build();

  // Immutable per-RPC call settings. Each is built from the corresponding
  // Builder field in the protected constructor below.
  private final UnaryCallSettings<DeleteLogRequest, Empty> deleteLogSettings;
  private final BatchingCallSettings<WriteLogEntriesRequest, WriteLogEntriesResponse>
      writeLogEntriesSettings;
  private final PagedCallSettings<
          ListLogEntriesRequest, ListLogEntriesResponse, ListLogEntriesPagedResponse>
      listLogEntriesSettings;
  private final PagedCallSettings<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          ListMonitoredResourceDescriptorsPagedResponse>
      listMonitoredResourceDescriptorsSettings;
  private final PagedCallSettings<ListLogsRequest, ListLogsResponse, ListLogsPagedResponse>
      listLogsSettings;

  /** Returns the object with the settings used for calls to deleteLog. */
  public UnaryCallSettings<DeleteLogRequest, Empty> deleteLogSettings() {
    return deleteLogSettings;
  }

  /** Returns the object with the settings used for calls to writeLogEntries. */
  public BatchingCallSettings<WriteLogEntriesRequest, WriteLogEntriesResponse>
      writeLogEntriesSettings() {
    return writeLogEntriesSettings;
  }

  /** Returns the object with the settings used for calls to listLogEntries. */
  public PagedCallSettings<
          ListLogEntriesRequest, ListLogEntriesResponse, ListLogEntriesPagedResponse>
      listLogEntriesSettings() {
    return listLogEntriesSettings;
  }

  /** Returns the object with the settings used for calls to listMonitoredResourceDescriptors. */
  public PagedCallSettings<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          ListMonitoredResourceDescriptorsPagedResponse>
      listMonitoredResourceDescriptorsSettings() {
    return listMonitoredResourceDescriptorsSettings;
  }

  /** Returns the object with the settings used for calls to listLogs. */
  public PagedCallSettings<ListLogsRequest, ListLogsResponse, ListLogsPagedResponse>
      listLogsSettings() {
    return listLogsSettings;
  }

  // Creates the concrete stub for the configured transport. Only gRPC is
  // supported by this generated surface; any other transport name fails fast.
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public LoggingServiceV2Stub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcLoggingServiceV2Stub.create(this);
    } else {
      throw new UnsupportedOperationException(
          "Transport not supported: " + getTransportChannelProvider().getTransportName());
    }
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "logging.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder();
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(LoggingServiceV2StubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Snapshots each Builder sub-setting into an immutable settings object.
  protected LoggingServiceV2StubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    deleteLogSettings = settingsBuilder.deleteLogSettings().build();
    writeLogEntriesSettings = settingsBuilder.writeLogEntriesSettings().build();
    listLogEntriesSettings = settingsBuilder.listLogEntriesSettings().build();
    listMonitoredResourceDescriptorsSettings =
        settingsBuilder.listMonitoredResourceDescriptorsSettings().build();
    listLogsSettings = settingsBuilder.listLogsSettings().build();
  }

  // Adapts ListLogEntries request/response pairs to gax's generic paging
  // machinery (token injection/extraction and per-page resource iteration).
  private static final PagedListDescriptor<ListLogEntriesRequest, ListLogEntriesResponse, LogEntry>
      LIST_LOG_ENTRIES_PAGE_STR_DESC =
          new PagedListDescriptor<ListLogEntriesRequest, ListLogEntriesResponse, LogEntry>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLogEntriesRequest injectToken(ListLogEntriesRequest payload, String token) {
              return ListLogEntriesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLogEntriesRequest injectPageSize(
                ListLogEntriesRequest payload, int pageSize) {
              return ListLogEntriesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLogEntriesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLogEntriesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<LogEntry> extractResources(ListLogEntriesResponse payload) {
              return payload.getEntriesList();
            }
          };

  // Paging adapter for ListMonitoredResourceDescriptors.
  private static final PagedListDescriptor<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          MonitoredResourceDescriptor>
      LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
              MonitoredResourceDescriptor>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListMonitoredResourceDescriptorsRequest injectToken(
                ListMonitoredResourceDescriptorsRequest payload, String token) {
              return ListMonitoredResourceDescriptorsRequest.newBuilder(payload)
                  .setPageToken(token)
                  .build();
            }

            @Override
            public ListMonitoredResourceDescriptorsRequest injectPageSize(
                ListMonitoredResourceDescriptorsRequest payload, int pageSize) {
              return ListMonitoredResourceDescriptorsRequest.newBuilder(payload)
                  .setPageSize(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(ListMonitoredResourceDescriptorsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListMonitoredResourceDescriptorsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<MonitoredResourceDescriptor> extractResources(
                ListMonitoredResourceDescriptorsResponse payload) {
              return payload.getResourceDescriptorsList();
            }
          };

  // Paging adapter for ListLogs; resources are plain log-name strings.
  private static final PagedListDescriptor<ListLogsRequest, ListLogsResponse, String>
      LIST_LOGS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLogsRequest, ListLogsResponse, String>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLogsRequest injectToken(ListLogsRequest payload, String token) {
              return ListLogsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLogsRequest injectPageSize(ListLogsRequest payload, int pageSize) {
              return ListLogsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLogsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLogsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<String> extractResources(ListLogsResponse payload) {
              return payload.getLogNamesList();
            }
          };

  // Factories that wrap a raw future response into the lazily-paging
  // *PagedResponse wrapper types using the descriptors above.
  private static final PagedListResponseFactory<
          ListLogEntriesRequest, ListLogEntriesResponse, ListLogEntriesPagedResponse>
      LIST_LOG_ENTRIES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLogEntriesRequest, ListLogEntriesResponse, ListLogEntriesPagedResponse>() {
            @Override
            public ApiFuture<ListLogEntriesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLogEntriesRequest, ListLogEntriesResponse> callable,
                ListLogEntriesRequest request,
                ApiCallContext context,
                ApiFuture<ListLogEntriesResponse> futureResponse) {
              PageContext<ListLogEntriesRequest, ListLogEntriesResponse, LogEntry> pageContext =
                  PageContext.create(callable, LIST_LOG_ENTRIES_PAGE_STR_DESC, request, context);
              return ListLogEntriesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          ListMonitoredResourceDescriptorsPagedResponse>
      LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
              ListMonitoredResourceDescriptorsPagedResponse>() {
            @Override
            public ApiFuture<ListMonitoredResourceDescriptorsPagedResponse> getFuturePagedResponse(
                UnaryCallable<
                        ListMonitoredResourceDescriptorsRequest,
                        ListMonitoredResourceDescriptorsResponse>
                    callable,
                ListMonitoredResourceDescriptorsRequest request,
                ApiCallContext context,
                ApiFuture<ListMonitoredResourceDescriptorsResponse> futureResponse) {
              PageContext<
                      ListMonitoredResourceDescriptorsRequest,
                      ListMonitoredResourceDescriptorsResponse, MonitoredResourceDescriptor>
                  pageContext =
                      PageContext.create(
                          callable,
                          LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC,
                          request,
                          context);
              return ListMonitoredResourceDescriptorsPagedResponse.createAsync(
                  pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListLogsRequest, ListLogsResponse, ListLogsPagedResponse>
      LIST_LOGS_PAGE_STR_FACT =
          new PagedListResponseFactory<ListLogsRequest, ListLogsResponse, ListLogsPagedResponse>() {
            @Override
            public ApiFuture<ListLogsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLogsRequest, ListLogsResponse> callable,
                ListLogsRequest request,
                ApiCallContext context,
                ApiFuture<ListLogsResponse> futureResponse) {
              PageContext<ListLogsRequest, ListLogsResponse, String> pageContext =
                  PageContext.create(callable, LIST_LOGS_PAGE_STR_DESC, request, context);
              return ListLogsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  // Describes how WriteLogEntries requests are batched: requests sharing the
  // same (logName, resource, labels) partition key are merged by concatenating
  // their entries onto the first request of the batch.
  private static final BatchingDescriptor<WriteLogEntriesRequest, WriteLogEntriesResponse>
      WRITE_LOG_ENTRIES_BATCHING_DESC =
          new BatchingDescriptor<WriteLogEntriesRequest, WriteLogEntriesResponse>() {
            @Override
            public PartitionKey getBatchPartitionKey(WriteLogEntriesRequest request) {
              return new PartitionKey(
                  request.getLogName(), request.getResource(), request.getLabelsMap());
            }

            @Override
            public RequestBuilder<WriteLogEntriesRequest> getRequestBuilder() {
              return new RequestBuilder<WriteLogEntriesRequest>() {
                // The first appended request seeds the builder (carrying log
                // name, resource, labels); later requests contribute entries only.
                private WriteLogEntriesRequest.Builder builder;

                @Override
                public void appendRequest(WriteLogEntriesRequest request) {
                  if (builder == null) {
                    builder = request.toBuilder();
                  } else {
                    builder.addAllEntries(request.getEntriesList());
                  }
                }

                @Override
                public WriteLogEntriesRequest build() {
                  return builder.build();
                }
              };
            }

            @Override
            public void splitResponse(
                WriteLogEntriesResponse batchResponse,
                Collection<? extends BatchedRequestIssuer<WriteLogEntriesResponse>> batch) {
              // The RPC response carries no per-entry results, so every issuer
              // in the batch simply receives a fresh empty response.
              // NOTE(review): batchMessageIndex is never read or incremented —
              // appears to be leftover codegen scaffolding.
              int batchMessageIndex = 0;
              for (BatchedRequestIssuer<WriteLogEntriesResponse> responder : batch) {
                WriteLogEntriesResponse response = WriteLogEntriesResponse.newBuilder().build();
                responder.setResponse(response);
              }
            }

            @Override
            public void splitException(
                Throwable throwable,
                Collection<? extends BatchedRequestIssuer<WriteLogEntriesResponse>> batch) {
              // A batch-level failure is propagated verbatim to every issuer.
              for (BatchedRequestIssuer<WriteLogEntriesResponse> responder : batch) {
                responder.setException(throwable);
              }
            }

            @Override
            public long countElements(WriteLogEntriesRequest request) {
              return request.getEntriesCount();
            }

            @Override
            public long countBytes(WriteLogEntriesRequest request) {
              return request.getSerializedSize();
            }
          };

  /** Builder for LoggingServiceV2StubSettings. */
  public static class Builder extends StubSettings.Builder<LoggingServiceV2StubSettings, Builder> {
    // Collected view of all unary settings builders, used by
    // applyToAllUnaryMethods() to update every method in one pass.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<DeleteLogRequest, Empty> deleteLogSettings;
    private final BatchingCallSettings.Builder<WriteLogEntriesRequest, WriteLogEntriesResponse>
        writeLogEntriesSettings;
    private final PagedCallSettings.Builder<
            ListLogEntriesRequest, ListLogEntriesResponse, ListLogEntriesPagedResponse>
        listLogEntriesSettings;
    private final PagedCallSettings.Builder<
            ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
            ListMonitoredResourceDescriptorsPagedResponse>
        listMonitoredResourceDescriptorsSettings;
    private final PagedCallSettings.Builder<
            ListLogsRequest, ListLogsResponse, ListLogsPagedResponse>
        listLogsSettings;

    // Named sets of status codes that are considered retryable, referenced by
    // name ("idempotent" / "non_idempotent") from initDefaults().
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "idempotent",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED,
                  StatusCode.Code.INTERNAL,
                  StatusCode.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry/backoff parameter profiles ("default" and "list"),
    // referenced by name from initDefaults().
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.2)
              .setMaxRetryDelay(Duration.ofMillis(1000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.5)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(90000L))
              .build();
      definitions.put("default", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.2)
              .setMaxRetryDelay(Duration.ofMillis(1000L))
              .setInitialRpcTimeout(Duration.ofMillis(2000L))
              .setRpcTimeoutMultiplier(1.5)
              .setMaxRpcTimeout(Duration.ofMillis(10000L))
              .setTotalTimeout(Duration.ofMillis(20000L))
              .build();
      definitions.put("list", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);
      deleteLogSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      writeLogEntriesSettings =
          BatchingCallSettings.newBuilder(WRITE_LOG_ENTRIES_BATCHING_DESC)
              .setBatchingSettings(BatchingSettings.newBuilder().build());
      listLogEntriesSettings = PagedCallSettings.newBuilder(LIST_LOG_ENTRIES_PAGE_STR_FACT);
      listMonitoredResourceDescriptorsSettings =
          PagedCallSettings.newBuilder(LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_FACT);
      listLogsSettings = PagedCallSettings.newBuilder(LIST_LOGS_PAGE_STR_FACT);
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              deleteLogSettings,
              writeLogEntriesSettings,
              listLogEntriesSettings,
              listMonitoredResourceDescriptorsSettings,
              listLogsSettings);
      initDefaults(this);
    }

    // Builds a Builder pre-populated with the service's default transport,
    // credentials, headers, endpoint, and per-method retry defaults.
    private static Builder createDefault() {
      Builder builder = new Builder((ClientContext) null);
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      return initDefaults(builder);
    }

    // Applies the service-defined retry codes, retry params, and batching
    // thresholds to each method's settings builder.
    private static Builder initDefaults(Builder builder) {
      builder
          .deleteLogSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .writeLogEntriesSettings()
          .setBatchingSettings(
              BatchingSettings.newBuilder()
                  .setElementCountThreshold(1000L)
                  .setRequestByteThreshold(1048576L)
                  .setDelayThreshold(Duration.ofMillis(50))
                  .setFlowControlSettings(
                      FlowControlSettings.newBuilder()
                          .setMaxOutstandingElementCount(100000L)
                          .setMaxOutstandingRequestBytes(10485760L)
                          .setLimitExceededBehavior(LimitExceededBehavior.ThrowException)
                          .build())
                  .build());
      builder
          .writeLogEntriesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .listLogEntriesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("list"));
      builder
          .listMonitoredResourceDescriptorsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .listLogsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));
      return builder;
    }

    // Copy constructor used by toBuilder(): converts each immutable setting
    // back into its builder form.
    protected Builder(LoggingServiceV2StubSettings settings) {
      super(settings);
      deleteLogSettings = settings.deleteLogSettings.toBuilder();
      writeLogEntriesSettings = settings.writeLogEntriesSettings.toBuilder();
      listLogEntriesSettings = settings.listLogEntriesSettings.toBuilder();
      listMonitoredResourceDescriptorsSettings =
          settings.listMonitoredResourceDescriptorsSettings.toBuilder();
      listLogsSettings = settings.listLogsSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              deleteLogSettings,
              writeLogEntriesSettings,
              listLogEntriesSettings,
              listMonitoredResourceDescriptorsSettings,
              listLogsSettings);
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to deleteLog. */
    public UnaryCallSettings.Builder<DeleteLogRequest, Empty> deleteLogSettings() {
      return deleteLogSettings;
    }

    /** Returns the builder for the settings used for calls to writeLogEntries. */
    public BatchingCallSettings.Builder<WriteLogEntriesRequest, WriteLogEntriesResponse>
        writeLogEntriesSettings() {
      return writeLogEntriesSettings;
    }

    /** Returns the builder for the settings used for calls to listLogEntries. */
    public PagedCallSettings.Builder<
            ListLogEntriesRequest, ListLogEntriesResponse, ListLogEntriesPagedResponse>
        listLogEntriesSettings() {
      return listLogEntriesSettings;
    }

    /** Returns the builder for the settings used for calls to listMonitoredResourceDescriptors. */
    public PagedCallSettings.Builder<
            ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
            ListMonitoredResourceDescriptorsPagedResponse>
        listMonitoredResourceDescriptorsSettings() {
      return listMonitoredResourceDescriptorsSettings;
    }

    /** Returns the builder for the settings used for calls to listLogs. */
    public PagedCallSettings.Builder<ListLogsRequest, ListLogsResponse, ListLogsPagedResponse>
        listLogsSettings() {
      return listLogsSettings;
    }

    @Override
    public LoggingServiceV2StubSettings build() throws IOException {
      return new LoggingServiceV2StubSettings(this);
    }
  }
}
| |
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.kernel.slot;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.andes.configuration.AndesConfigurationManager;
import org.wso2.andes.configuration.enums.AndesConfiguration;
import org.wso2.andes.kernel.AndesException;
import org.wso2.andes.kernel.AndesMessage;
import org.wso2.andes.kernel.AndesMessageMetadata;
import org.wso2.andes.server.cluster.coordination.hazelcast.HazelcastAgent;
import org.wso2.andes.thrift.MBThriftClient;
import org.wso2.andes.thrift.slot.gen.SlotManagementService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
/**
* This class is responsible of counting messages in a slot for each queue
*/
/**
 * Counts the messages accumulated in the current in-memory slot of each storage queue and
 * submits a slot to the slot coordinator once it is full ({@code slotWindowSize} messages)
 * or has been held in memory longer than {@code timeOutForMessagesInQueue} milliseconds.
 *
 * <p>Singleton; obtain via {@link #getInstance()}.
 */
public class SlotMessageCounter {

    // storage queue name -> slot currently being filled for that queue
    private ConcurrentHashMap<String, Slot> queueToSlotMap = new ConcurrentHashMap<String, Slot>();

    // storage queue name -> wall-clock time (ms) at which the queue's current slot was opened
    private ConcurrentHashMap<String, Long> slotTimeOutMap = new ConcurrentHashMap<String, Long>();

    /**
     * Timeout in milliseconds for messages in the slot. When this timeout is exceeded slot will be
     * submitted to the coordinator
     */
    private Long timeOutForMessagesInQueue;

    private Timer submitSlotToCoordinatorTimer = new Timer();

    private Log log = LogFactory.getLog(SlotMessageCounter.class);

    private static SlotMessageCounter slotMessageCounter = new SlotMessageCounter();

    // Maximum number of messages a slot may hold before it is submitted
    private Integer slotWindowSize;

    // Latest safe zone message id reported for this node (see updateSafeZoneForNode)
    private long currentSlotDeleteSafeZone;

    /** Keep track of how many update loops
     * are skipped without messages. */
    private int slotSubmitLoopSkipCount;

    private static final int SLOT_SUBMIT_LOOP_SKIP_COUNT_THRESHOLD = 10;

    private SlotMessageCounter() {
        // Read configuration BEFORE scheduling the timer: the original order scheduled
        // the periodic task first, leaving a window in which the task could observe a
        // null timeOutForMessagesInQueue if configuration reading was slow.
        slotWindowSize = AndesConfigurationManager.readValue
                (AndesConfiguration.PERFORMANCE_TUNING_SLOTS_SLOT_WINDOW_SIZE);
        timeOutForMessagesInQueue = AndesConfigurationManager.readValue
                (AndesConfiguration.PERFORMANCE_TUNING_SLOTS_SLOT_RETAIN_TIME_IN_MEMORY);
        slotSubmitLoopSkipCount = 0;
        scheduleSubmitSlotToCoordinatorTimer();
    }

    /**
     * Schedules the periodic task (initial delay 4s, period 3s) that submits any slot whose
     * retain-time has elapsed, and pushes the slot-deletion safe zone after
     * {@link #SLOT_SUBMIT_LOOP_SKIP_COUNT_THRESHOLD} consecutive idle iterations.
     */
    private void scheduleSubmitSlotToCoordinatorTimer() {
        submitSlotToCoordinatorTimer.scheduleAtFixedRate(new TimerTask() {
            public void run() {
                Set<Map.Entry<String, Long>> slotTimeoutEntries = slotTimeOutMap.entrySet();
                for (Map.Entry<String, Long> entry : slotTimeoutEntries) {
                    if ((System.currentTimeMillis() - entry
                            .getValue()) > timeOutForMessagesInQueue) {
                        try {
                            submitSlot(entry.getKey());
                        } catch (AndesException e) {
                            // Best effort only: this task runs again in 3 seconds.
                            log.error(
                                    "Error occurred while connecting to the thrift coordinator " + e
                                            .getMessage(), e);
                        }
                    }
                }
                if (slotTimeoutEntries.isEmpty()) {
                    slotSubmitLoopSkipCount += 1;
                    if (slotSubmitLoopSkipCount == SLOT_SUBMIT_LOOP_SKIP_COUNT_THRESHOLD) {
                        // update current slot Deletion Safe Zone
                        submitCurrentSafeZone(currentSlotDeleteSafeZone);
                        slotSubmitLoopSkipCount = 0;
                    }
                }
            }
        }, 4000, 3000);
    }

    /**
     * Record metadata count in the current slot related to a particular queue.
     * When a queue's slot reaches {@code slotWindowSize} messages it is submitted immediately;
     * otherwise the timer task will submit it after the retain timeout.
     *
     * @param messageList AndesMessage list to be record
     */
    public void recordMetaDataCountInSlot(List<AndesMessage> messageList) {
        for (AndesMessage message : messageList) {
            String storageQueueName = message.getMetadata().getStorageQueueName();
            Slot currentSlot = updateQueueToSlotMap(message.getMetadata());
            if (currentSlot.getMessageCount() >= slotWindowSize) {
                try {
                    submitSlot(storageQueueName);
                } catch (AndesException e) {
                    // Best effort only: the timeout timer task will retry this submission.
                    log.error("Error occurred while connecting to the thrift coordinator " + e
                            .getMessage(), e);
                }
            }
        }
    }

    /**
     * Report this node's slot-deletion safe zone to the coordinator via thrift.
     *
     * @param currentSlotDeleteSafeZone safe zone message id to publish for this node
     */
    private void submitCurrentSafeZone(long currentSlotDeleteSafeZone) {
        String nodeId = HazelcastAgent.getInstance().getNodeId();
        MBThriftClient.updateSlotDeletionSafeZone(currentSlotDeleteSafeZone, nodeId);
        if (log.isDebugEnabled()) {
            // Fixed: previously logged at INFO level inside an isDebugEnabled() guard,
            // so the message appeared at INFO yet only when DEBUG was enabled.
            log.debug("Submitted safe zone from node : " + nodeId + " | safe zone : " +
                    currentSlotDeleteSafeZone);
        }
    }

    /**
     * Update in-memory queue to slot map. This method is is not synchronized. Single publisher should access this.
     * Ideally through a disruptor event handler
     * @param metadata Andes metadata whose ID needs to be reported to SlotManager
     * @return Current slot which this metadata belongs to
     */
    private Slot updateQueueToSlotMap(AndesMessageMetadata metadata) {
        String storageQueueName = metadata.getStorageQueueName();
        Slot currentSlot = queueToSlotMap.get(storageQueueName);
        if (currentSlot == null) {
            // First message for this queue since the last submission: open a new slot
            // and remember its creation time for the timeout check.
            currentSlot = new Slot();
            currentSlot.setStartMessageId(metadata.getMessageID());
            currentSlot.setEndMessageId(metadata.getMessageID());
            currentSlot.setMessageCount(1L);
            queueToSlotMap.put(storageQueueName, currentSlot);
            slotTimeOutMap.put(storageQueueName, System.currentTimeMillis());
        } else {
            // Extend the existing slot: bump the count and advance the end message id.
            currentSlot.setMessageCount(currentSlot.getMessageCount() + 1);
            currentSlot.setEndMessageId(metadata.getMessageID());
            queueToSlotMap.put(storageQueueName, currentSlot);
        }
        return currentSlot;
    }

    /**
     * Submit last message ID in the slot to SlotManager.
     *
     * @param storageQueueName name of the queue which this slot belongs to
     * @throws AndesException on errors other than thrift connection failures
     */
    public void submitSlot(String storageQueueName) throws AndesException {
        Slot slot = queueToSlotMap.get(storageQueueName);
        if (null != slot) {
            try {
                MBThriftClient.updateMessageId(storageQueueName, HazelcastAgent.getInstance().getNodeId(), slot.getStartMessageId(), slot.getEndMessageId());
                // Only forget the slot once the coordinator accepted it; on connection
                // failure it stays in the maps so the timer task can retry.
                queueToSlotMap.remove(storageQueueName);
                slotTimeOutMap.remove(storageQueueName);
            } catch (ConnectionException e) {
                // Best effort only: the timer task retries every 3 seconds.
                log.error("Error occurred while connecting to the thrift coordinator " + e
                        .getMessage(), e);
            }
        }
    }

    /**
     * Update the safe zone (minimum deletable message id) recorded for this node.
     *
     * @param currentSafeZoneVal new safe zone message id
     */
    public void updateSafeZoneForNode(long currentSafeZoneVal) {
        currentSlotDeleteSafeZone = currentSafeZoneVal;
    }

    /**
     * @return SlotMessageCounter instance
     */
    public static SlotMessageCounter getInstance() {
        return slotMessageCounter;
    }
}
| |
/*
* Copyright 2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.spockframework.compiler;
import java.util.*;
import org.codehaus.groovy.ast.*;
import org.codehaus.groovy.ast.expr.*;
import org.codehaus.groovy.ast.stmt.*;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.syntax.Token;
import org.codehaus.groovy.syntax.Types;
import org.objectweb.asm.Opcodes;
import org.spockframework.compiler.model.WhereBlock;
import org.spockframework.runtime.model.DataProviderMetadata;
import org.spockframework.util.*;
/**
*
* @author Peter Niederwieser
*/
/**
 * Rewrites the where-block of a feature method: each parameterization becomes a
 * generated synthetic data provider method, and all data variable assignments
 * are collected into a single generated data processor method, both added to
 * the spec class.
 *
 * @author Peter Niederwieser
 */
public class WhereBlockRewriter {
  private final WhereBlock whereBlock;
  private final IRewriteResources resources;
  private final InstanceFieldAccessChecker instanceFieldAccessChecker;
  // number of data provider methods generated so far; also used to name the next one
  private int dataProviderCount = 0;
  // parameters of the data processor method (one for each data provider)
  private final List<Parameter> dataProcessorParams = new ArrayList<Parameter>();
  // statements of the data processor method (one for each parameterization variable)
  private final List<Statement> dataProcessorStats = new ArrayList<Statement>();
  // parameterization variables of the data processor method
  private final List<VariableExpression> dataProcessorVars = new ArrayList<VariableExpression>();

  private WhereBlockRewriter(WhereBlock whereBlock, IRewriteResources resources) {
    this.whereBlock = whereBlock;
    this.resources = resources;
    instanceFieldAccessChecker = new InstanceFieldAccessChecker(resources);
  }

  /** Entry point: rewrites the given where-block in place. */
  public static void rewrite(WhereBlock block, IRewriteResources resources) {
    new WhereBlockRewriter(block, resources).rewrite();
  }

  private void rewrite() {
    ListIterator<Statement> stats = whereBlock.getAst().listIterator();
    while (stats.hasNext())
      try {
        rewriteWhereStat(stats);
      } catch (InvalidSpecCompileException e) {
        // report and continue, so multiple problems can be surfaced in one pass
        resources.getErrorReporter().error(e);
      }
    // every original statement has been consumed; empty the block
    whereBlock.getAst().clear();
    handleFeatureParameters();
    createDataProcessorMethod();
  }

  /**
   * Classifies and rewrites the next where-block statement. Recognized forms:
   * {@code x << provider} (simple), {@code [x, y] << provider} (multi),
   * {@code x = expr} (derived), and '|'-separated rows (table-like).
   * Anything else is reported as "not a parameterization".
   */
  private void rewriteWhereStat(ListIterator<Statement> stats) throws InvalidSpecCompileException {
    Statement stat = stats.next();
    BinaryExpression binExpr = AstUtil.getExpression(stat, BinaryExpression.class);
    if (binExpr == null || binExpr.getClass() != BinaryExpression.class) // don't allow subclasses like DeclarationExpression
      notAParameterization(stat);
    @SuppressWarnings("ConstantConditions")
    int type = binExpr.getOperation().getType();
    if (type == Types.LEFT_SHIFT) {
      Expression leftExpr = binExpr.getLeftExpression();
      if (leftExpr instanceof VariableExpression)
        rewriteSimpleParameterization(binExpr, stat);
      else if (leftExpr instanceof ListExpression)
        rewriteMultiParameterization(binExpr, stat);
      else notAParameterization(stat);
    } else if (type == Types.ASSIGN)
      rewriteDerivedParameterization(binExpr, stat);
    else if (getOrExpression(binExpr) != null) {
      // a table spans several consecutive statements; push this one back so the
      // table rewriter can consume the whole run of rows
      stats.previous();
      rewriteTableLikeParameterization(stats);
    }
    else notAParameterization(stat);
  }

  /**
   * Generates a public synthetic no-arg method on the spec class that returns
   * the data provider expression, annotated with @DataProviderMetadata.
   */
  private void createDataProviderMethod(Expression dataProviderExpr, int nextDataVariableIndex) {
    // presumably rejects instance field access inside provider expressions — see InstanceFieldAccessChecker
    instanceFieldAccessChecker.check(dataProviderExpr);
    MethodNode method =
        new MethodNode(
            InternalIdentifiers.getDataProviderName(whereBlock.getParent().getAst().getName(), dataProviderCount++),
            Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
            ClassHelper.OBJECT_TYPE,
            Parameter.EMPTY_ARRAY,
            ClassNode.EMPTY_ARRAY,
            new BlockStatement(
                Arrays.<Statement> asList(
                    new ReturnStatement(
                        new ExpressionStatement(dataProviderExpr))),
                new VariableScope()));
    method.addAnnotation(createDataProviderAnnotation(dataProviderExpr, nextDataVariableIndex));
    whereBlock.getParent().getParent().getAst().addMethod(method);
  }

  /**
   * Builds the @DataProviderMetadata annotation recording the provider
   * expression's line number and the names of the data variables it supplies.
   */
  private AnnotationNode createDataProviderAnnotation(Expression dataProviderExpr, int nextDataVariableIndex) {
    AnnotationNode ann = new AnnotationNode(resources.getAstNodeCache().DataProviderMetadata);
    ann.addMember(DataProviderMetadata.LINE, new ConstantExpression(dataProviderExpr.getLineNumber()));
    List<Expression> dataVariableNames = new ArrayList<Expression>();
    // the variables added since nextDataVariableIndex belong to this provider
    for (int i = nextDataVariableIndex; i < dataProcessorVars.size(); i++)
      dataVariableNames.add(new ConstantExpression(dataProcessorVars.get(i).getName()));
    ann.addMember(DataProviderMetadata.DATA_VARIABLES, new ListExpression(dataVariableNames));
    return ann;
  }

  // Creates and records the next dynamically-typed parameter of the data
  // processor method; parameters are named $spock_p0, $spock_p1, ...
  private Parameter createDataProcessorParameter() {
    Parameter p = new Parameter(ClassHelper.DYNAMIC_TYPE, "$spock_p" + dataProcessorParams.size());
    dataProcessorParams.add(p);
    return p;
  }

  // generates: arg = argMethodParam
  private void rewriteSimpleParameterization(BinaryExpression binExpr, ASTNode sourcePos)
      throws InvalidSpecCompileException {
    int nextDataVariableIndex = dataProcessorVars.size();
    Parameter dataProcessorParameter = createDataProcessorParameter();
    VariableExpression arg = (VariableExpression) binExpr.getLeftExpression();
    VariableExpression dataVar = createDataProcessorVariable(arg, sourcePos);
    ExpressionStatement exprStat = new ExpressionStatement(
        new DeclarationExpression(
            dataVar,
            Token.newSymbol(Types.ASSIGN, -1, -1),
            new VariableExpression(dataProcessorParameter)));
    exprStat.setSourcePosition(sourcePos);
    dataProcessorStats.add(exprStat);
    createDataProviderMethod(binExpr.getRightExpression(), nextDataVariableIndex);
  }

  // generates:
  // arg0 = argMethodParam.getAt(0)
  // arg1 = argMethodParam.getAt(1)
  private void rewriteMultiParameterization(BinaryExpression binExpr, Statement enclosingStat)
      throws InvalidSpecCompileException {
    int nextDataVariableIndex = dataProcessorVars.size();
    Parameter dataProcessorParameter = createDataProcessorParameter();
    ListExpression list = (ListExpression) binExpr.getLeftExpression();
    @SuppressWarnings("unchecked")
    List<Expression> listElems = list.getExpressions();
    for (int i = 0; i < listElems.size(); i++) {
      Expression listElem = listElems.get(i);
      // wildcard ('_') entries are placeholders; no data variable is created for them
      if (AstUtil.isWildcardRef(listElem)) continue;
      VariableExpression dataVar = createDataProcessorVariable(listElem, enclosingStat);
      ExpressionStatement exprStat =
          new ExpressionStatement(
              new DeclarationExpression(
                  dataVar,
                  Token.newSymbol(Types.ASSIGN, -1, -1),
                  new MethodCallExpression(
                      new VariableExpression(dataProcessorParameter),
                      "getAt",
                      new ConstantExpression(i))));
      exprStat.setSourcePosition(enclosingStat);
      dataProcessorStats.add(exprStat);
    }
    createDataProviderMethod(binExpr.getRightExpression(), nextDataVariableIndex);
  }

  // generates: arg = rhs
  // A derived variable is computed from the right-hand expression; note that no
  // data provider method is created for it.
  private void rewriteDerivedParameterization(BinaryExpression parameterization, Statement enclosingStat)
      throws InvalidSpecCompileException {
    VariableExpression dataVar = createDataProcessorVariable(parameterization.getLeftExpression(), enclosingStat);
    ExpressionStatement exprStat =
        new ExpressionStatement(
            new DeclarationExpression(
                dataVar,
                Token.newSymbol(Types.ASSIGN, -1, -1),
                parameterization.getRightExpression()));
    exprStat.setSourcePosition(enclosingStat);
    dataProcessorStats.add(exprStat);
  }

  /**
   * Consumes the run of consecutive '|'-separated row statements, validates
   * that every row has the same number of cells as the previous one, then
   * rewrites each column as a simple parameterization.
   */
  private void rewriteTableLikeParameterization(ListIterator<Statement> stats) throws InvalidSpecCompileException {
    LinkedList<List<Expression>> rows = new LinkedList<List<Expression>>();
    while (stats.hasNext()) {
      Statement stat = stats.next();
      BinaryExpression orExpr = getOrExpression(stat);
      if (orExpr == null) {
        // not a table row; push it back for the regular rewriting path
        stats.previous();
        break;
      }
      List<Expression> row = new ArrayList<Expression>();
      splitRow(orExpr, row);
      if (rows.size() > 0 && rows.getLast().size() != row.size())
        throw new InvalidSpecCompileException(stat, String.format("Row in data table has wrong number of elements (%s instead of %s)", row.size(), rows.getLast().size()));
      rows.add(row);
    }
    for (List<Expression> column : transposeTable(rows))
      turnIntoSimpleParameterization(column);
  }

  /**
   * Transposes a rectangular row-major table into column-major form.
   * Returns an empty list for an empty table.
   */
  List<List<Expression>> transposeTable(List<List<Expression>> rows) {
    List<List<Expression>> columns = new ArrayList<List<Expression>>();
    if (rows.isEmpty()) return columns;
    // the first row fixes the column count; rows were already width-checked
    for (int i = 0; i < rows.get(0).size(); i++)
      columns.add(new ArrayList<Expression>());
    for (List<Expression> row : rows)
      for (int i = 0; i < row.size(); i++)
        columns.get(i).add(row.get(i));
    return columns;
  }

  /**
   * Rewrites one table column (header cell followed by data cells) into the
   * equivalent 'header << [cells...]' simple parameterization.
   */
  private void turnIntoSimpleParameterization(List<Expression> column) throws InvalidSpecCompileException {
    VariableExpression varExpr = ObjectUtil.asInstance(column.get(0), VariableExpression.class);
    if (varExpr == null)
      throw new InvalidSpecCompileException(column.get(0),
          "Header of data table may only contain variable names");
    if (AstUtil.isWildcardRef(varExpr)){
      return; // ignore wildcards - _ can be used as placeholder for pseudo-second-column in one-column data
    }
    ListExpression listExpr = new ListExpression(column.subList(1, column.size()));
    BinaryExpression binExpr = new BinaryExpression(varExpr, Token.newSymbol(Types.LEFT_SHIFT, -1, -1), listExpr);
    // NOTE: varExpr may not be the "perfect" source position here, but as long as we rewrite data tables
    // into simple parameterizations, it seems like the best approximation; also this source position is
    // unlikely to make it into a compile error, because header variable has already been checked, and the
    // assignment itself is unlikely to cause a compile error. (It's more likely that the rval causes a
    // compile error, but the rval's source position is retained.)
    rewriteSimpleParameterization(binExpr, varExpr);
  }

  // Recursively flattens a '|'-separated row expression into its cells,
  // preserving left-to-right order.
  private void splitRow(Expression row, List<Expression> parts) {
    BinaryExpression orExpr = getOrExpression(row);
    if (orExpr == null)
      parts.add(row);
    else {
      splitRow(orExpr.getLeftExpression(), parts);
      splitRow(orExpr.getRightExpression(), parts);
    }
  }

  // Returns the statement's expression as a '|'/'||' binary expression,
  // or null if the statement does not have that shape.
  private BinaryExpression getOrExpression(Statement stat) {
    Expression expr = AstUtil.getExpression(stat, Expression.class);
    return getOrExpression(expr);
  }

  // Returns expr if it is a '|' (BITWISE_OR) or '||' (LOGICAL_OR) binary
  // expression, and null otherwise.
  private BinaryExpression getOrExpression(Expression expr) {
    BinaryExpression binExpr = ObjectUtil.asInstance(expr, BinaryExpression.class);
    if (binExpr == null) return null;
    int binExprType = binExpr.getOperation().getType();
    if (binExprType == Types.BITWISE_OR || binExprType == Types.LOGICAL_OR) return binExpr;
    return null;
  }

  /**
   * Creates and records a fresh data processor variable mirroring the name and
   * type of the given variable expression; anything that is not a variable
   * reference is rejected as "not a parameterization".
   */
  private VariableExpression createDataProcessorVariable(Expression varExpr, ASTNode sourcePos)
      throws InvalidSpecCompileException {
    if (!(varExpr instanceof VariableExpression))
      notAParameterization(sourcePos);
    VariableExpression typedVarExpr = (VariableExpression)varExpr;
    verifyDataProcessorVariable(typedVarExpr);
    VariableExpression result = new VariableExpression(typedVarExpr.getName(), typedVarExpr.getType());
    dataProcessorVars.add(result);
    return result;
  }

  // Reports (but does not abort on): name clashes with local variables,
  // duplicate data variable declarations, and data variables missing from an
  // explicitly declared feature method parameter list.
  private void verifyDataProcessorVariable(VariableExpression varExpr) {
    Variable accessedVar = varExpr.getAccessedVariable();
    if (accessedVar instanceof VariableExpression) { // local variable
      resources.getErrorReporter().error(varExpr, "A variable named '%s' already exists in this scope", varExpr.getName());
      return;
    }
    if (getDataProcessorVariable(varExpr.getName()) != null) {
      resources.getErrorReporter().error(varExpr, "Duplicate declaration of data variable '%s'", varExpr.getName());
      return;
    }
    if (whereBlock.getParent().getAst().getParameters().length > 0 && !(accessedVar instanceof Parameter)) {
      resources.getErrorReporter().error(varExpr,
          "Data variable '%s' needs to be declared as method parameter",
          varExpr.getName());
    }
  }

  // Looks up a previously created data processor variable by name; null if absent.
  private VariableExpression getDataProcessorVariable(String name) {
    for (VariableExpression var : dataProcessorVars)
      if (var.getName().equals(name)) return var;
    return null;
  }

  // If the feature method declares no parameters, add one per data variable;
  // otherwise verify every declared parameter refers to a data variable.
  private void handleFeatureParameters() {
    Parameter[] parameters = whereBlock.getParent().getAst().getParameters();
    if (parameters.length == 0)
      addFeatureParameters();
    else
      checkAllParametersAreDataVariables(parameters);
  }

  private void checkAllParametersAreDataVariables(Parameter[] parameters) {
    for (Parameter param : parameters)
      if (getDataProcessorVariable(param.getName()) == null)
        resources.getErrorReporter().error(param, "Parameter '%s' does not refer to a data variable", param.getName());
  }

  // Gives the feature method one dynamically typed parameter per data variable.
  private void addFeatureParameters() {
    Parameter[] parameters = new Parameter[dataProcessorVars.size()];
    for (int i = 0; i < dataProcessorVars.size(); i++)
      parameters[i] = new Parameter(ClassHelper.DYNAMIC_TYPE, dataProcessorVars.get(i).getName());
    whereBlock.getParent().getAst().setParameters(parameters);
  }

  /**
   * Emits the data processor method: it declares every data variable from the
   * provider-supplied parameters and returns them all in an Object[].
   * Skipped entirely when the where-block declared no data variables.
   */
  @SuppressWarnings("unchecked")
  private void createDataProcessorMethod() {
    if (dataProcessorVars.isEmpty()) return;
    dataProcessorStats.add(
        new ReturnStatement(
            new ArrayExpression(
                ClassHelper.OBJECT_TYPE,
                (List) dataProcessorVars)));
    BlockStatement blockStat = new BlockStatement(dataProcessorStats, new VariableScope());
    new DataProcessorVariableRewriter().visitBlockStatement(blockStat);
    whereBlock.getParent().getParent().getAst().addMethod(
        new MethodNode(
            InternalIdentifiers.getDataProcessorName(whereBlock.getParent().getAst().getName()),
            Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
            ClassHelper.OBJECT_TYPE,
            dataProcessorParams.toArray(new Parameter[dataProcessorParams.size()]),
            ClassNode.EMPTY_ARRAY,
            blockStat));
  }

  // Uniform "this isn't a parameterization" compile error.
  private static void notAParameterization(ASTNode stat) throws InvalidSpecCompileException {
    throw new InvalidSpecCompileException(stat,
        "where-blocks may only contain parameterizations (e.g. 'salary << [1000, 5000, 9000]; salaryk = salary / 1000')");
  }

  /**
   * Visits the generated data processor statements and fixes up the variable
   * scopes via AstUtil.fixUpLocalVariables so the freshly declared data
   * variables are resolved there (presumably rebinding references — see AstUtil).
   */
  private class DataProcessorVariableRewriter extends ClassCodeVisitorSupport {
    @Override
    protected SourceUnit getSourceUnit() {
      // not needed by this visitor; fail fast if it is ever requested
      throw new UnsupportedOperationException("getSourceUnit");
    }
    @Override
    public void visitClosureExpression(ClosureExpression expr) {
      super.visitClosureExpression(expr);
      AstUtil.fixUpLocalVariables(dataProcessorVars, expr.getVariableScope(), true);
    }
    @Override
    public void visitBlockStatement(BlockStatement stat) {
      super.visitBlockStatement(stat);
      AstUtil.fixUpLocalVariables(dataProcessorVars, stat.getVariableScope(), false);
    }
  }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
/**
* <p>
* List of option groups.
* </p>
*/
/**
 * <p>
 * List of option groups.
 * </p>
 */
public class DescribeOptionGroupsResult implements Serializable, Cloneable {

    // Lazily-initialized list of option groups returned by the call.
    private com.amazonaws.internal.SdkInternalList<OptionGroup> optionGroupsList;

    // Optional pagination token provided by a previous request; when set, the
    // response includes only records beyond the marker, up to MaxRecords.
    private String marker;

    /**
     * Returns the list of option groups, creating an empty internal list on
     * first access so this method never returns {@code null}.
     *
     * @return List of option groups.
     */
    public java.util.List<OptionGroup> getOptionGroupsList() {
        if (this.optionGroupsList == null) {
            this.optionGroupsList = new com.amazonaws.internal.SdkInternalList<OptionGroup>();
        }
        return this.optionGroupsList;
    }

    /**
     * Replaces the list of option groups with a copy of the given collection,
     * or clears it when {@code null} is passed.
     *
     * @param optionGroupsList List of option groups.
     */
    public void setOptionGroupsList(
            java.util.Collection<OptionGroup> optionGroupsList) {
        this.optionGroupsList = (optionGroupsList == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<OptionGroup>(optionGroupsList);
    }

    /**
     * Appends the given option groups to the existing list (creating it first
     * if necessary). Use {@link #setOptionGroupsList(java.util.Collection)} or
     * {@link #withOptionGroupsList(java.util.Collection)} if you want to
     * override the existing values instead.
     *
     * @param optionGroupsList List of option groups.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeOptionGroupsResult withOptionGroupsList(
            OptionGroup... optionGroupsList) {
        if (this.optionGroupsList == null) {
            this.optionGroupsList = new com.amazonaws.internal.SdkInternalList<OptionGroup>(
                    optionGroupsList.length);
        }
        for (OptionGroup group : optionGroupsList) {
            this.optionGroupsList.add(group);
        }
        return this;
    }

    /**
     * Replaces the list of option groups with the given collection and returns
     * this object for chaining.
     *
     * @param optionGroupsList List of option groups.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeOptionGroupsResult withOptionGroupsList(
            java.util.Collection<OptionGroup> optionGroupsList) {
        setOptionGroupsList(optionGroupsList);
        return this;
    }

    /**
     * Sets the optional pagination token provided by a previous request.
     *
     * @param marker the pagination token, or {@code null}
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the optional pagination token provided by a previous request, or
     * {@code null} when none was set.
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Sets the optional pagination token and returns this object for chaining.
     *
     * @param marker the pagination token, or {@code null}
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeOptionGroupsResult withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("{");
        if (getOptionGroupsList() != null) {
            buffer.append("OptionGroupsList: " + getOptionGroupsList() + ",");
        }
        if (getMarker() != null) {
            buffer.append("Marker: " + getMarker());
        }
        buffer.append("}");
        return buffer.toString();
    }

    // Null-safe equality: true when both values are null, or both non-null and equal.
    private static boolean nullSafeEquals(Object left, Object right) {
        return (left == null) ? (right == null) : left.equals(right);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeOptionGroupsResult)) {
            return false;
        }
        DescribeOptionGroupsResult that = (DescribeOptionGroupsResult) obj;
        return nullSafeEquals(that.getOptionGroupsList(), this.getOptionGroupsList())
                && nullSafeEquals(that.getMarker(), this.getMarker());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getOptionGroupsList() == null) ? 0 : getOptionGroupsList().hashCode());
        hashCode = prime * hashCode
                + ((getMarker() == null) ? 0 : getMarker().hashCode());
        return hashCode;
    }

    @Override
    public DescribeOptionGroupsResult clone() {
        try {
            return (DescribeOptionGroupsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.apache.storm.topology;
import static org.apache.storm.spout.CheckpointSpout.CHECKPOINT_COMPONENT_ID;
import static org.apache.storm.spout.CheckpointSpout.CHECKPOINT_STREAM_ID;
import static org.apache.storm.utils.Utils.parseJson;
import java.io.NotSerializableException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.storm.Config;
import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.ComponentCommon;
import org.apache.storm.generated.ComponentObject;
import org.apache.storm.generated.GlobalStreamId;
import org.apache.storm.generated.Grouping;
import org.apache.storm.generated.NullStruct;
import org.apache.storm.generated.SharedMemory;
import org.apache.storm.generated.SpoutSpec;
import org.apache.storm.generated.StateSpoutSpec;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.grouping.CustomStreamGrouping;
import org.apache.storm.grouping.PartialKeyGrouping;
import org.apache.storm.hooks.IWorkerHook;
import org.apache.storm.lambda.LambdaBiConsumerBolt;
import org.apache.storm.lambda.LambdaConsumerBolt;
import org.apache.storm.lambda.LambdaSpout;
import org.apache.storm.lambda.SerializableBiConsumer;
import org.apache.storm.lambda.SerializableConsumer;
import org.apache.storm.lambda.SerializableSupplier;
import org.apache.storm.shade.org.json.simple.JSONValue;
import org.apache.storm.spout.CheckpointSpout;
import org.apache.storm.state.State;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.utils.Utils;
import org.apache.storm.windowing.TupleWindow;
/**
* TopologyBuilder exposes the Java API for specifying a topology for Storm to execute. Topologies are Thrift structures in the end, but
* since the Thrift API is so verbose, TopologyBuilder greatly eases the process of creating topologies. The template for creating and
* submitting a topology looks something like:
*
* <p>```java TopologyBuilder builder = new TopologyBuilder();
*
* <p>builder.setSpout("1", new TestWordSpout(true), 5); builder.setSpout("2", new TestWordSpout(true), 3); builder.setBolt("3", new
* TestWordCounter(), 3) .fieldsGrouping("1", new Fields("word")) .fieldsGrouping("2", new Fields("word")); builder.setBolt("4", new
* TestGlobalCount()) .globalGrouping("1");
*
* <p>Map<String, Object> conf = new HashMap(); conf.put(Config.TOPOLOGY_WORKERS, 4);
*
* <p>StormSubmitter.submitTopology("mytopology", conf, builder.createTopology()); ```
*
* <p>Running the exact same topology in local mode (in process), and configuring it to log all tuples emitted, looks
* like the following. Note that it lets the topology run for 10 seconds before shutting down the local cluster.
*
* <p>```java TopologyBuilder builder = new TopologyBuilder();
*
* <p>builder.setSpout("1", new TestWordSpout(true), 5); builder.setSpout("2", new TestWordSpout(true), 3); builder.setBolt("3", new
* TestWordCounter(), 3) .fieldsGrouping("1", new Fields("word")) .fieldsGrouping("2", new Fields("word")); builder.setBolt("4", new
* TestGlobalCount()) .globalGrouping("1");
*
* <p>Map<String, Object> conf = new HashMap(); conf.put(Config.TOPOLOGY_WORKERS, 4); conf.put(Config.TOPOLOGY_DEBUG, true);
*
* <p>try (LocalCluster cluster = new LocalCluster(); LocalTopology topo = cluster.submitTopology("mytopology", conf,
* builder.createTopology());){ Utils.sleep(10000); } ```
*
* <p>The pattern for `TopologyBuilder` is to map component ids to components using the setSpout and setBolt methods. Those methods return
* objects that are then used to declare the inputs for that component.
*/
public class TopologyBuilder {
// Bolts registered via setBolt(...), keyed by component id.
private final Map<String, IRichBolt> bolts = new HashMap<>();
// Spouts keyed by component id (presumably populated by setSpout — outside this view).
private final Map<String, IRichSpout> spouts = new HashMap<>();
// Per-component ComponentCommon, keyed by component id (assumed populated by initCommon — TODO confirm).
private final Map<String, ComponentCommon> commons = new HashMap<>();
// Component id -> names of the shared memory regions that component requests.
private final Map<String, Set<String>> componentToSharedMemory = new HashMap<>();
// Shared memory region name -> its definition; installed on the topology at build time.
private final Map<String, SharedMemory> sharedMemory = new HashMap<>();
// NOTE(review): flag for stateful bolts; not read anywhere within this view — verify usage.
private boolean hasStatefulBolt = false;
// State spout specs by id (not passed to createTopology's StormTopology within this view).
private Map<String, StateSpoutSpec> stateSpouts = new HashMap<>();
// Serialized worker hooks, attached to the topology via set_worker_hooks.
private List<ByteBuffer> workerHooks = new ArrayList<>();
// Merges newMap over into (newMap's entries win on key collisions) and renders
// the combined map as a JSON string.
private static String mergeIntoJson(Map<String, Object> into, Map<String, Object> newMap) {
    Map<String, Object> combined = new HashMap<>();
    combined.putAll(into);
    combined.putAll(newMap);
    return JSONValue.toJSONString(combined);
}
/**
 * Builds the Thrift {@link StormTopology} from the spouts and bolts registered
 * so far. Each component is Java-serialized; a component holding a field that
 * is not serializable fails here with an explanatory {@link IllegalStateException}.
 * Checkpointing support, worker hooks, and shared memory settings are wired in
 * as well.
 *
 * @return the topology, with version information added
 * @throws IllegalStateException if a spout or bolt contains a non-serializable field
 */
public StormTopology createTopology() {
    Map<String, Bolt> boltSpecs = new HashMap<>();
    Map<String, SpoutSpec> spoutSpecs = new HashMap<>();
    maybeAddCheckpointSpout();
    for (Map.Entry<String, IRichBolt> entry : bolts.entrySet()) {
        String boltId = entry.getKey();
        // may wrap the bolt so checkpoint tuples are forwarded (see maybeAddCheckpointTupleForwarder)
        IRichBolt bolt = maybeAddCheckpointTupleForwarder(entry.getValue());
        ComponentCommon common = getComponentCommon(boltId, bolt);
        try {
            maybeAddCheckpointInputs(common);
            boltSpecs.put(boltId, new Bolt(ComponentObject.serialized_java(Utils.javaSerialize(bolt)), common));
        } catch (RuntimeException wrapperCause) {
            if (wrapperCause.getCause() != null && NotSerializableException.class.equals(wrapperCause.getCause().getClass())) {
                throw nonSerializableComponentError("Bolt", boltId, "prepare", wrapperCause);
            }
            throw wrapperCause;
        }
    }
    for (Map.Entry<String, IRichSpout> entry : spouts.entrySet()) {
        String spoutId = entry.getKey();
        IRichSpout spout = entry.getValue();
        ComponentCommon common = getComponentCommon(spoutId, spout);
        try {
            spoutSpecs.put(spoutId, new SpoutSpec(ComponentObject.serialized_java(Utils.javaSerialize(spout)), common));
        } catch (RuntimeException wrapperCause) {
            if (wrapperCause.getCause() != null && NotSerializableException.class.equals(wrapperCause.getCause().getClass())) {
                throw nonSerializableComponentError("Spout", spoutId, "open", wrapperCause);
            }
            throw wrapperCause;
        }
    }
    StormTopology stormTopology = new StormTopology(spoutSpecs,
                                                    boltSpecs,
                                                    new HashMap<>());
    stormTopology.set_worker_hooks(workerHooks);
    if (!componentToSharedMemory.isEmpty()) {
        stormTopology.set_component_to_shared_memory(componentToSharedMemory);
        stormTopology.set_shared_memory(sharedMemory);
    }
    return Utils.addVersions(stormTopology);
}

/**
 * Builds the error thrown when a component fails to Java-serialize because it
 * holds a non-serializable field. Shared by the bolt and spout paths, which
 * previously duplicated this message (with an unbalanced quote around the
 * component id, fixed here).
 *
 * @param componentKind "Bolt" or "Spout", used in the message
 * @param componentId id of the offending component
 * @param lifecycleMethod where the field should be created instead ("prepare" or "open")
 * @param wrapperCause the RuntimeException wrapping the NotSerializableException
 */
private static IllegalStateException nonSerializableComponentError(String componentKind, String componentId,
                                                                   String lifecycleMethod, RuntimeException wrapperCause) {
    // NotSerializableException's message is the class name of the offending field
    String fieldType = wrapperCause.getCause().getMessage();
    return new IllegalStateException(
        componentKind + " '" + componentId + "' contains a non-serializable field of type " + fieldType + ", "
            + "which was instantiated prior to topology creation. " + fieldType
            + " should be instantiated within the " + lifecycleMethod + " method of '" + componentId
            + "' at the earliest.",
        wrapperCause);
}
/**
 * Define a new bolt in this topology with parallelism of just one thread.
 * Delegates to {@link #setBolt(String, IRichBolt, Number)} with a {@code null}
 * parallelism hint.
 *
 * @param id the id of this component. This id is referenced by other components that want to consume this bolt's outputs.
 * @param bolt the bolt
 * @return use the returned object to declare the inputs to this component
 *
 * @throws IllegalArgumentException if {@code parallelism_hint} is not positive
 */
public BoltDeclarer setBolt(String id, IRichBolt bolt) throws IllegalArgumentException {
    return setBolt(id, bolt, null);
}
/**
 * Define a new bolt in this topology with the specified amount of parallelism.
 *
 * @param id the id of this component. This id is referenced by other components that want to consume this bolt's
 *     outputs.
 * @param bolt the bolt
 * @param parallelismHint the number of tasks that should be assigned to execute this bolt. Each task will run on a thread in a process
 *     somewhere around the cluster.
 * @return use the returned object to declare the inputs to this component
 *
 * @throws IllegalArgumentException if {@code parallelism_hint} is not positive
 */
public BoltDeclarer setBolt(String id, IRichBolt bolt, Number parallelismHint) throws IllegalArgumentException {
    // rejects an id already in use by another component (see validateUnusedId)
    validateUnusedId(id);
    // sets up this component's ComponentCommon entry, including parallelism — see initCommon
    initCommon(id, bolt, parallelismHint);
    bolts.put(id, bolt);
    // the returned declarer is used by the caller to wire up this bolt's inputs
    return new BoltGetter(id);
}
/**
 * Define a new bolt in this topology. This defines a basic bolt, which is a simpler to use but more restricted kind of bolt. Basic
 * bolts are intended for non-aggregation processing and automate the anchoring/acking process to achieve proper reliability in the
 * topology. Delegates to {@link #setBolt(String, IBasicBolt, Number)} with a
 * {@code null} parallelism hint.
 *
 * @param id the id of this component. This id is referenced by other components that want to consume this bolt's outputs.
 * @param bolt the basic bolt
 * @return use the returned object to declare the inputs to this component
 *
 * @throws IllegalArgumentException if {@code parallelism_hint} is not positive
 */
public BoltDeclarer setBolt(String id, IBasicBolt bolt) throws IllegalArgumentException {
    return setBolt(id, bolt, null);
}
/**
 * Define a new bolt in this topology. This defines a basic bolt, which is a simpler to use but more restricted kind of bolt. Basic
 * bolts are intended for non-aggregation processing and automate the anchoring/acking process to achieve proper reliability in the
 * topology.
 *
 * @param id the id of this component. This id is referenced by other components that want to consume this bolt's
 *     outputs.
 * @param bolt the basic bolt
 * @param parallelismHint the number of tasks that should be assigned to execute this bolt. Each task will run on a thread in a process
 *     somewhere around the cluster.
 * @return use the returned object to declare the inputs to this component
 *
 * @throws IllegalArgumentException if {@code parallelism_hint} is not positive
 */
public BoltDeclarer setBolt(String id, IBasicBolt bolt, Number parallelismHint) throws IllegalArgumentException {
    // adapt the basic bolt to the IRichBolt interface expected by the topology
    return setBolt(id, new BasicBoltExecutor(bolt), parallelismHint);
}
/**
 * Define a new bolt in this topology. This defines a windowed bolt, intended for windowing operations. The {@link
 * IWindowedBolt#execute(TupleWindow)} method is triggered for each window interval with the list of current events in the window.
 * Delegates to {@link #setBolt(String, IWindowedBolt, Number)} with a
 * {@code null} parallelism hint.
 *
 * @param id the id of this component. This id is referenced by other components that want to consume this bolt's outputs.
 * @param bolt the windowed bolt
 * @return use the returned object to declare the inputs to this component
 *
 * @throws IllegalArgumentException if {@code parallelism_hint} is not positive
 */
public BoltDeclarer setBolt(String id, IWindowedBolt bolt) throws IllegalArgumentException {
    return setBolt(id, bolt, null);
}
/**
 * Define a new bolt in this topology. This defines a windowed bolt, intended for windowing operations. The {@link
 * IWindowedBolt#execute(TupleWindow)} method is triggered for each window interval with the list of current events in the window.
 *
 * @param id the id of this component. This id is referenced by other components that want to consume this bolt's
 *     outputs.
 * @param bolt the windowed bolt
 * @param parallelismHint the number of tasks that should be assigned to execute this bolt. Each task will run on a thread in a process
 *     somewhere around the cluster.
 * @return use the returned object to declare the inputs to this component
 *
 * @throws IllegalArgumentException if {@code parallelism_hint} is not positive
 */
public BoltDeclarer setBolt(String id, IWindowedBolt bolt, Number parallelismHint) throws IllegalArgumentException {
    // wrap the windowed bolt in an executor that adapts it to IRichBolt
    return setBolt(id, new WindowedBoltExecutor(bolt), parallelismHint);
}
/**
* Define a new bolt in this topology. This defines a stateful bolt, that requires its state (of computation) to be saved. When this
* bolt is initialized, the {@link IStatefulBolt#initState(State)} method is invoked after {@link IStatefulBolt#prepare(Map,
* TopologyContext, OutputCollector)} but before {@link IStatefulBolt#execute(Tuple)} with its previously saved state.
* <p>
* The framework provides at-least once guarantee for the state updates. Bolts (both stateful and non-stateful) in a stateful topology
* are expected to anchor the tuples while emitting and ack the input tuples once it is processed.
* </p>
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's outputs.
* @param bolt the stateful bolt
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public <T extends State> BoltDeclarer setBolt(String id, IStatefulBolt<T> bolt) throws IllegalArgumentException {
return setBolt(id, bolt, null);
}
/**
* Define a new bolt in this topology. This defines a stateful bolt, that requires its state (of computation) to be saved. When this
* bolt is initialized, the {@link IStatefulBolt#initState(State)} method is invoked after {@link IStatefulBolt#prepare(Map,
* TopologyContext, OutputCollector)} but before {@link IStatefulBolt#execute(Tuple)} with its previously saved state.
* <p>
* The framework provides at-least once guarantee for the state updates. Bolts (both stateful and non-stateful) in a stateful topology
* are expected to anchor the tuples while emitting and ack the input tuples once it is processed.
* </p>
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's
* outputs.
* @param bolt the stateful bolt
* @param parallelismHint the number of tasks that should be assigned to execute this bolt. Each task will run on a thread in a process
* somewhere around the cluster.
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code parallelism_hint} is not positive
*/
public <T extends State> BoltDeclarer setBolt(String id, IStatefulBolt<T> bolt, Number parallelismHint) throws
IllegalArgumentException {
// Mark the topology as stateful so the checkpoint spout and checkpoint inputs
// get wired in (see maybeAddCheckpointSpout / maybeAddCheckpointInputs).
hasStatefulBolt = true;
return setBolt(id, new StatefulBoltExecutor<T>(bolt), parallelismHint);
}
/**
* Define a new bolt in this topology. This defines a stateful windowed bolt, intended for stateful windowing operations. The {@link
* IStatefulWindowedBolt#execute(TupleWindow)} method is triggered for each window interval with the list of current events in the
* window. During initialization of this bolt {@link IStatefulWindowedBolt#initState(State)} is invoked with its previously saved
* state.
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's outputs.
* @param bolt the stateful windowed bolt
* @param <T> the type of the state (e.g. {@link org.apache.storm.state.KeyValueState})
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public <T extends State> BoltDeclarer setBolt(String id, IStatefulWindowedBolt<T> bolt) throws IllegalArgumentException {
// Delegate with a null parallelism hint: the hint is simply omitted from the component config.
return setBolt(id, bolt, null);
}
/**
* Define a new bolt in this topology. This defines a stateful windowed bolt, intended for stateful windowing operations. The {@link
* IStatefulWindowedBolt#execute(TupleWindow)} method is triggered for each window interval with the list of current events in the
* window. During initialization of this bolt {@link IStatefulWindowedBolt#initState(State)} is invoked with its previously saved
* state.
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's
* outputs.
* @param bolt the stateful windowed bolt
* @param parallelismHint the number of tasks that should be assigned to execute this bolt. Each task will run on a thread in a process
* somewhere around the cluster.
* @param <T> the type of the state (e.g. {@link org.apache.storm.state.KeyValueState})
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code parallelism_hint} is not positive
*/
public <T extends State> BoltDeclarer setBolt(String id, IStatefulWindowedBolt<T> bolt, Number parallelismHint) throws
IllegalArgumentException {
    // Mark the topology as stateful so the checkpointing machinery gets added.
    hasStatefulBolt = true;
    // Choose the windowing adapter based on isPersistent(); both adapt the
    // windowed bolt to the IStatefulBolt contract.
    IStatefulBolt<T> windowingExecutor = bolt.isPersistent()
        ? new PersistentWindowedBoltExecutor<T>(bolt)
        : new StatefulWindowedBoltExecutor<T>(bolt);
    return setBolt(id, new StatefulBoltExecutor<T>(windowingExecutor), parallelismHint);
}
/**
* Define a new bolt in this topology. This defines a lambda basic bolt, which is a simpler to use but more restricted kind of bolt.
* Basic bolts are intended for non-aggregation processing and automate the anchoring/acking process to achieve proper reliability in
* the topology.
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's outputs.
* @param biConsumer lambda expression that implements tuple processing for this bolt
* @param fields fields for tuple that should be emitted to downstream bolts
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public BoltDeclarer setBolt(String id, SerializableBiConsumer<Tuple, BasicOutputCollector> biConsumer, String... fields) throws
IllegalArgumentException {
// Delegate with a null parallelism hint: the hint is simply omitted from the component config.
return setBolt(id, biConsumer, null, fields);
}
/**
* Define a new bolt in this topology. This defines a lambda basic bolt, which is a simpler to use but more restricted kind of bolt.
* Basic bolts are intended for non-aggregation processing and automate the anchoring/acking process to achieve proper reliability in
* the topology.
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's
* outputs.
* @param biConsumer lambda expression that implements tuple processing for this bolt
* @param fields fields for tuple that should be emitted to downstream bolts
* @param parallelismHint the number of tasks that should be assigned to execute this bolt. Each task will run on a thread in a process
* somewhere around the cluster.
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code parallelism_hint} is not positive
*/
public BoltDeclarer setBolt(String id, SerializableBiConsumer<Tuple, BasicOutputCollector> biConsumer, Number parallelismHint,
String... fields) throws IllegalArgumentException {
    // Materialize the lambda as a basic bolt that emits the declared fields.
    LambdaBiConsumerBolt lambdaBolt = new LambdaBiConsumerBolt(biConsumer, fields);
    return setBolt(id, lambdaBolt, parallelismHint);
}
/**
* Define a new bolt in this topology. This defines a lambda basic bolt, which is a simpler to use but more restricted kind of bolt.
* Basic bolts are intended for non-aggregation processing and automate the anchoring/acking process to achieve proper reliability in
* the topology.
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's outputs.
* @param consumer lambda expression that implements tuple processing for this bolt
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public BoltDeclarer setBolt(String id, SerializableConsumer<Tuple> consumer) throws IllegalArgumentException {
// Delegate with a null parallelism hint: the hint is simply omitted from the component config.
return setBolt(id, consumer, null);
}
/**
* Define a new bolt in this topology. This defines a lambda basic bolt, which is a simpler to use but more restricted kind of bolt.
* Basic bolts are intended for non-aggregation processing and automate the anchoring/acking process to achieve proper reliability in
* the topology.
*
* @param id the id of this component. This id is referenced by other components that want to consume this bolt's
* outputs.
* @param consumer lambda expression that implements tuple processing for this bolt
* @param parallelismHint the number of tasks that should be assigned to execute this bolt. Each task will run on a thread in a process
* somewhere around the cluster.
* @return use the returned object to declare the inputs to this component
*
* @throws IllegalArgumentException if {@code parallelism_hint} is not positive
*/
public BoltDeclarer setBolt(String id, SerializableConsumer<Tuple> consumer, Number parallelismHint) throws IllegalArgumentException {
    // Materialize the lambda as a basic bolt before registering it.
    LambdaConsumerBolt lambdaBolt = new LambdaConsumerBolt(consumer);
    return setBolt(id, lambdaBolt, parallelismHint);
}
/**
* Define a new spout in this topology.
*
* @param id the id of this component. This id is referenced by other components that want to consume this spout's outputs.
* @param spout the spout
* @return use the returned object to declare the inputs to this component
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public SpoutDeclarer setSpout(String id, IRichSpout spout) throws IllegalArgumentException {
return setSpout(id, spout, null);
}
/**
* Define a new spout in this topology with the specified parallelism. If the spout declares itself as non-distributed, the
* parallelism_hint will be ignored and only one task will be allocated to this component.
*
* @param id the id of this component. This id is referenced by other components that want to consume this spout's
* outputs.
* @param spout the spout
* @param parallelismHint the number of tasks that should be assigned to execute this spout. Each task will run on a thread in a
* process somewhere around the cluster.
* @return use the returned object to declare the inputs to this component
* @throws IllegalArgumentException if {@code parallelism_hint} is not positive
*/
public SpoutDeclarer setSpout(String id, IRichSpout spout, Number parallelismHint) throws IllegalArgumentException {
// Fail fast on duplicate component ids across bolts, spouts and state spouts.
validateUnusedId(id);
// Record the parallelism hint and the component-level configuration.
initCommon(id, spout, parallelismHint);
spouts.put(id, spout);
return new SpoutGetter(id);
}
/**
* Define a new spout in this topology.
*
* @param id the id of this component. This id is referenced by other components that want to consume this spout's outputs.
* @param supplier lambda expression that implements tuple generating for this spout
* @return use the returned object to declare the inputs to this component
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public SpoutDeclarer setSpout(String id, SerializableSupplier<?> supplier) throws IllegalArgumentException {
return setSpout(id, supplier, null);
}
/**
* Define a new spout in this topology with the specified parallelism. If the spout declares itself as non-distributed, the
* parallelism_hint will be ignored and only one task will be allocated to this component.
*
* @param id the id of this component. This id is referenced by other components that want to consume this spout's
* outputs.
* @param supplier lambda expression that implements tuple generating for this spout
* @param parallelismHint the number of tasks that should be assigned to execute this spout. Each task will run on a thread in a
* process somewhere around the cluster.
* @return use the returned object to declare the inputs to this component
* @throws IllegalArgumentException if {@code parallelism_hint} is not positive
*/
public SpoutDeclarer setSpout(String id, SerializableSupplier<?> supplier, Number parallelismHint) throws IllegalArgumentException {
// Wrap the lambda in a LambdaSpout so it runs as a regular rich spout.
return setSpout(id, new LambdaSpout(supplier), parallelismHint);
}
/**
* Define a new state spout in this topology.
*
* <p>NOTE(review): state spout support is unfinished -- the id is validated and
* reserved, but the component is never actually registered (see the TODO below).</p>
*
* @param id the id of this component
* @param stateSpout the state spout
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public void setStateSpout(String id, IRichStateSpout stateSpout) throws IllegalArgumentException {
setStateSpout(id, stateSpout, null);
}
/**
* Define a new state spout in this topology with the specified parallelism.
*
* <p>NOTE(review): implementation incomplete -- only the id check is performed.</p>
*
* @param id the id of this component
* @param stateSpout the state spout
* @param parallelismHint the number of tasks that should be assigned to execute this state spout
* @throws IllegalArgumentException if {@code id} has already been used by another component
*/
public void setStateSpout(String id, IRichStateSpout stateSpout, Number parallelismHint) throws IllegalArgumentException {
validateUnusedId(id);
// TODO: finish
}
/**
* Add a new worker lifecycle hook.
*
* @param workerHook the lifecycle hook to add; must not be null
* @throws IllegalArgumentException if {@code workerHook} is null
*/
public void addWorkerHook(IWorkerHook workerHook) {
    if (workerHook == null) {
        throw new IllegalArgumentException("WorkerHook must not be null.");
    }
    // Serialize the hook up front and store it as raw bytes.
    byte[] serializedHook = Utils.javaSerialize(workerHook);
    workerHooks.add(ByteBuffer.wrap(serializedHook));
}
/**
* Ensure {@code id} has not already been used by a bolt, spout or state spout.
*
* @param id the candidate component id
* @throws IllegalArgumentException if the id is already taken
*/
private void validateUnusedId(String id) {
if (bolts.containsKey(id)) {
throw new IllegalArgumentException("Bolt has already been declared for id " + id);
}
if (spouts.containsKey(id)) {
throw new IllegalArgumentException("Spout has already been declared for id " + id);
}
if (stateSpouts.containsKey(id)) {
throw new IllegalArgumentException("State spout has already been declared for id " + id);
}
}
/**
* If the topology has at least one stateful bolt add a {@link CheckpointSpout} component to the topology.
*/
private void maybeAddCheckpointSpout() {
if (hasStatefulBolt) {
setSpout(CHECKPOINT_COMPONENT_ID, new CheckpointSpout(), 1);
}
}
/**
* If the topology has at least one stateful bolt, wire the checkpoint streams into the given component's inputs.
*/
private void maybeAddCheckpointInputs(ComponentCommon common) {
if (hasStatefulBolt) {
addCheckPointInputs(common);
}
}
/**
* If the topology has at least one stateful bolt all the non-stateful bolts are wrapped in {@link CheckpointTupleForwarder} so that the
* checkpoint tuples can flow through the topology.
*/
private IRichBolt maybeAddCheckpointTupleForwarder(IRichBolt bolt) {
if (hasStatefulBolt && !(bolt instanceof StatefulBoltExecutor)) {
bolt = new CheckpointTupleForwarder(bolt);
}
return bolt;
}
/**
* For bolts that have incoming streams from spouts (the root bolts), add a checkpoint stream from the checkpoint spout to their
* inputs. For other bolts, add a checkpoint stream from the upstream bolt to their inputs.
*/
private void addCheckPointInputs(ComponentCommon component) {
    // Collect first, then mutate: calling put_to_inputs while iterating the
    // input key set would modify the map being traversed.
    Set<GlobalStreamId> checkpointStreams = new HashSet<>();
    for (GlobalStreamId input : component.get_inputs().keySet()) {
        String sourceId = input.get_componentId();
        // Root bolts (fed directly by spouts) get checkpoints from the checkpoint
        // spout; downstream bolts get them from their upstream bolt.
        String checkpointSource = spouts.containsKey(sourceId) ? CHECKPOINT_COMPONENT_ID : sourceId;
        checkpointStreams.add(new GlobalStreamId(checkpointSource, CHECKPOINT_STREAM_ID));
    }
    for (GlobalStreamId stream : checkpointStreams) {
        component.put_to_inputs(stream, Grouping.all(new NullStruct()));
    }
}
// Builds a copy of the stored ComponentCommon for the given id with the
// component's declared output streams attached.
private ComponentCommon getComponentCommon(String id, IComponent component) {
    OutputFieldsGetter declaredOutputs = new OutputFieldsGetter();
    component.declareOutputFields(declaredOutputs);
    ComponentCommon common = new ComponentCommon(commons.get(id));
    common.set_streams(declaredOutputs.getFieldsDeclaration());
    return common;
}
// Creates and stores the ComponentCommon for a newly declared component:
// empty inputs, the validated parallelism hint (if given), and the
// component-level configuration serialized as JSON.
private void initCommon(String id, IComponent component, Number parallelism) throws IllegalArgumentException {
    ComponentCommon common = new ComponentCommon();
    common.set_inputs(new HashMap<GlobalStreamId, Grouping>());
    if (parallelism != null) {
        int parallelismValue = parallelism.intValue();
        if (parallelismValue < 1) {
            throw new IllegalArgumentException("Parallelism must be positive.");
        }
        common.set_parallelism_hint(parallelismValue);
    }
    Map<String, Object> componentConf = component.getComponentConfiguration();
    if (componentConf != null) {
        common.set_json_conf(JSONValue.toJSONString(componentConf));
    }
    commons.put(id, common);
}
/**
 * Fluent declarer that reads and writes a component's configuration, resource
 * requests and shared memory requests by mutating the {@code ComponentCommon}
 * stored in {@code commons} under this component's id.
 */
protected class ConfigGetter<T extends ComponentConfigurationDeclarer> extends BaseConfigurationDeclarer<T> {
    // Id of the component whose stored ComponentCommon this declarer mutates.
    final String id;

    public ConfigGetter(String id) {
        this.id = id;
    }

    /**
     * Merge the given entries into the component's JSON configuration.
     *
     * @throws IllegalArgumentException if the map tries to set Kryo registrations,
     *     which cannot be configured per-component through the fluent API
     */
    @SuppressWarnings("unchecked")
    @Override
    public T addConfigurations(Map<String, Object> conf) {
        if (conf != null) {
            if (conf.containsKey(Config.TOPOLOGY_KRYO_REGISTER)) {
                throw new IllegalArgumentException("Cannot set serializations for a component using fluent API");
            }
            if (!conf.isEmpty()) {
                String currConf = commons.get(id).get_json_conf();
                commons.get(id).set_json_conf(mergeIntoJson(parseJson(currConf), conf));
            }
        }
        return (T) this;
    }

    /**
     * return the current component configuration.
     *
     * @return the current configuration.
     */
    @Override
    public Map<String, Object> getComponentConfiguration() {
        return parseJson(commons.get(id).get_json_conf());
    }

    /** Merge the given resource requests into the component's resources map. */
    @SuppressWarnings("unchecked") // cast of the deserialized resources map; consistent with addResource
    @Override
    public T addResources(Map<String, Double> resources) {
        if (resources != null && !resources.isEmpty()) {
            String currConf = commons.get(id).get_json_conf();
            Map<String, Object> conf = parseJson(currConf);
            Map<String, Double> currentResources =
                (Map<String, Double>) conf.computeIfAbsent(Config.TOPOLOGY_COMPONENT_RESOURCES_MAP, (k) -> new HashMap<>());
            currentResources.putAll(resources);
            commons.get(id).set_json_conf(JSONValue.toJSONString(conf));
        }
        return (T) this;
    }

    /** Set a single named resource request for this component. */
    @SuppressWarnings("unchecked")
    @Override
    public T addResource(String resourceName, Number resourceValue) {
        Map<String, Object> componentConf = parseJson(commons.get(id).get_json_conf());
        Map<String, Double> resourcesMap = (Map<String, Double>) componentConf.computeIfAbsent(
            Config.TOPOLOGY_COMPONENT_RESOURCES_MAP, (k) -> new HashMap<>());
        resourcesMap.put(resourceName, resourceValue.doubleValue());
        return addConfiguration(Config.TOPOLOGY_COMPONENT_RESOURCES_MAP, resourcesMap);
    }

    /**
     * Register a named shared memory request for this component.
     *
     * @throws IllegalArgumentException if a different region was already registered under the same name
     */
    @SuppressWarnings("unchecked")
    @Override
    public T addSharedMemory(SharedMemory request) {
        SharedMemory found = sharedMemory.get(request.get_name());
        if (found != null && !found.equals(request)) {
            throw new IllegalArgumentException("Cannot have multiple different shared memory regions with the same name");
        }
        sharedMemory.put(request.get_name(), request);
        Set<String> mems = componentToSharedMemory.computeIfAbsent(id, (k) -> new HashSet<>());
        mems.add(request.get_name());
        return (T) this;
    }
}
/**
* Fluent declarer returned by {@code setSpout}; inherits all configuration
* methods from {@code ConfigGetter}.
*/
protected class SpoutGetter extends ConfigGetter<SpoutDeclarer> implements SpoutDeclarer {
public SpoutGetter(String id) {
super(id);
}
}
protected class BoltGetter extends ConfigGetter<BoltDeclarer> implements BoltDeclarer {
private String boltId;
public BoltGetter(String boltId) {
super(boltId);
this.boltId = boltId;
}
public BoltDeclarer fieldsGrouping(String componentId, Fields fields) {
return fieldsGrouping(componentId, Utils.DEFAULT_STREAM_ID, fields);
}
public BoltDeclarer fieldsGrouping(String componentId, String streamId, Fields fields) {
return grouping(componentId, streamId, Grouping.fields(fields.toList()));
}
public BoltDeclarer globalGrouping(String componentId) {
return globalGrouping(componentId, Utils.DEFAULT_STREAM_ID);
}
public BoltDeclarer globalGrouping(String componentId, String streamId) {
return grouping(componentId, streamId, Grouping.fields(new ArrayList<String>()));
}
public BoltDeclarer shuffleGrouping(String componentId) {
return shuffleGrouping(componentId, Utils.DEFAULT_STREAM_ID);
}
public BoltDeclarer shuffleGrouping(String componentId, String streamId) {
return grouping(componentId, streamId, Grouping.shuffle(new NullStruct()));
}
public BoltDeclarer localOrShuffleGrouping(String componentId) {
return localOrShuffleGrouping(componentId, Utils.DEFAULT_STREAM_ID);
}
public BoltDeclarer localOrShuffleGrouping(String componentId, String streamId) {
return grouping(componentId, streamId, Grouping.local_or_shuffle(new NullStruct()));
}
public BoltDeclarer noneGrouping(String componentId) {
return noneGrouping(componentId, Utils.DEFAULT_STREAM_ID);
}
public BoltDeclarer noneGrouping(String componentId, String streamId) {
return grouping(componentId, streamId, Grouping.none(new NullStruct()));
}
public BoltDeclarer allGrouping(String componentId) {
return allGrouping(componentId, Utils.DEFAULT_STREAM_ID);
}
public BoltDeclarer allGrouping(String componentId, String streamId) {
return grouping(componentId, streamId, Grouping.all(new NullStruct()));
}
public BoltDeclarer directGrouping(String componentId) {
return directGrouping(componentId, Utils.DEFAULT_STREAM_ID);
}
public BoltDeclarer directGrouping(String componentId, String streamId) {
return grouping(componentId, streamId, Grouping.direct(new NullStruct()));
}
private BoltDeclarer grouping(String componentId, String streamId, Grouping grouping) {
commons.get(boltId).put_to_inputs(new GlobalStreamId(componentId, streamId), grouping);
return this;
}
@Override
public BoltDeclarer grouping(GlobalStreamId id, Grouping grouping) {
return grouping(id.get_componentId(), id.get_streamId(), grouping);
}
@Override
public BoltDeclarer partialKeyGrouping(String componentId, Fields fields) {
return customGrouping(componentId, new PartialKeyGrouping(fields));
}
@Override
public BoltDeclarer partialKeyGrouping(String componentId, String streamId, Fields fields) {
return customGrouping(componentId, streamId, new PartialKeyGrouping(fields));
}
@Override
public BoltDeclarer customGrouping(String componentId, CustomStreamGrouping grouping) {
return customGrouping(componentId, Utils.DEFAULT_STREAM_ID, grouping);
}
@Override
public BoltDeclarer customGrouping(String componentId, String streamId, CustomStreamGrouping grouping) {
return grouping(componentId, streamId, Grouping.custom_serialized(Utils.javaSerialize(grouping)));
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.authorization.implementation;
import com.azure.resourcemanager.authorization.AuthorizationManager;
import com.azure.resourcemanager.authorization.fluent.models.KeyCredentialInner;
import com.azure.resourcemanager.authorization.fluent.models.PasswordCredentialInner;
import com.azure.resourcemanager.authorization.fluent.models.ServicePrincipalInner;
import com.azure.resourcemanager.authorization.models.ActiveDirectoryApplication;
import com.azure.resourcemanager.authorization.models.BuiltInRole;
import com.azure.resourcemanager.authorization.models.CertificateCredential;
import com.azure.resourcemanager.authorization.models.PasswordCredential;
import com.azure.resourcemanager.authorization.models.RoleAssignment;
import com.azure.resourcemanager.authorization.models.ServicePrincipal;
import com.azure.resourcemanager.authorization.models.ServicePrincipalCreateParameters;
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
import com.azure.resourcemanager.resources.fluentcore.model.implementation.CreatableUpdatableImpl;
import com.azure.resourcemanager.resources.models.ResourceGroup;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
/** Implementation for ServicePrincipal and its parent interfaces. */
class ServicePrincipalImpl extends CreatableUpdatableImpl<ServicePrincipal, ServicePrincipalInner, ServicePrincipalImpl>
implements ServicePrincipal,
ServicePrincipal.Definition,
ServicePrincipal.Update,
HasCredential<ServicePrincipalImpl> {
private AuthorizationManager manager;
private Map<String, PasswordCredential> cachedPasswordCredentials;
private Map<String, CertificateCredential> cachedCertificateCredentials;
private Map<String, RoleAssignment> cachedRoleAssignments;
private ServicePrincipalCreateParameters createParameters;
private Creatable<ActiveDirectoryApplication> applicationCreatable;
private Map<String, BuiltInRole> rolesToCreate;
private Set<String> rolesToDelete;
String assignedSubscription;
private List<CertificateCredentialImpl<?>> certificateCredentialsToCreate;
private List<PasswordCredentialImpl<?>> passwordCredentialsToCreate;
private Set<String> certificateCredentialsToDelete;
private Set<String> passwordCredentialsToDelete;
ServicePrincipalImpl(ServicePrincipalInner innerObject, AuthorizationManager manager) {
super(innerObject.displayName(), innerObject);
this.manager = manager;
this.createParameters = new ServicePrincipalCreateParameters();
this.createParameters.withAccountEnabled(true);
this.cachedRoleAssignments = new HashMap<>();
this.rolesToCreate = new HashMap<>();
this.rolesToDelete = new HashSet<>();
this.cachedCertificateCredentials = new HashMap<>();
this.certificateCredentialsToCreate = new ArrayList<>();
this.certificateCredentialsToDelete = new HashSet<>();
this.cachedPasswordCredentials = new HashMap<>();
this.passwordCredentialsToCreate = new ArrayList<>();
this.passwordCredentialsToDelete = new HashSet<>();
}
@Override
public String applicationId() {
return innerModel().appId();
}
@Override
public List<String> servicePrincipalNames() {
return innerModel().servicePrincipalNames();
}
@Override
public Map<String, PasswordCredential> passwordCredentials() {
return Collections.unmodifiableMap(cachedPasswordCredentials);
}
@Override
public Map<String, CertificateCredential> certificateCredentials() {
return Collections.unmodifiableMap(cachedCertificateCredentials);
}
@Override
public Set<RoleAssignment> roleAssignments() {
return Collections.unmodifiableSet(new HashSet<>(cachedRoleAssignments.values()));
}
@Override
protected Mono<ServicePrincipalInner> getInnerAsync() {
return manager.serviceClient().getServicePrincipals().getAsync(id());
}
@Override
public Mono<ServicePrincipal> createResourceAsync() {
Mono<ServicePrincipal> sp = Mono.just(this);
if (isInCreateMode()) {
if (applicationCreatable != null) {
ActiveDirectoryApplication application = this.taskResult(applicationCreatable.key());
createParameters.withAppId(application.applicationId());
}
sp = manager.serviceClient().getServicePrincipals()
.createAsync(createParameters).map(innerToFluentMap(this));
}
return sp
.flatMap(
servicePrincipal ->
submitCredentialsAsync(servicePrincipal).mergeWith(submitRolesAsync(servicePrincipal)).last())
.map(
servicePrincipal -> {
for (PasswordCredentialImpl<?> passwordCredential : passwordCredentialsToCreate) {
passwordCredential.exportAuthFile((ServicePrincipalImpl) servicePrincipal);
}
for (CertificateCredentialImpl<?> certificateCredential : certificateCredentialsToCreate) {
certificateCredential.exportAuthFile((ServicePrincipalImpl) servicePrincipal);
}
passwordCredentialsToCreate.clear();
certificateCredentialsToCreate.clear();
return servicePrincipal;
});
}
private Mono<ServicePrincipal> submitCredentialsAsync(final ServicePrincipal sp) {
Mono<ServicePrincipal> mono = Mono.empty();
if (!certificateCredentialsToCreate.isEmpty() || !certificateCredentialsToDelete.isEmpty()) {
Map<String, CertificateCredential> newCerts = new HashMap<>(cachedCertificateCredentials);
for (String delete : certificateCredentialsToDelete) {
newCerts.remove(delete);
}
for (CertificateCredential create : certificateCredentialsToCreate) {
newCerts.put(create.name(), create);
}
List<KeyCredentialInner> updateKeyCredentials = new ArrayList<>();
for (CertificateCredential certificateCredential : newCerts.values()) {
updateKeyCredentials.add(certificateCredential.innerModel());
}
mono =
mono
.concatWith(
manager()
.serviceClient()
.getServicePrincipals()
.updateKeyCredentialsAsync(sp.id(), updateKeyCredentials)
.then(Mono.just(ServicePrincipalImpl.this)))
.last();
}
if (!passwordCredentialsToCreate.isEmpty() || !passwordCredentialsToDelete.isEmpty()) {
Map<String, PasswordCredential> newPasses = new HashMap<>(cachedPasswordCredentials);
for (String delete : passwordCredentialsToDelete) {
newPasses.remove(delete);
}
for (PasswordCredential create : passwordCredentialsToCreate) {
newPasses.put(create.name(), create);
}
List<PasswordCredentialInner> updatePasswordCredentials = new ArrayList<>();
for (PasswordCredential passwordCredential : newPasses.values()) {
updatePasswordCredentials.add(passwordCredential.innerModel());
}
mono =
mono
.concatWith(
manager()
.serviceClient()
.getServicePrincipals()
.updatePasswordCredentialsAsync(sp.id(), updatePasswordCredentials)
.then(Mono.just(ServicePrincipalImpl.this)))
.last();
}
return mono
.flatMap(
servicePrincipal -> {
passwordCredentialsToDelete.clear();
certificateCredentialsToDelete.clear();
return refreshCredentialsAsync();
});
}
private Mono<ServicePrincipal> submitRolesAsync(final ServicePrincipal servicePrincipal) {
Mono<ServicePrincipal> create;
if (rolesToCreate.isEmpty()) {
create = Mono.just(servicePrincipal);
} else {
create =
Flux
.fromIterable(rolesToCreate.entrySet())
.flatMap(
roleEntry ->
manager()
.roleAssignments()
.define(this.manager().internalContext().randomUuid())
.forServicePrincipal(servicePrincipal)
.withBuiltInRole(roleEntry.getValue())
.withScope(roleEntry.getKey())
.createAsync())
.doOnNext(
indexable ->
cachedRoleAssignments.put(indexable.id(), indexable))
.last()
.map(
indexable -> {
rolesToCreate.clear();
return servicePrincipal;
});
}
Mono<ServicePrincipal> delete;
if (rolesToDelete.isEmpty()) {
delete = Mono.just(servicePrincipal);
} else {
delete =
Flux
.fromIterable(rolesToDelete)
.flatMap(
role ->
manager()
.roleAssignments()
.deleteByIdAsync(cachedRoleAssignments.get(role).id())
.thenReturn(role))
.doOnNext(s -> cachedRoleAssignments.remove(s))
.last()
.map(
s -> {
rolesToDelete.clear();
return servicePrincipal;
});
}
return create.mergeWith(delete).last();
}
@Override
public boolean isInCreateMode() {
return id() == null;
}
Mono<ServicePrincipal> refreshCredentialsAsync() {
return Mono
.just(ServicePrincipalImpl.this)
.map(
(Function<ServicePrincipalImpl, ServicePrincipal>)
servicePrincipal -> {
servicePrincipal.cachedCertificateCredentials.clear();
servicePrincipal.cachedPasswordCredentials.clear();
return servicePrincipal;
})
.concatWith(
manager()
.serviceClient()
.getServicePrincipals()
.listKeyCredentialsAsync(id())
.map(
keyCredentialInner -> {
CertificateCredential credential = new CertificateCredentialImpl<>(keyCredentialInner);
ServicePrincipalImpl.this.cachedCertificateCredentials.put(credential.name(), credential);
return ServicePrincipalImpl.this;
}))
.concatWith(
manager()
.serviceClient()
.getServicePrincipals()
.listPasswordCredentialsAsync(id())
.map(
passwordCredentialInner -> {
PasswordCredential credential = new PasswordCredentialImpl<>(passwordCredentialInner);
ServicePrincipalImpl.this.cachedPasswordCredentials.put(credential.name(), credential);
return ServicePrincipalImpl.this;
}))
.last();
}
@Override
public Mono<ServicePrincipal> refreshAsync() {
return getInnerAsync().map(innerToFluentMap(this)).flatMap(application -> refreshCredentialsAsync());
}
@Override
public CertificateCredentialImpl<ServicePrincipalImpl> defineCertificateCredential(String name) {
return new CertificateCredentialImpl<>(name, this);
}
@Override
public PasswordCredentialImpl<ServicePrincipalImpl> definePasswordCredential(String name) {
return new PasswordCredentialImpl<>(name, this);
}
@Override
public ServicePrincipalImpl withoutCredential(String name) {
if (cachedPasswordCredentials.containsKey(name)) {
passwordCredentialsToDelete.add(name);
} else if (cachedCertificateCredentials.containsKey(name)) {
certificateCredentialsToDelete.add(name);
}
return this;
}
@Override
public ServicePrincipalImpl withCertificateCredential(CertificateCredentialImpl<?> credential) {
this.certificateCredentialsToCreate.add(credential);
return this;
}
@Override
public ServicePrincipalImpl withPasswordCredential(PasswordCredentialImpl<?> credential) {
this.passwordCredentialsToCreate.add(credential);
return this;
}
@Override
public ServicePrincipalImpl withExistingApplication(String id) {
createParameters.withAppId(id);
return this;
}
@Override
public ServicePrincipalImpl withExistingApplication(ActiveDirectoryApplication application) {
createParameters.withAppId(application.applicationId());
return this;
}
@Override
public ServicePrincipalImpl withNewApplication(Creatable<ActiveDirectoryApplication> applicationCreatable) {
this.addDependency(applicationCreatable);
this.applicationCreatable = applicationCreatable;
return this;
}
@Override
public ServicePrincipalImpl withNewApplication(String signOnUrl) {
return withNewApplication(
manager.applications().define(name()).withSignOnUrl(signOnUrl).withIdentifierUrl(signOnUrl));
}
@Override
public ServicePrincipalImpl withNewRole(BuiltInRole role, String scope) {
this.rolesToCreate.put(scope, role);
return this;
}
@Override
public ServicePrincipalImpl withNewRoleInSubscription(BuiltInRole role, String subscriptionId) {
this.assignedSubscription = subscriptionId;
return withNewRole(role, "subscriptions/" + subscriptionId);
}
@Override
public ServicePrincipalImpl withNewRoleInResourceGroup(BuiltInRole role, ResourceGroup resourceGroup) {
return withNewRole(role, resourceGroup.id());
}
    /** Queues the given role assignment (by id) for deletion. */
    @Override
    public Update withoutRole(RoleAssignment roleAssignment) {
        this.rolesToDelete.add(roleAssignment.id());
        return this;
    }
    /** Returns this service principal's id (the inner model's object id). */
    @Override
    public String id() {
        return innerModel().objectId();
    }
    /** Returns the manager this resource belongs to. */
    @Override
    public AuthorizationManager manager() {
        return this.manager;
    }
}
| |
/*
* Copyright 2014 Attila Szegedi, Daniel Dekany, Jonathan Revusky
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package freemarker.test.servlet;
import static org.junit.Assert.*;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.webapp.WebAppContext;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class WebAppTestCase {

    public static final String IGNORED_MASK = "[IGNORED]";

    private static final Logger LOG = LoggerFactory.getLogger(WebAppTestCase.class);

    private static final String ATTR_JETTY_CONTAINER_INCLUDE_JAR_PATTERN
            = "org.eclipse.jetty.server.webapp.ContainerIncludeJarPattern";

    private static final String EXPECTED_DIR = "/WEB-INF/expected/";

    // These patterns are stateless and thread-safe; compile them once per class
    // instead of once per test instance (they were instance fields before).
    private static final Pattern BR = Pattern.compile("\r\n|\r");
    private static final Pattern MULTI_LINE_WS = Pattern.compile("[\t ]*[\r\n][\t \r\n]*", Pattern.DOTALL);
    private static final Pattern SAME_LINE_WS = Pattern.compile("[\t ]+", Pattern.DOTALL);

    private static Server server;
    private static ContextHandlerCollection contextHandlers;

    /**
     * Guards {@link #deployedWebApps} and deployment/restart operations. The map is static, so
     * instance-level {@code synchronized} (as used previously) did not exclude two different test
     * instances from deploying the same web app concurrently; a class-wide lock does.
     */
    private static final Object DEPLOYMENT_LOCK = new Object();
    private static final Map<String, WebAppContext> deployedWebApps = new HashMap<String, WebAppContext>();

    @BeforeClass
    public static void beforeClass() throws Exception {
        // Work around Java 5 bug(?) that causes Jasper to fail with "zip file closed" when it reads the JSTL jar:
        org.eclipse.jetty.util.resource.Resource.setDefaultUseCaches(false);

        LOG.info("Starting embedded Jetty...");
        server = new Server(0); // port 0: let the OS pick a free port
        contextHandlers = new ContextHandlerCollection();
        server.setHandler(contextHandlers);
        server.start();
    }

    @AfterClass
    public static void afterClass() throws Exception {
        LOG.info("Stopping embedded Jetty...");
        server.stop();
        server.join(); // TODO redundant?
    }

    /**
     * Fetches the content of the given web-app-relative URL, failing the test unless the HTTP
     * status is 200 (OK).
     */
    protected final String getResponseContent(String webAppName, String webAppRelURL) throws Exception {
        HTTPResponse resp = getHTTPResponse(webAppName, webAppRelURL);
        if (resp.getStatusCode() != HttpURLConnection.HTTP_OK) {
            fail("Expected HTTP status " + HttpURLConnection.HTTP_OK + ", but got "
                    + resp.getStatusCode() + " (message: " + resp.getStatusMessage() + ") for URI "
                    + resp.getURI());
        }
        return resp.getContent();
    }

    /** Fetches the given web-app-relative URL and returns only the HTTP status code. */
    protected final int getResponseStatusCode(String webAppName, String webAppRelURL) throws Exception {
        return getHTTPResponse(webAppName, webAppRelURL).getStatusCode();
    }

    /**
     * Performs an HTTP GET against the given web application, deploying it on demand first.
     *
     * @param webAppName the web application's name; must not start or end with {@code "/"}
     * @param webAppRelURL URL relative to the web application root; must not start or end with {@code "/"}
     */
    protected final HTTPResponse getHTTPResponse(String webAppName, String webAppRelURL) throws Exception {
        if (webAppName.startsWith("/") || webAppName.endsWith("/")) {
            throw new IllegalArgumentException("\"webAppName\" can't start or end with \"/\": " + webAppName);
        }
        if (webAppRelURL.startsWith("/") || webAppRelURL.endsWith("/")) {
            throw new IllegalArgumentException("\"webappRelURL\" can't start or end with \"/\": " + webAppRelURL);
        }

        ensureWebAppIsDeployed(webAppName);

        final URI uri = new URI("http://localhost:" + server.getConnectors()[0].getLocalPort()
                + "/" + webAppName + "/" + webAppRelURL);

        final HttpURLConnection httpCon = (HttpURLConnection) uri.toURL().openConnection();
        httpCon.connect();
        try {
            LOG.debug("HTTP GET: {}", uri);

            final int responseCode = httpCon.getResponseCode();

            final String content;
            if (responseCode == 200) {
                InputStream in = httpCon.getInputStream();
                try {
                    content = IOUtils.toString(in, "UTF-8");
                } finally {
                    in.close();
                }
            } else {
                // Non-OK responses: the error body is not interesting to these tests.
                content = null;
            }

            return new HTTPResponse(
                    responseCode, httpCon.getResponseMessage(),
                    content,
                    uri);
        } finally {
            httpCon.disconnect();
        }
    }

    /**
     * Compares the output of the JSP and the FTL version of the same page, ignoring some of the whitespace differences.
     * @param webAppRelURLWithoutExt something like {@code "tester?view=foo"}, which will be extended to
     *          {@code "tester?view=foo.jsp"} and {@code "tester?view=foo.ftl"}, and then the output of these extended
     *          URL-s will be compared.
     */
    protected void assertJSPAndFTLOutputEquals(String webAppName, String webAppRelURLWithoutExt) throws Exception {
        assertOutputsEqual(webAppName, webAppRelURLWithoutExt + ".jsp", webAppRelURLWithoutExt + ".ftl");
    }

    /** Asserts that the two URLs produce the same output after whitespace compression. */
    protected void assertOutputsEqual(String webAppName, String webAppRelURL1, final String webAppRelURL2)
            throws Exception {
        String output1 = normalizeWS(getResponseContent(webAppName, webAppRelURL1), true);
        String output2 = normalizeWS(getResponseContent(webAppName, webAppRelURL2), true);
        assertEquals(output1, output2);
    }

    protected void assertExpectedEqualsOutput(String webAppName, String expectedFileName, String webAppRelURL)
            throws Exception {
        assertExpectedEqualsOutput(webAppName, expectedFileName, webAppRelURL, true);
    }

    protected void assertExpectedEqualsOutput(String webAppName, String expectedFileName, String webAppRelURL,
            boolean compressWS) throws Exception {
        assertExpectedEqualsOutput(webAppName, expectedFileName, webAppRelURL, compressWS, null);
    }

    /**
     * @param expectedFileName
     *            The name of the file that stores the expected content, relatively to
     *            {@code servletContext:/WEB-INF/expected}.
     * @param ignoredParts
     *            Parts that will be search-and-replaced with {@value #IGNORED_MASK} with both in the expected and
     *            actual outputs.
     */
    protected void assertExpectedEqualsOutput(String webAppName, String expectedFileName, String webAppRelURL,
            boolean compressWS, List<Pattern> ignoredParts) throws Exception {
        final String actual = normalizeWS(getResponseContent(webAppName, webAppRelURL), compressWS);
        final String expected;
        {
            final InputStream in = new URL(getWebAppDirURL(webAppName) + EXPECTED_DIR + expectedFileName).openStream();
            try {
                expected = normalizeWS(IOUtils.toString(in, "UTF-8"), compressWS);
            } finally {
                in.close();
            }
        }
        assertEquals(maskIgnored(expected, ignoredParts), maskIgnored(actual, ignoredParts));
    }

    /** Replaces every {@code ignoredParts} match with {@value #IGNORED_MASK}; no-op when the list is null. */
    private String maskIgnored(String s, List<Pattern> ignoredParts) {
        if (ignoredParts == null) return s;

        for (Pattern ignoredPart : ignoredParts) {
            s = ignoredPart.matcher(s).replaceAll(IGNORED_MASK);
        }
        return s;
    }

    /** Restarts the given web application if it has already been deployed; no-op otherwise. */
    protected void restartWebAppIfStarted(String webAppName) throws Exception {
        synchronized (DEPLOYMENT_LOCK) {
            WebAppContext context = deployedWebApps.get(webAppName);
            if (context != null) {
                context.stop();
                context.start();
            }
        }
    }

    /**
     * Normalizes line breaks to {@code "\n"}; when {@code compressWS}, additionally collapses
     * whitespace runs around line breaks to a single {@code "\n"}, same-line whitespace runs to a
     * single space, and trims the result.
     */
    private String normalizeWS(String s, boolean compressWS) {
        if (compressWS) {
            return SAME_LINE_WS.matcher(
                    MULTI_LINE_WS.matcher(s).replaceAll("\n"))
                    .replaceAll(" ")
                    .trim();
        } else {
            return BR.matcher(s).replaceAll("\n");
        }
    }

    /** Deploys the given web application into the running Jetty, unless it's already deployed. */
    private void ensureWebAppIsDeployed(String webAppName) throws Exception {
        synchronized (DEPLOYMENT_LOCK) {
            if (deployedWebApps.containsKey(webAppName)) {
                return;
            }

            final String webAppDirURL = getWebAppDirURL(webAppName);

            WebAppContext context = new WebAppContext(webAppDirURL, "/" + webAppName);

            // Pattern of jar file names scanned for META-INF/*.tld:
            context.setAttribute(
                    ATTR_JETTY_CONTAINER_INCLUDE_JAR_PATTERN,
                    ".*taglib.*\\.jar$");

            contextHandlers.addHandler(context);
            // As we add this after the Server was started, it has to be started manually:
            context.start();

            deployedWebApps.put(webAppName, context);

            LOG.info("Deployed web app.: {}", webAppName);
        }
    }

    /**
     * Returns the URL of the web application's directory (the parent of its {@code WEB-INF}),
     * located as a class-relative resource of this class or of a superclass that is still a
     * {@link WebAppTestCase}.
     *
     * @throws IOException if the web application's {@code web.xml} test resource can't be found
     */
    private String getWebAppDirURL(String webAppName) throws IOException {
        final URL webXmlURL;
        {
            final String relResPath = "webapps/" + webAppName + "/WEB-INF/web.xml";

            Class<?> baseClass = this.getClass();
            findWebXmlURL: do {
                URL r = baseClass.getResource(relResPath);
                if (r != null) {
                    webXmlURL = r;
                    break findWebXmlURL;
                }

                baseClass = baseClass.getSuperclass();
                if (!WebAppTestCase.class.isAssignableFrom(baseClass)) {
                    throw new IOException("Can't find test class relative resource: " + relResPath);
                }
            } while (true);
        }

        try {
            // Resolve ".." twice conceptually: web.xml -> WEB-INF -> web app dir.
            return webXmlURL.toURI().resolve("..").toString();
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to get grandparent URL for " + webXmlURL, e);
        }
    }

    /** Immutable value object holding the interesting parts of an HTTP response. */
    private static class HTTPResponse {
        private final int statusCode;
        private final String content;
        private final String statusMessage;
        private final URI uri;

        public HTTPResponse(int statusCode, String statusMessage, String content, URI uri) {
            this.statusCode = statusCode;
            this.content = content;
            this.statusMessage = statusMessage;
            this.uri = uri;
        }

        public String getStatusMessage() {
            return statusMessage;
        }

        public int getStatusCode() {
            return statusCode;
        }

        /** The response body, or {@code null} for non-200 responses. */
        public String getContent() {
            return content;
        }

        public URI getURI() {
            return uri;
        }
    }
}
| |
/*
* Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.dom;
import com.sun.org.apache.xerces.internal.impl.RevalidationHandler;
import com.sun.org.apache.xerces.internal.parsers.DOMParserImpl;
import com.sun.org.apache.xerces.internal.parsers.DTDConfiguration;
import com.sun.org.apache.xerces.internal.parsers.XIncludeAwareParserConfiguration;
import com.sun.org.apache.xerces.internal.util.XMLChar;
import com.sun.org.apache.xerces.internal.utils.ObjectFactory;
import com.sun.org.apache.xerces.internal.xni.grammars.XMLGrammarDescription;
import com.sun.org.apache.xml.internal.serialize.DOMSerializerImpl;
import org.w3c.dom.DOMException;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.ls.LSParser;
import org.w3c.dom.ls.DOMImplementationLS;
import org.w3c.dom.ls.LSInput;
import org.w3c.dom.ls.LSOutput;
import org.w3c.dom.ls.LSSerializer;
/**
* The DOMImplementation class is description of a particular
* implementation of the Document Object Model. As such its data is
* static, shared by all instances of this implementation.
* <P>
* The DOM API requires that it be a real object rather than static
* methods. However, there's nothing that says it can't be a singleton,
* so that's how I've implemented it.
* <P>
* This particular class, along with CoreDocumentImpl, supports the DOM
* Core and Load/Save (Experimental). Optional modules are supported by
* the more complete DOMImplementation class along with DocumentImpl.
*
* @xerces.internal
*
* @version $Id: CoreDOMImplementationImpl.java,v 1.6 2010-11-01 04:39:37 joehw Exp $
* @since PR-DOM-Level-1-19980818.
*/
public class CoreDOMImplementationImpl
    implements DOMImplementation, DOMImplementationLS {
    //
    // Data
    //
    // validators pool
    private static final int SIZE = 2;
    private RevalidationHandler validators[] = new RevalidationHandler[SIZE];
    private RevalidationHandler dtdValidators[] = new RevalidationHandler[SIZE];
    // Index of the last free validator in each pool; -1 means the pool is empty.
    private int freeValidatorIndex = -1;
    private int freeDTDValidatorIndex = -1;
    // NOTE(review): currentSize is shared by both pools — each resize in releaseValidator()
    // bumps it by SIZE, so one pool growing also raises the size the other pool grows to next.
    private int currentSize = SIZE;
    // Document and doctype counter. Used to assign order to documents and
    // doctypes without owners, on an demand basis. Used for
    // compareDocumentPosition
    private int docAndDoctypeCounter = 0;
    // static
    /** Dom implementation singleton. */
    static CoreDOMImplementationImpl singleton =
        new CoreDOMImplementationImpl();
    //
    // Public methods
    //
    /** NON-DOM: Obtain and return the single shared object */
    public static DOMImplementation getDOMImplementation() {
        return singleton;
    }
    //
    // DOMImplementation methods
    //
    /**
     * Test if the DOM implementation supports a specific "feature" --
     * currently meaning language and level thereof.
     *
     * @param feature The package name of the feature to test.
     * In Level 1, supported values are "HTML" and "XML" (case-insensitive).
     * At this writing, com.sun.org.apache.xerces.internal.dom supports only XML.
     *
     * @param version The version number of the feature being tested.
     * This is interpreted as "Version of the DOM API supported for the
     * specified Feature", and in Level 1 should be "1.0"
     *
     * @return true iff this implementation is compatable with the specified
     * feature and version.
     */
    public boolean hasFeature(String feature, String version) {
        // A null or empty version matches any version of the feature.
        boolean anyVersion = version == null || version.length() == 0;
        // check if Xalan implementation is around and if yes report true for supporting
        // XPath API
        // if a plus sign "+" is prepended to any feature name, implementations
        // are considered in which the specified feature may not be directly
        // castable DOMImplementation.getFeature(feature, version). Without a
        // plus, only features whose interfaces are directly castable are considered.
        if ((feature.equalsIgnoreCase("+XPath"))
            && (anyVersion || version.equals("3.0"))) {
            try {
                // Reflectively probe for the optional XPath implementation so this class
                // has no hard dependency on it.
                Class xpathClass = ObjectFactory.findProviderClass(
                    "com.sun.org.apache.xpath.internal.domapi.XPathEvaluatorImpl", true);
                // Check if the DOM XPath implementation implements
                // the interface org.w3c.dom.XPathEvaluator
                Class interfaces[] = xpathClass.getInterfaces();
                for (int i = 0; i < interfaces.length; i++) {
                    if (interfaces[i].getName().equals(
                        "org.w3c.dom.xpath.XPathEvaluator")) {
                        return true;
                    }
                }
            } catch (Exception e) {
                return false;
            }
            return true;
        }
        if (feature.startsWith("+")) {
            feature = feature.substring(1);
        }
        return (
            feature.equalsIgnoreCase("Core")
                && (anyVersion
                    || version.equals("1.0")
                    || version.equals("2.0")
                    || version.equals("3.0")))
            || (feature.equalsIgnoreCase("XML")
                && (anyVersion
                    || version.equals("1.0")
                    || version.equals("2.0")
                    || version.equals("3.0")))
            || (feature.equalsIgnoreCase("LS")
                && (anyVersion || version.equals("3.0")));
    } // hasFeature(String,String):boolean
    /**
     * Introduced in DOM Level 2. <p>
     *
     * Creates an empty DocumentType node.
     *
     * @param qualifiedName The qualified name of the document type to be created.
     * @param publicID The document type public identifier.
     * @param systemID The document type system identifier.
     * @since WD-DOM-Level-2-19990923
     */
    public DocumentType createDocumentType( String qualifiedName,
                                    String publicID, String systemID) {
        // REVISIT: this might allow creation of invalid name for DOCTYPE
        //          xmlns prefix.
        //          also there is no way for a user to turn off error checking.
        checkQName(qualifiedName);
        return new DocumentTypeImpl(null, qualifiedName, publicID, systemID);
    }
    /**
     * Validates that the given name is a well-formed QName: at most one ':', not at the
     * first or last position, and both the prefix and local part are valid NCNames.
     *
     * @throws DOMException NAMESPACE_ERR or INVALID_CHARACTER_ERR on violation.
     */
    final void checkQName(String qname){
        int index = qname.indexOf(':');
        int lastIndex = qname.lastIndexOf(':');
        int length = qname.length();
        // it is an error for NCName to have more than one ':'
        // check if it is valid QName [Namespace in XML production 6]
        if (index == 0 || index == length - 1 || lastIndex != index) {
            String msg =
                DOMMessageFormatter.formatMessage(
                    DOMMessageFormatter.DOM_DOMAIN,
                    "NAMESPACE_ERR",
                    null);
            throw new DOMException(DOMException.NAMESPACE_ERR, msg);
        }
        int start = 0;
        // Namespace in XML production [6]
        if (index > 0) {
            // check that prefix is NCName
            if (!XMLChar.isNCNameStart(qname.charAt(start))) {
                String msg =
                    DOMMessageFormatter.formatMessage(
                        DOMMessageFormatter.DOM_DOMAIN,
                        "INVALID_CHARACTER_ERR",
                        null);
                throw new DOMException(DOMException.INVALID_CHARACTER_ERR, msg);
            }
            for (int i = 1; i < index; i++) {
                if (!XMLChar.isNCName(qname.charAt(i))) {
                    String msg =
                        DOMMessageFormatter.formatMessage(
                            DOMMessageFormatter.DOM_DOMAIN,
                            "INVALID_CHARACTER_ERR",
                            null);
                    throw new DOMException(
                        DOMException.INVALID_CHARACTER_ERR,
                        msg);
                }
            }
            start = index + 1;
        }
        // check local part
        if (!XMLChar.isNCNameStart(qname.charAt(start))) {
            // REVISIT: add qname parameter to the message
            String msg =
                DOMMessageFormatter.formatMessage(
                    DOMMessageFormatter.DOM_DOMAIN,
                    "INVALID_CHARACTER_ERR",
                    null);
            throw new DOMException(DOMException.INVALID_CHARACTER_ERR, msg);
        }
        for (int i = start + 1; i < length; i++) {
            if (!XMLChar.isNCName(qname.charAt(i))) {
                String msg =
                    DOMMessageFormatter.formatMessage(
                        DOMMessageFormatter.DOM_DOMAIN,
                        "INVALID_CHARACTER_ERR",
                        null);
                throw new DOMException(DOMException.INVALID_CHARACTER_ERR, msg);
            }
        }
    }
    /**
     * Introduced in DOM Level 2. <p>
     *
     * Creates an XML Document object of the specified type with its document
     * element.
     *
     * @param namespaceURI     The namespace URI of the document
     *                         element to create, or null.
     * @param qualifiedName    The qualified name of the document
     *                         element to create.
     * @param doctype          The type of document to be created or null.<p>
     *
     *                         When doctype is not null, its
     *                         Node.ownerDocument attribute is set to
     *                         the document being created.
     * @return Document        A new Document object.
     * @throws DOMException    WRONG_DOCUMENT_ERR: Raised if doctype has
     *                         already been used with a different document.
     * @since WD-DOM-Level-2-19990923
     */
    public Document createDocument(
        String namespaceURI,
        String qualifiedName,
        DocumentType doctype)
        throws DOMException {
        // A doctype may belong to at most one document.
        if (doctype != null && doctype.getOwnerDocument() != null) {
            String msg =
                DOMMessageFormatter.formatMessage(
                    DOMMessageFormatter.DOM_DOMAIN,
                    "WRONG_DOCUMENT_ERR",
                    null);
            throw new DOMException(DOMException.WRONG_DOCUMENT_ERR, msg);
        }
        CoreDocumentImpl doc = new CoreDocumentImpl(doctype);
        Element e = doc.createElementNS(namespaceURI, qualifiedName);
        doc.appendChild(e);
        return doc;
    }
    /**
     * DOM Level 3 WD - Experimental.
     */
    public Object getFeature(String feature, String version) {
        if (singleton.hasFeature(feature, version)) {
            if ((feature.equalsIgnoreCase("+XPath"))) {
                try {
                    Class xpathClass = ObjectFactory.findProviderClass(
                        "com.sun.org.apache.xpath.internal.domapi.XPathEvaluatorImpl", true);
                    // Check if the DOM XPath implementation implements
                    // the interface org.w3c.dom.XPathEvaluator
                    Class interfaces[] = xpathClass.getInterfaces();
                    for (int i = 0; i < interfaces.length; i++) {
                        if (interfaces[i].getName().equals(
                            "org.w3c.dom.xpath.XPathEvaluator")) {
                            // Return a fresh evaluator instance rather than the singleton.
                            return xpathClass.newInstance();
                        }
                    }
                } catch (Exception e) {
                    return null;
                }
            } else {
                return singleton;
            }
        }
        return null;
    }
    // DOM L3 LS
    /**
     * DOM Level 3 LS CR - Experimental.
     * Create a new <code>LSParser</code>. The newly constructed parser may
     * then be configured by means of its <code>DOMConfiguration</code>
     * object, and used to parse documents by means of its <code>parse</code>
     * method.
     * @param mode  The <code>mode</code> argument is either
     *   <code>MODE_SYNCHRONOUS</code> or <code>MODE_ASYNCHRONOUS</code>, if
     *   <code>mode</code> is <code>MODE_SYNCHRONOUS</code> then the
     *   <code>LSParser</code> that is created will operate in synchronous
     *   mode, if it's <code>MODE_ASYNCHRONOUS</code> then the
     *   <code>LSParser</code> that is created will operate in asynchronous
     *   mode.
     * @param schemaType  An absolute URI representing the type of the schema
     *   language used during the load of a <code>Document</code> using the
     *   newly created <code>LSParser</code>. Note that no lexical checking
     *   is done on the absolute URI. In order to create a
     *   <code>LSParser</code> for any kind of schema types (i.e. the
     *   LSParser will be free to use any schema found), use the value
     *   <code>null</code>.
     * <p ><b>Note:</b>   For W3C XML Schema [<a href='http://www.w3.org/TR/2001/REC-xmlschema-1-20010502/'>XML Schema Part 1</a>]
     *   , applications must use the value
     *   <code>"http://www.w3.org/2001/XMLSchema"</code>. For XML DTD [<a href='http://www.w3.org/TR/2000/REC-xml-20001006'>XML 1.0</a>],
     *   applications must use the value
     *   <code>"http://www.w3.org/TR/REC-xml"</code>. Other Schema languages
     *   are outside the scope of the W3C and therefore should recommend an
     *   absolute URI in order to use this method.
     * @return  The newly created <code>LSParser</code> object. This
     *   <code>LSParser</code> is either synchronous or asynchronous
     *   depending on the value of the <code>mode</code> argument.
     * <p ><b>Note:</b>   By default, the newly created <code>LSParser</code>
     *    does not contain a <code>DOMErrorHandler</code>, i.e. the value of
     *   the "<a href='http://www.w3.org/TR/2003/WD-DOM-Level-3-Core-20030609/core.html#parameter-error-handler'>
     *   error-handler</a>" configuration parameter is <code>null</code>. However, implementations
     *   may provide a default error handler at creation time. In that case,
     *   the initial value of the <code>"error-handler"</code> configuration
     *   parameter on the new created <code>LSParser</code> contains a
     *   reference to the default error handler.
     * @exception DOMException
     *    NOT_SUPPORTED_ERR: Raised if the requested mode or schema type is
     *   not supported.
     */
    public LSParser createLSParser(short mode, String schemaType)
        throws DOMException {
        // Only synchronous mode and the two known schema URIs are supported.
        if (mode != DOMImplementationLS.MODE_SYNCHRONOUS || (schemaType !=null &&
           !"http://www.w3.org/2001/XMLSchema".equals(schemaType) &&
            !"http://www.w3.org/TR/REC-xml".equals(schemaType))) {
            String msg =
                DOMMessageFormatter.formatMessage(
                    DOMMessageFormatter.DOM_DOMAIN,
                    "NOT_SUPPORTED_ERR",
                    null);
            throw new DOMException(DOMException.NOT_SUPPORTED_ERR, msg);
        }
        if (schemaType != null
            && schemaType.equals("http://www.w3.org/TR/REC-xml")) {
            // DTD-validating parser configuration.
            return new DOMParserImpl(new DTDConfiguration(),
                schemaType);
        }
        else {
            // create default parser configuration validating against XMLSchemas
            return new DOMParserImpl(new XIncludeAwareParserConfiguration(),
                schemaType);
        }
    }
    /**
     * DOM Level 3 LS CR - Experimental.
     * Create a new <code>LSSerializer</code> object.
     * @return The newly created <code>LSSerializer</code> object.
     * <p ><b>Note:</b>    By default, the newly created
     * <code>LSSerializer</code> has no <code>DOMErrorHandler</code>,
     * i.e. the value of the <code>"error-handler"</code> configuration
     * parameter is <code>null</code>. However, implementations may
     * provide a default error handler at creation time. In that case, the
     * initial value of the <code>"error-handler"</code> configuration
     * parameter on the new created <code>LSSerializer</code> contains a
     * reference to the default error handler.
     */
    public LSSerializer createLSSerializer() {
        return new DOMSerializerImpl();
    }
    /**
     * DOM Level 3 LS CR - Experimental.
     * Create a new empty input source.
     * @return  The newly created input object.
     */
    public LSInput createLSInput() {
        return new DOMInputImpl();
    }
    //
    // Protected methods
    //
    /**
     * NON-DOM: retrieve validator.
     *
     * <p>NOTE: the {@code ==} comparisons on {@code schemaType} below are deliberate identity
     * comparisons against the {@link XMLGrammarDescription} constants — presumably callers always
     * pass those constants (or interned equals) rather than arbitrary equal strings; verify
     * before changing to {@code equals()}.
     */
    synchronized RevalidationHandler getValidator(String schemaType) {
        // REVISIT: implement retrieving DTD validator
        if (schemaType == XMLGrammarDescription.XML_SCHEMA) {
            // create new validator - we should not attempt
            // to restrict the number of validation handlers being
            // requested
            if(freeValidatorIndex < 0) {
                return (RevalidationHandler) (ObjectFactory
                    .newInstance(
                        "com.sun.org.apache.xerces.internal.impl.xs.XMLSchemaValidator",
                        ObjectFactory.findClassLoader(),
                        true));
            }
            // return first available validator
            RevalidationHandler val = validators[freeValidatorIndex];
            // Clear the slot so the pooled validator can't be handed out twice.
            validators[freeValidatorIndex--] = null;
            return val;
        }
        else if(schemaType == XMLGrammarDescription.XML_DTD) {
            if(freeDTDValidatorIndex < 0) {
                return (RevalidationHandler) (ObjectFactory
                    .newInstance(
                        "com.sun.org.apache.xerces.internal.impl.dtd.XMLDTDValidator",
                        ObjectFactory.findClassLoader(),
                        true));
            }
            // return first available validator
            RevalidationHandler val = dtdValidators[freeDTDValidatorIndex];
            dtdValidators[freeDTDValidatorIndex--] = null;
            return val;
        }
        return null;
    }
    /**
     * NON-DOM: release validator — returns a validator to the matching pool, growing the
     * pool array by {@code SIZE} when it is full. Unknown schema types are silently ignored.
     */
    synchronized void releaseValidator(String schemaType,
                         RevalidationHandler validator) {
       // REVISIT: implement support for DTD validators as well
       if(schemaType == XMLGrammarDescription.XML_SCHEMA) {
           ++freeValidatorIndex;
           if (validators.length == freeValidatorIndex ){
                // resize size of the validators
                currentSize+=SIZE;
                RevalidationHandler newarray[] =  new RevalidationHandler[currentSize];
                System.arraycopy(validators, 0, newarray, 0, validators.length);
                validators = newarray;
           }
           validators[freeValidatorIndex]=validator;
       }
       else if(schemaType == XMLGrammarDescription.XML_DTD) {
           ++freeDTDValidatorIndex;
           if (dtdValidators.length == freeDTDValidatorIndex ){
                // resize size of the validators
                currentSize+=SIZE;
                RevalidationHandler newarray[] =  new RevalidationHandler[currentSize];
                System.arraycopy(dtdValidators, 0, newarray, 0, dtdValidators.length);
                dtdValidators = newarray;
           }
           dtdValidators[freeDTDValidatorIndex]=validator;
       }
    }
    /** NON-DOM:  increment document/doctype counter */
    protected synchronized int assignDocumentNumber() {
        return ++docAndDoctypeCounter;
    }
    /** NON-DOM:  increment document/doctype counter */
    protected synchronized int assignDocTypeNumber() {
        return ++docAndDoctypeCounter;
    }
    /* DOM Level 3 LS CR - Experimental.
     *
     * Create a new empty output destination object where
     * <code>LSOutput.characterStream</code>,
     * <code>LSOutput.byteStream</code>, <code>LSOutput.systemId</code>,
     * <code>LSOutput.encoding</code> are null.
     * @return  The newly created output object.
     */
    public LSOutput createLSOutput() {
        return new DOMOutputImpl();
    }
} // class DOMImplementationImpl
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.config;
import static java.util.stream.Collectors.toList;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.entities.Project;
import com.google.gerrit.entities.RefNames;
import com.google.gerrit.extensions.annotations.ExtensionPoint;
import com.google.gerrit.extensions.api.projects.ConfigValue;
import com.google.gerrit.extensions.api.projects.ProjectConfigEntryType;
import com.google.gerrit.extensions.events.GitReferenceUpdatedListener;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.extensions.registration.Extension;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.project.ProjectConfig;
import com.google.gerrit.server.project.ProjectState;
import com.google.inject.Inject;
import com.google.inject.ProvisionException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
@ExtensionPoint
public class ProjectConfigEntry {
private final String displayName;
private final String description;
private final boolean inheritable;
private final String defaultValue;
private final ProjectConfigEntryType type;
private final List<String> permittedValues;
  /** Creates a non-inheritable {@code STRING}-type entry with no description. */
  public ProjectConfigEntry(String displayName, String defaultValue) {
    this(displayName, defaultValue, false);
  }
  /** Creates a {@code STRING}-type entry with no description. */
  public ProjectConfigEntry(String displayName, String defaultValue, boolean inheritable) {
    this(displayName, defaultValue, inheritable, null);
  }
  /** Creates a {@code STRING}-type entry. */
  public ProjectConfigEntry(
      String displayName, String defaultValue, boolean inheritable, String description) {
    this(displayName, defaultValue, ProjectConfigEntryType.STRING, null, inheritable, description);
  }
  /** Creates a non-inheritable {@code INT}-type entry with no description. */
  public ProjectConfigEntry(String displayName, int defaultValue) {
    this(displayName, defaultValue, false);
  }
  /** Creates an {@code INT}-type entry with no description. */
  public ProjectConfigEntry(String displayName, int defaultValue, boolean inheritable) {
    this(displayName, defaultValue, inheritable, null);
  }
  /** Creates an {@code INT}-type entry; the default is stored in its decimal string form. */
  public ProjectConfigEntry(
      String displayName, int defaultValue, boolean inheritable, String description) {
    this(
        displayName,
        Integer.toString(defaultValue),
        ProjectConfigEntryType.INT,
        null,
        inheritable,
        description);
  }
  /** Creates a non-inheritable {@code LONG}-type entry with no description. */
  public ProjectConfigEntry(String displayName, long defaultValue) {
    this(displayName, defaultValue, false);
  }
  /** Creates a {@code LONG}-type entry with no description. */
  public ProjectConfigEntry(String displayName, long defaultValue, boolean inheritable) {
    this(displayName, defaultValue, inheritable, null);
  }
  /** Creates a {@code LONG}-type entry; the default is stored in its decimal string form. */
  public ProjectConfigEntry(
      String displayName, long defaultValue, boolean inheritable, String description) {
    this(
        displayName,
        Long.toString(defaultValue),
        ProjectConfigEntryType.LONG,
        null,
        inheritable,
        description);
  }
  // For inheritable boolean use 'LIST' type with InheritableBoolean
  /** Creates a non-inheritable {@code BOOLEAN}-type entry with no description. */
  public ProjectConfigEntry(String displayName, boolean defaultValue) {
    this(displayName, defaultValue, null);
  }
  // For inheritable boolean use 'LIST' type with InheritableBoolean
  /** Creates a {@code BOOLEAN}-type entry; always non-inheritable (see note above this method). */
  public ProjectConfigEntry(String displayName, boolean defaultValue, String description) {
    this(
        displayName,
        Boolean.toString(defaultValue),
        ProjectConfigEntryType.BOOLEAN,
        null,
        false,
        description);
  }
  /** Creates a non-inheritable {@code LIST}-type entry with the given permitted values. */
  public ProjectConfigEntry(String displayName, String defaultValue, List<String> permittedValues) {
    this(displayName, defaultValue, permittedValues, false);
  }
  /** Creates a {@code LIST}-type entry with the given permitted values and no description. */
  public ProjectConfigEntry(
      String displayName, String defaultValue, List<String> permittedValues, boolean inheritable) {
    this(displayName, defaultValue, permittedValues, inheritable, null);
  }
  /** Creates a {@code LIST}-type entry with the given permitted values. */
  public ProjectConfigEntry(
      String displayName,
      String defaultValue,
      List<String> permittedValues,
      boolean inheritable,
      String description) {
    this(
        displayName,
        defaultValue,
        ProjectConfigEntryType.LIST,
        permittedValues,
        inheritable,
        description);
  }
  /** Creates a non-inheritable {@code LIST}-type entry permitting the given enum's constant names. */
  public <T extends Enum<?>> ProjectConfigEntry(
      String displayName, T defaultValue, Class<T> permittedValues) {
    this(displayName, defaultValue, permittedValues, false);
  }
  /** Creates a {@code LIST}-type entry permitting the given enum's constant names; no description. */
  public <T extends Enum<?>> ProjectConfigEntry(
      String displayName, T defaultValue, Class<T> permittedValues, boolean inheritable) {
    this(displayName, defaultValue, permittedValues, inheritable, null);
  }
  /**
   * Creates a {@code LIST}-type entry whose permitted values are the names of the given enum's
   * constants and whose default value is the given constant's name.
   */
  public <T extends Enum<?>> ProjectConfigEntry(
      String displayName,
      T defaultValue,
      Class<T> permittedValues,
      boolean inheritable,
      String description) {
    this(
        displayName,
        defaultValue.name(),
        ProjectConfigEntryType.LIST,
        Arrays.stream(permittedValues.getEnumConstants()).map(Enum::name).collect(toList()),
        inheritable,
        description);
  }
public ProjectConfigEntry(
String displayName,
String defaultValue,
ProjectConfigEntryType type,
List<String> permittedValues,
boolean inheritable,
String description) {
this.displayName = displayName;
this.defaultValue = defaultValue;
this.type = type;
this.permittedValues = permittedValues;
this.inheritable = inheritable;
this.description = description;
if (type == ProjectConfigEntryType.ARRAY && inheritable) {
throw new ProvisionException("ARRAY doesn't support inheritable values");
}
}
  /** Returns the display name of this config entry. */
  public String getDisplayName() {
    return displayName;
  }
  /** Returns the description, or {@code null} if none was supplied. */
  public String getDescription() {
    return description;
  }
  /** Returns whether child projects inherit this entry's value. */
  public boolean isInheritable() {
    return inheritable;
  }
  /** Returns the default value, always stored in string form regardless of the entry type. */
  public String getDefaultValue() {
    return defaultValue;
  }
  /** Returns this entry's value type. */
  public ProjectConfigEntryType getType() {
    return type;
  }
  /**
   * Returns the permitted values; may be {@code null} (most constructors pass none).
   *
   * <p>NOTE(review): the internal list is returned directly, not copied — callers are trusted
   * not to modify it.
   */
  public List<String> getPermittedValues() {
    return permittedValues;
  }
  /**
   * Returns whether the project is editable
   *
   * @param project project state.
   * @return always {@code true} in this default implementation; extensions may override.
   */
  public boolean isEditable(ProjectState project) {
    return true;
  }
  /**
   * Returns any warning associated with the project
   *
   * @param project project state.
   * @return the warning text, or {@code null}; this default implementation has no warning.
   */
  public String getWarning(ProjectState project) {
    return null;
  }
/**
 * Called before the project config is updated. To modify the value before the project config is
 * updated, override this method and return the modified value. Default implementation returns the
 * same value unchanged.
 *
 * @param configValue the original configValue that was entered.
 * @return the modified configValue.
 */
public ConfigValue preUpdate(ConfigValue configValue) {
return configValue;
}
/**
 * Called after reading the project config value. To modify the value before returning it to the
 * client, override this method and return the modified value. Default implementation returns the
 * same value unchanged.
 *
 * @param project the project.
 * @param value the actual value of the config entry (computed out of the configured value, the
 *     inherited value and the default value).
 * @return the modified value.
 */
public String onRead(ProjectState project, String value) {
return value;
}
/**
 * Called after reading the project config value of type ARRAY. To modify the values before
 * returning it to the client, override this method and return the modified values. Default
 * implementation returns the same values unchanged.
 *
 * @param project the project.
 * @param values the actual values of the config entry (computed out of the configured value, the
 *     inherited value and the default value).
 * @return the modified values.
 */
public List<String> onRead(ProjectState project, List<String> values) {
return values;
}
/**
 * Called after a project config is updated. Default implementation is a no-op; override to react
 * to value changes. This String overload is invoked for STRING, LIST and ARRAY typed entries (see
 * {@link UpdateChecker}).
 *
 * @param project project name.
 * @param oldValue old entry value.
 * @param newValue new entry value.
 */
public void onUpdate(Project.NameKey project, String oldValue, String newValue) {}
/**
 * Called after a project config of type BOOLEAN is updated. Default implementation is a no-op;
 * override to react to value changes.
 *
 * @param project project name.
 * @param oldValue old entry value, may be {@code null}.
 * @param newValue new entry value, may be {@code null}.
 */
public void onUpdate(Project.NameKey project, Boolean oldValue, Boolean newValue) {}
/**
 * Called after a project config of type INT is updated. Default implementation is a no-op;
 * override to react to value changes.
 *
 * @param project project name.
 * @param oldValue old entry value, may be {@code null}.
 * @param newValue new entry value, may be {@code null}.
 */
public void onUpdate(Project.NameKey project, Integer oldValue, Integer newValue) {}
/**
 * Called after a project config of type LONG is updated. Default implementation is a no-op;
 * override to react to value changes.
 *
 * @param project project name.
 * @param oldValue old entry value, may be {@code null}.
 * @param newValue new entry value, may be {@code null}.
 */
public void onUpdate(Project.NameKey project, Long oldValue, Long newValue) {}
/**
 * Listens for updates of {@code refs/meta/config} and notifies plugin config entries about
 * changes of their values via the typed {@link ProjectConfigEntry#onUpdate} hooks.
 */
public static class UpdateChecker implements GitReferenceUpdatedListener {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  private final GitRepositoryManager repoManager;
  private final DynamicMap<ProjectConfigEntry> pluginConfigEntries;
  private final ProjectConfig.Factory projectConfigFactory;

  @Inject
  UpdateChecker(
      GitRepositoryManager repoManager,
      DynamicMap<ProjectConfigEntry> pluginConfigEntries,
      ProjectConfig.Factory projectConfigFactory) {
    this.repoManager = repoManager;
    this.pluginConfigEntries = pluginConfigEntries;
    this.projectConfigFactory = projectConfigFactory;
  }

  @Override
  public void onGitReferenceUpdated(Event event) {
    Project.NameKey p = Project.nameKey(event.getProjectName());
    // Only the project config branch is of interest.
    if (!event.getRefName().equals(RefNames.REFS_CONFIG)) {
      return;
    }
    try {
      ProjectConfig oldCfg = parseConfig(p, event.getOldObjectId());
      ProjectConfig newCfg = parseConfig(p, event.getNewObjectId());
      // Ref creation/deletion yields a null config on one side; nothing to compare then.
      if (oldCfg == null || newCfg == null) {
        return;
      }
      for (Extension<ProjectConfigEntry> e : pluginConfigEntries) {
        ProjectConfigEntry configEntry = e.getProvider().get();
        String newValue = getValue(newCfg, e);
        String oldValue = getValue(oldCfg, e);
        if ((newValue == null && oldValue == null)
            || (newValue != null && newValue.equals(oldValue))) {
          // BUG FIX: was 'return', which silently dropped update notifications for
          // every entry that followed an unchanged one. Skip only this entry.
          continue;
        }
        // Dispatch to the overload matching the entry's declared type.
        switch (configEntry.getType()) {
          case BOOLEAN:
            configEntry.onUpdate(p, toBoolean(oldValue), toBoolean(newValue));
            break;
          case INT:
            configEntry.onUpdate(p, toInt(oldValue), toInt(newValue));
            break;
          case LONG:
            configEntry.onUpdate(p, toLong(oldValue), toLong(newValue));
            break;
          case LIST:
          case STRING:
          case ARRAY:
          default:
            configEntry.onUpdate(p, oldValue, newValue);
        }
      }
    } catch (IOException | ConfigInvalidException e) {
      logger.atSevere().withCause(e).log(
          "Failed to check if plugin config of project %s was updated.", p.get());
    }
  }

  /**
   * Loads the project config at the given commit, or returns {@code null} for the zero id
   * (i.e. when the ref was just created or deleted).
   */
  private ProjectConfig parseConfig(Project.NameKey p, String idStr)
      throws IOException, ConfigInvalidException, RepositoryNotFoundException {
    ObjectId id = ObjectId.fromString(idStr);
    if (ObjectId.zeroId().equals(id)) {
      return null;
    }
    try (Repository repo = repoManager.openRepository(p)) {
      ProjectConfig pc = projectConfigFactory.create(p);
      pc.load(repo, id);
      return pc;
    }
  }

  /** Reads the entry's configured value, falling back to the entry's declared default. */
  private static String getValue(ProjectConfig cfg, Extension<ProjectConfigEntry> e) {
    String value = cfg.getPluginConfig(e.getPluginName()).getString(e.getExportName());
    if (value == null) {
      value = e.getProvider().get().getDefaultValue();
    }
    return value;
  }
}
/** Parses {@code value} as a boolean, preserving {@code null} for an absent value. */
private static Boolean toBoolean(String value) {
  if (value == null) {
    return null;
  }
  return Boolean.parseBoolean(value);
}
/** Parses {@code value} as an integer, preserving {@code null} for an absent value. */
private static Integer toInt(String value) {
  if (value == null) {
    return null;
  }
  return Integer.parseInt(value);
}
/** Parses {@code value} as a long, preserving {@code null} for an absent value. */
private static Long toLong(String value) {
  if (value == null) {
    return null;
  }
  return Long.parseLong(value);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.